Compare commits
187 Commits
| SHA1 |
|---|
| 36ab8ab68b |
| 27a832bbd1 |
| 18df9e288a |
| 7b786be892 |
| 374a930745 |
| 6d12e5c939 |
| bcf37067ad |
| a1ac0c2f88 |
| cfe190ca5b |
| c002d04328 |
| 0d1df4f9e5 |
| 59cc834a81 |
| dc54d9faef |
| 89bf5cb3f1 |
| c72ea9eb20 |
| 897387e39e |
| 4454a10f78 |
| c9814f7cdc |
| 187d8bcf28 |
| 204d426663 |
| 29e9afa47e |
| a6943c027f |
| 70e4fa73e1 |
| 579fa4715b |
| 0100bab04f |
| bdf97fa181 |
| eb1587fa7d |
| 5827cda316 |
| 0e9ec7a66a |
| 155957f0c5 |
| a8b46f191b |
| 3862ce3405 |
| 5403b51a5b |
| 1270400e95 |
| 3d2bb1c528 |
| 7c68f02cfd |
| ccd6447869 |
| 056c02c5a5 |
| 52a798e4c8 |
| fdfdef5837 |
| ff301f225c |
| 87f720f567 |
| fecb46c02c |
| cce2399b07 |
| df1db99ec0 |
| 5f2619500b |
| 843675fa1e |
| 2aa370c8ac |
| c25ff4a3aa |
| 5e32a70c3e |
| 3f6692a1cd |
| eb32b34b59 |
| 9eedf03c01 |
| 5ec7a8b530 |
| f2f330dbd9 |
| 2e0e056489 |
| d8685fe9a0 |
| e52a019d5e |
| 0d5cef1537 |
| e8f33eee4d |
| a3b71b174c |
| e2ce0e9acb |
| f47a183c11 |
| a9ea237cf3 |
| 6db1652dd3 |
| e3b95a8076 |
| 0fe2a8516f |
| 51bbaba162 |
| 77802da417 |
| 3e21858cb7 |
| 2003f28fd1 |
| 172b50bf77 |
| ec5694fedf |
| 968f774092 |
| 757dc9aa3c |
| 3d938562a6 |
| 964a62d4b4 |
| 99aa38fcfe |
| 5658514c8a |
| 109ee7d5e1 |
| f4fa02fe11 |
| 487ef504a8 |
| c98e407ed7 |
| 6b6ce1d43f |
| e491ac7458 |
| e9d8cbafc2 |
| aa705592b2 |
| 008e1262bb |
| 91a71ad004 |
| bbf479a440 |
| 983d392ba8 |
| 56dd9dd8aa |
| 20028c379d |
| e48046795e |
| 1ac9e45c73 |
| 488415436c |
| d7c5c2cd6e |
| 5388fd0d2b |
| e74b5c773a |
| bb6cd159f1 |
| 4a09f02882 |
| 9180bb1d9e |
| ece68b4b99 |
| 0c95b5e3cb |
| 0450bb1570 |
| 2f5a640c4c |
| 78964be506 |
| fbdf6de63c |
| 9b04dcd41e |
| 32e36d4962 |
| 62f28f79db |
| e88965b69d |
| e422d627af |
| 2e31ebb7d9 |
| 71adc8bea2 |
| bfa0acedab |
| 416b9ee815 |
| d1d2ac2614 |
| fa35fa53e2 |
| c0e5a77d6f |
| 96cc7b31b4 |
| 0d6d312f68 |
| be3011c565 |
| 09fae01985 |
| 12c0c9763d |
| 15322b742d |
| 85d6229aa6 |
| 083fab0a29 |
| 2a1aff589d |
| 3cd7198747 |
| 1e02f12794 |
| e4a3cf341f |
| 7ddcd97f68 |
| 80da96657c |
| 861e89f37a |
| c00cf9a6ff |
| ed7f3d8509 |
| 456090fee9 |
| e69ef376ae |
| 5f78771f66 |
| 09ceae90ec |
| c34351270c |
| 743c18bdd7 |
| 12d7ddab08 |
| e40646c664 |
| ab9b4257f2 |
| a2effca2b0 |
| f76703340c |
| dbc1fa00f7 |
| 4ecc1a191f |
| 4f8e3f845c |
| 132856a8f0 |
| b1f75bb786 |
| 0011a0f92e |
| b7ab364aab |
| 52e4647433 |
| b2b3d426ed |
| 7ae3ff504d |
| 50f3686c12 |
| b0c3c4cad9 |
| 9b8279da85 |
| 97e21b2ea8 |
| fb75de5b51 |
| 3eb9c6476b |
| c7ec929d05 |
| ca19fcc63f |
| 7904d0d7d9 |
| 8526e74aa7 |
| 6e16d84ba4 |
| 6765246231 |
| f50976358b |
| aa37fa8533 |
| 2ebdb77433 |
| 5ce30cfeaa |
| 82d6a7f895 |
| 9aad9dfbc6 |
| 86bac8f9e5 |
| 348ff092aa |
| 1ac08bf97d |
| 42357d7901 |
| 014a7b9492 |
| e0d710644b |
| 4a9dba30d7 |
| 9663dea79d |
| 81e40860df |
| 9021e3a903 |
| 2136d1a157 |
.github/workflows/publish-to-pypi.yaml (vendored) — 28 changed lines

@@ -70,9 +70,9 @@ jobs:
name: python-package-distributions
path: dist/
- name: Sign the dists with Sigstore
uses: sigstore/gh-action-sigstore-python@v1.2.3
uses: sigstore/gh-action-sigstore-python@v3.0.0
with:
inputs: >-
inputs: |
./dist/*.tar.gz
./dist/*.whl
- name: Upload artifact signatures to GitHub Release

@@ -85,27 +85,3 @@ jobs:
gh release upload
'${{ github.ref_name }}' dist/**
--repo '${{ github.repository }}'

# publish-to-testpypi:
#   name: Publish Python 🐍 distribution 📦 to TestPyPI
#   needs:
#   - build
#   runs-on: ubuntu-latest
#
#   environment:
#     name: testpypi
#     url: https://test.pypi.org/p/pydase
#
#   permissions:
#     id-token: write  # IMPORTANT: mandatory for trusted publishing
#
#   steps:
#   - name: Download all the dists
#     uses: actions/download-artifact@v3
#     with:
#       name: python-package-distributions
#       path: dist/
#   - name: Publish distribution 📦 to TestPyPI
#     uses: pypa/gh-action-pypi-publish@release/v1
#     with:
#       repository-url: https://test.pypi.org/legacy/
.github/workflows/python-package.yml (vendored) — 9 changed lines

@@ -20,9 +20,6 @@ jobs:

steps:
- uses: actions/checkout@v4
- uses: chartboost/ruff-action@v1
with:
src: "./src"
- name: Set up Python ${{ matrix.python-version }}
uses: actions/setup-python@v4
with:

@@ -32,6 +29,12 @@
python -m pip install --upgrade pip
python -m pip install poetry
poetry install --with dev
- name: Check with ruff
run: |
poetry run ruff check src
- name: Check formatting with ruff
run: |
poetry run ruff format --check src
- name: Test with pytest
run: |
poetry run pytest
LICENSE — 4 changed lines

@@ -1,6 +1,4 @@
MIT License

Copyright (c) 2023 Mose Müller <mosemueller@gmail.com>
Copyright (c) 2023-2024 Mose Müller <mosemueller@gmail.com>

Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
@@ -2,7 +2,7 @@

## Overview

The Observer Pattern is a fundamental design pattern in the `pydase` package, serving as the central communication mechanism for state updates to clients connected to a service.
The [Observer Pattern](https://en.wikipedia.org/wiki/Observer_pattern) is a fundamental design pattern in the `pydase` package, serving as the central communication mechanism for state updates to clients connected to a service.

## How it Works
@@ -0,0 +1,45 @@
::: pydase.data_service
    handler: python

::: pydase.server.server
    handler: python

::: pydase.server.web_server
    handler: python

::: pydase.client
    handler: python

::: pydase.components
    handler: python

::: pydase.task
    handler: python
    options:
        inherited_members: false
        show_submodules: true

::: pydase.utils.serialization.serializer
    handler: python

::: pydase.utils.serialization.deserializer
    handler: python
    options:
        show_root_heading: true
        show_root_toc_entry: false
        show_symbol_type_heading: true
        show_symbol_type_toc: true

::: pydase.utils.serialization.types
    handler: python

::: pydase.utils.decorators
    handler: python
    options:
        filters: ["!render_in_frontend"]

::: pydase.units
    handler: python

::: pydase.config
    handler: python
@@ -1,14 +1,11 @@
# Getting Started
## Installation
{%
include-markdown "../README.md"
start="<!--installation-start-->"
end="<!--installation-end-->"
start="<!--getting-started-start-->"
end="<!--getting-started-end-->"
%}

## Usage
{%
include-markdown "../README.md"
start="<!--usage-start-->"
end="<!--usage-end-->"
%}
[RESTful API]: ./user-guide/interaction/README.md#restful-api
[Python RPC Client]: ./user-guide/interaction/README.md#python-rpc-client
[Custom Components]: ./user-guide/Components.md#custom-components-pydasecomponents
[Components]: ./user-guide/Components.md
BIN docs/images/logo-bw.png (new file) — 27 KiB
docs/images/logo-bw.svg (new file, 11 lines, 2.7 KiB) — monochrome SVG logo traced with potrace 1.10; full SVG path data omitted.
BIN docs/images/logo-colour.png (new file) — 77 KiB
docs/images/logo-colour.svg (new file, 153 lines, 15 KiB) — colour SVG logo exported from Inkscape; full SVG markup and path data omitted.
BIN docs/images/logo-with-text.png (new file) — 22 KiB
docs/images/logo-with-text.svg (new file, 145 lines, 17 KiB) — SVG logo with the "pydase" wordmark, exported from Inkscape; full SVG markup and path data omitted.
@@ -1 +1,17 @@
{% include-markdown "../README.md" %}
{%
include-markdown "../README.md"
start="<!--introduction-start-->"
end="<!--introduction-end-->"
%}

[pydase Banner]: ./images/logo-with-text.png
[License]: ./about/license.md
[Observer Pattern]: ./dev-guide/Observer_Pattern_Implementation.md
[Service Persistence]: ./user-guide/Service_Persistence.md
[Defining DataService]: ./getting-started.md#defining-a-dataservice
[Web Interface Access]: ./getting-started.md#accessing-the-web-interface
[Short RPC Client]: ./getting-started.md#connecting-to-the-service-via-python-rpc-client
[Customizing Web Interface]: ./user-guide/interaction/README.md#customization-options
[Task Management]: ./user-guide/Tasks.md
[Units]: ./user-guide/Understanding-Units.md
[Property Validation]: ./user-guide/Validating-Property-Setters.md
@@ -5,6 +5,7 @@ charset-normalizer==3.3.2 ; python_version >= "3.10" and python_version < "4.0"
click==8.1.7 ; python_version >= "3.10" and python_version < "4.0"
colorama==0.4.6 ; python_version >= "3.10" and python_version < "4.0"
ghp-import==2.1.0 ; python_version >= "3.10" and python_version < "4.0"
griffe==1.1.0 ; python_version >= "3.10" and python_version < "4.0"
idna==3.7 ; python_version >= "3.10" and python_version < "4.0"
jinja2==3.1.4 ; python_version >= "3.10" and python_version < "4.0"
markdown==3.6 ; python_version >= "3.10" and python_version < "4.0"
@@ -14,10 +15,12 @@ mkdocs-autorefs==1.0.1 ; python_version >= "3.10" and python_version < "4.0"
mkdocs-get-deps==0.2.0 ; python_version >= "3.10" and python_version < "4.0"
mkdocs-include-markdown-plugin==3.9.1 ; python_version >= "3.10" and python_version < "4.0"
mkdocs-material-extensions==1.3.1 ; python_version >= "3.10" and python_version < "4.0"
mkdocs-material==9.5.30 ; python_version >= "3.10" and python_version < "4.0"
mkdocs-material==9.5.31 ; python_version >= "3.10" and python_version < "4.0"
mkdocs-swagger-ui-tag==0.6.10 ; python_version >= "3.10" and python_version < "4.0"
mkdocs==1.6.0 ; python_version >= "3.10" and python_version < "4.0"
mkdocstrings==0.22.0 ; python_version >= "3.10" and python_version < "4.0"
mkdocstrings-python==1.10.8 ; python_version >= "3.10" and python_version < "4.0"
mkdocstrings==0.25.2 ; python_version >= "3.10" and python_version < "4.0"
mkdocstrings[python]==0.25.2 ; python_version >= "3.10" and python_version < "4.0"
packaging==24.1 ; python_version >= "3.10" and python_version < "4.0"
paginate==0.5.6 ; python_version >= "3.10" and python_version < "4.0"
pathspec==0.12.1 ; python_version >= "3.10" and python_version < "4.0"
@@ -1,6 +1,435 @@
# Components Guide
{%
include-markdown "../../README.md"
start="<!-- Component User Guide Start -->"
end="<!-- Component User Guide End -->"
%}

In `pydase`, components are fundamental building blocks that bridge the Python backend logic with frontend visual representation and interactions. This system can be understood in terms of the following categories:

## Built-in Type and Enum Components

`pydase` automatically maps standard Python data types to their corresponding frontend components (a minimal example combining them follows the list):

- `str`: Translated into a `StringComponent` on the frontend.
- `int` and `float`: Manifested as the `NumberComponent`.
- `bool`: Rendered as a `ButtonComponent`.
- `list`: Each item is displayed individually, named after the list attribute and its index.
- `dict`: Each key-value pair is displayed individually, named after the dictionary attribute and its key. **Note** that the dictionary keys must be strings.
- `enum.Enum`: Presented as an `EnumComponent`, facilitating dropdown selection.
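To make the mapping concrete, here is a minimal sketch of a service that exposes one attribute of each built-in type. The class and attribute names (`ExampleService`, `Color`, `voltage`, ...) are purely illustrative, not part of `pydase`:

```python
import enum

import pydase


class Color(enum.Enum):
    RED = "red"
    GREEN = "green"


class ExampleService(pydase.DataService):
    def __init__(self) -> None:
        super().__init__()
        self.name = "my device"                  # str   -> StringComponent
        self.channel_count = 4                   # int   -> NumberComponent
        self.voltage = 1.25                      # float -> NumberComponent
        self.enabled = False                     # bool  -> ButtonComponent
        self.gains = [1.0, 2.0, 4.0]             # list  -> one component per index
        self.limits = {"min": 0.0, "max": 10.0}  # dict  -> one component per (string) key
        self.color = Color.RED                   # Enum  -> EnumComponent (dropdown)


if __name__ == "__main__":
    pydase.Server(ExampleService()).run()
```

Starting this service and opening the web interface should show one frontend component per attribute, following the mapping above.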
## Method Components

Within the `DataService` class of `pydase`, only methods without arguments can be represented in the frontend. They fall into two distinct categories:

1. [**Tasks**](./Tasks.md): Argument-free asynchronous functions, identified within `pydase` as tasks, are inherently designed for frontend interaction. These tasks are automatically rendered with a start/stop button, allowing users to initiate or halt the task execution directly from the web interface.
2. **Synchronous Methods with `@frontend` Decorator**: Synchronous methods without arguments can also be presented in the frontend. For this, they have to be decorated with the `@frontend` decorator.

```python
import pydase
import pydase.components
import pydase.units as u
from pydase.utils.decorators import frontend


class MyService(pydase.DataService):
    @frontend
    def exposed_method(self) -> None:
        ...

    async def my_task(self) -> None:
        while True:
            # ...
            ...
```

You can still define synchronous methods with arguments and call them using a Python client. However, decorating them with the `@frontend` decorator will raise a `FunctionDefinitionError`, and defining a task with arguments will raise a `TaskDefinitionError`.

I decided against supporting function arguments for functions rendered in the frontend for the following reasons:

1. Feature Request Pitfall: Supporting function arguments creates a bottomless pit of feature requests. As users encounter the limitations of the supported types, demands for extending support to more complex types would grow.
2. Complexity in Supported Argument Types: While simple types like `int`, `float`, `bool` and `str` could be easily supported, more complicated types are not easily represented or (de-)serialized.
## DataService Instances (Nested Classes)

Nested `DataService` instances offer an organized hierarchy for components, enabling richer applications. Each nested class might have its own attributes and methods, each mapped to a frontend component.

Here is an example:

```python
from pydase import DataService, Server


class Channel(DataService):
    def __init__(self, channel_id: int) -> None:
        super().__init__()
        self._channel_id = channel_id
        self._current = 0.0

    @property
    def current(self) -> float:
        # run code to get current
        result = self._current
        return result

    @current.setter
    def current(self, value: float) -> None:
        # run code to set current
        self._current = value


class Device(DataService):
    def __init__(self) -> None:
        super().__init__()
        self.channels = [Channel(i) for i in range(2)]


if __name__ == "__main__":
    service = Device()
    Server(service).run()
```

**Note** that defining classes within `DataService` classes is not supported (see [this issue](https://github.com/tiqi-group/pydase/issues/16)).
## Custom Components (`pydase.components`)

The custom components in `pydase` have two main parts:

- A **Python Component Class** in the backend, implementing the logic needed to set, update, and manage the component's state and data.
- A **Frontend React Component** that renders and manages user interaction in the browser.

Below are the components available in the `pydase.components` module, accompanied by their Python usage:

### `DeviceConnection`

The `DeviceConnection` component acts as a base class within the `pydase` framework for managing device connections. It provides a structured approach to handle connections by offering a customizable `connect` method and a `connected` property. This setup facilitates the implementation of automatic reconnection logic, which periodically attempts reconnection whenever the connection is lost.

In the frontend, this class abstracts away the direct interaction with the `connect` method and the `connected` property. Instead, it showcases user-defined attributes, methods, and properties. When the `connected` status is `False`, the frontend displays an overlay that prompts manual reconnection through the `connect()` method. Successful reconnection removes the overlay.

```python
import pydase.components
import pydase.units as u


class Device(pydase.components.DeviceConnection):
    def __init__(self) -> None:
        super().__init__()
        self._voltage = 10 * u.units.V

    def connect(self) -> None:
        if not self._connected:
            self._connected = True

    @property
    def voltage(self) -> float:
        return self._voltage


class MyService(pydase.DataService):
    def __init__(self) -> None:
        super().__init__()
        self.device = Device()


if __name__ == "__main__":
    service_instance = MyService()
    pydase.Server(service_instance).run()
```
#### Customizing Connection Logic

Users are encouraged to primarily override the `connect` method to tailor the connection process to their specific device. This method should adjust the `self._connected` attribute based on the outcome of the connection attempt:

```python
import pydase.components


class MyDeviceConnection(pydase.components.DeviceConnection):
    def __init__(self) -> None:
        super().__init__()
        # Add any necessary initialization code here

    def connect(self) -> None:
        # Implement device-specific connection logic here
        # Update self._connected to `True` if the connection is successful,
        # or `False` if unsuccessful
        ...
```

Moreover, if the connection status requires additional logic, users can override the `connected` property:

```python
import pydase.components


class MyDeviceConnection(pydase.components.DeviceConnection):
    def __init__(self) -> None:
        super().__init__()
        # Add any necessary initialization code here

    def connect(self) -> None:
        # Implement device-specific connection logic here
        # Ensure self._connected reflects the connection status accurately
        ...

    @property
    def connected(self) -> bool:
        # Implement custom logic to accurately report connection status
        return self._connected
```
#### Reconnection Interval

The `DeviceConnection` component automatically executes a task that checks for device availability at a default interval of 10 seconds. This interval is adjustable by modifying the `_reconnection_wait_time` attribute on the class instance.
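As a sketch of how that interval could be adjusted: only `DeviceConnection` and the `_reconnection_wait_time` attribute come from the text above; the `Device`/`MyService` wrapper and the 30-second value are illustrative assumptions.

```python
import pydase
import pydase.components


class Device(pydase.components.DeviceConnection):
    def connect(self) -> None:
        # Device-specific connection logic goes here
        ...


class MyService(pydase.DataService):
    def __init__(self) -> None:
        super().__init__()
        self.device = Device()
        # Check for device availability every 30 seconds instead of the
        # default 10 seconds.
        self.device._reconnection_wait_time = 30.0


if __name__ == "__main__":
    pydase.Server(MyService()).run()
```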
### `Image`

This component provides a versatile interface for displaying images within the application. Users can update and manage images from various sources, including local paths, URLs, and even matplotlib figures.

The component offers methods to load images seamlessly, ensuring that visual content is easily integrated and displayed within the data service.

```python
import matplotlib.pyplot as plt
import numpy as np
import pydase
from pydase.components.image import Image


class MyDataService(pydase.DataService):
    my_image = Image()


if __name__ == "__main__":
    service = MyDataService()
    # loading from local path
    service.my_image.load_from_path("/your/image/path/")

    # loading from a URL
    service.my_image.load_from_url("https://cataas.com/cat")

    # loading a matplotlib figure
    fig = plt.figure()
    x = np.linspace(0, 2 * np.pi)
    plt.plot(x, np.sin(x))
    plt.grid()
    service.my_image.load_from_matplotlib_figure(fig)

    pydase.Server(service).run()
```
### `NumberSlider`
|
||||
|
||||
The `NumberSlider` component in the `pydase` package provides an interactive slider interface for adjusting numerical values on the frontend. It is designed to support both numbers and quantities and ensures that values adjusted on the frontend are synchronized with the backend.
|
||||
|
||||
To utilize the `NumberSlider`, users should implement a class that derives from `NumberSlider`. This class can then define the initial value, the minimum and maximum limits, the step size, and any additional logic as needed.
|
||||
|
||||
Here's an example of how to implement and use a custom slider:
|
||||
|
||||
```python
|
||||
import pydase
|
||||
import pydase.components
|
||||
|
||||
|
||||
class MySlider(pydase.components.NumberSlider):
|
||||
def __init__(
|
||||
self,
|
||||
value: float = 0.0,
|
||||
min_: float = 0.0,
|
||||
max_: float = 100.0,
|
||||
step_size: float = 1.0,
|
||||
) -> None:
|
||||
super().__init__(value, min_, max_, step_size)
|
||||
|
||||
@property
|
||||
def min(self) -> float:
|
||||
return self._min
|
||||
|
||||
@min.setter
|
||||
def min(self, value: float) -> None:
|
||||
self._min = value
|
||||
|
||||
@property
|
||||
def max(self) -> float:
|
||||
return self._max
|
||||
|
||||
@max.setter
|
||||
def max(self, value: float) -> None:
|
||||
self._max = value
|
||||
|
||||
@property
|
||||
def step_size(self) -> float:
|
||||
return self._step_size
|
||||
|
||||
@step_size.setter
|
||||
def step_size(self, value: float) -> None:
|
||||
self._step_size = value
|
||||
|
||||
@property
|
||||
def value(self) -> float:
|
||||
"""Slider value."""
|
||||
return self._value
|
||||
|
||||
@value.setter
|
||||
def value(self, value: float) -> None:
|
||||
if value < self._min or value > self._max:
|
||||
raise ValueError("Value is either below allowed min or above max value.")
|
||||
|
||||
self._value = value
|
||||
|
||||
|
||||
class MyService(pydase.DataService):
|
||||
def __init__(self) -> None:
|
||||
super().__init__()
|
||||
self.voltage = MySlider()
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
service_instance = MyService()
|
||||
service_instance.voltage.value = 5
|
||||
print(service_instance.voltage.value) # Output: 5
|
||||
pydase.Server(service_instance).run()
|
||||
```
|
||||
|
||||
In this example, `MySlider` overrides the `min`, `max`, `step_size`, and `value` properties. Users can make any of these properties read-only by omitting the corresponding setter method.
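For instance, a slider with a fixed minimum could define only the getter for `min` and omit its setter. This is a minimal sketch; the class name is hypothetical and the remaining properties are left out for brevity:

```python
import pydase.components


class FixedMinSlider(pydase.components.NumberSlider):
    @property
    def min(self) -> float:
        # Without a setter, the minimum value cannot be changed from the frontend
        return self._min
```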
|
||||
|
||||

|
||||
|
||||
- Accessing parent class resources in `NumberSlider`
|
||||
|
||||
In scenarios where the slider component needs to interact with or access resources from its parent class, you can pass it a callback function. This avoids passing the entire parent class instance (`self`) and offers a more encapsulated approach. The callback can be designed to use specific attributes or methods of the parent class, allowing the slider to perform actions or retrieve data in response to slider events.
|
||||
|
||||
Here's an illustrative example:
|
||||
|
||||
```python
|
||||
from collections.abc import Callable
|
||||
|
||||
import pydase
|
||||
import pydase.components
|
||||
|
||||
|
||||
class MySlider(pydase.components.NumberSlider):
|
||||
def __init__(
|
||||
self,
|
||||
value: float,
|
||||
on_change: Callable[[float], None],
|
||||
) -> None:
|
||||
super().__init__(value=value)
|
||||
self._on_change = on_change
|
||||
|
||||
# ... other properties ...
|
||||
|
||||
@property
|
||||
def value(self) -> float:
|
||||
return self._value
|
||||
|
||||
@value.setter
|
||||
def value(self, new_value: float) -> None:
|
||||
if new_value < self._min or new_value > self._max:
|
||||
raise ValueError("Value is either below allowed min or above max value.")
|
||||
self._value = new_value
|
||||
self._on_change(new_value)
|
||||
|
||||
|
||||
class MyService(pydase.DataService):
|
||||
def __init__(self) -> None:
|
||||
        super().__init__()
        self.voltage = MySlider(
|
||||
5,
|
||||
on_change=self.handle_voltage_change,
|
||||
)
|
||||
|
||||
def handle_voltage_change(self, new_voltage: float) -> None:
|
||||
print(f"Voltage changed to: {new_voltage}")
|
||||
# Additional logic here
|
||||
|
||||
if __name__ == "__main__":
|
||||
service_instance = MyService()
|
||||
    service_instance.voltage.value = 7  # Output: "Voltage changed to: 7"
|
||||
pydase.Server(service_instance).run()
|
||||
```
|
||||
|
||||
- Incorporating units in `NumberSlider`
|
||||
|
||||
The `NumberSlider` is capable of [displaying units](./Understanding-Units.md) alongside values, enhancing its usability in contexts where unit representation is crucial. When utilizing `pydase.units`, you can specify units for the slider's value, allowing the component to reflect these units in the frontend.
|
||||
|
||||
Here's how to implement a `NumberSlider` with unit display:
|
||||
|
||||
```python
|
||||
import pydase
|
||||
import pydase.components
|
||||
import pydase.units as u
|
||||
|
||||
class MySlider(pydase.components.NumberSlider):
|
||||
def __init__(
|
||||
self,
|
||||
value: u.Quantity = 0.0 * u.units.V,
|
||||
) -> None:
|
||||
super().__init__(value)
|
||||
|
||||
@property
|
||||
def value(self) -> u.Quantity:
|
||||
return self._value
|
||||
|
||||
@value.setter
|
||||
def value(self, value: u.Quantity) -> None:
|
||||
if value.m < self._min or value.m > self._max:
|
||||
raise ValueError("Value is either below allowed min or above max value.")
|
||||
self._value = value
|
||||
|
||||
class MyService(pydase.DataService):
|
||||
def __init__(self) -> None:
|
||||
super().__init__()
|
||||
self.voltage = MySlider()
|
||||
|
||||
if __name__ == "__main__":
|
||||
service_instance = MyService()
|
||||
service_instance.voltage.value = 5 * u.units.V
|
||||
print(service_instance.voltage.value) # Output: 5 V
|
||||
pydase.Server(service_instance).run()
|
||||
```
|
||||
|
||||
### `ColouredEnum`
|
||||
|
||||
This component provides a way to visually represent different states or categories in a data service using colour-coded options. It behaves similarly to a standard `Enum`, but its values encode colours in a format understood by CSS, such as hexadecimal, RGB, or HSL.
|
||||
|
||||
If the property associated with the `ColouredEnum` has a setter function, the keys of the enum will be rendered as a dropdown menu, allowing users to interact and select different options. Without a setter function, the selected key will simply be displayed as a coloured box with text inside, serving as a visual indicator.
|
||||
|
||||
```python
|
||||
import pydase
|
||||
import pydase.components as pyc
|
||||
|
||||
|
||||
class MyStatus(pyc.ColouredEnum):
|
||||
PENDING = "#FFA500" # Hexadecimal colour (Orange)
|
||||
RUNNING = "#0000FF80" # Hexadecimal colour with transparency (Blue)
|
||||
PAUSED = "rgb(169, 169, 169)" # RGB colour (Dark Gray)
|
||||
RETRYING = "rgba(255, 255, 0, 0.3)" # RGB colour with transparency (Yellow)
|
||||
COMPLETED = "hsl(120, 100%, 50%)" # HSL colour (Green)
|
||||
FAILED = "hsla(0, 100%, 50%, 0.7)" # HSL colour with transparency (Red)
|
||||
CANCELLED = "SlateGray" # Cross-browser colour name (Slate Gray)
|
||||
|
||||
|
||||
class StatusTest(pydase.DataService):
|
||||
_status = MyStatus.RUNNING
|
||||
|
||||
@property
|
||||
def status(self) -> MyStatus:
|
||||
return self._status
|
||||
|
||||
@status.setter
|
||||
def status(self, value: MyStatus) -> None:
|
||||
# do something ...
|
||||
self._status = value
|
||||
|
||||
# Modifying or accessing the status value:
|
||||
my_service = StatusTest()
|
||||
my_service.status = MyStatus.FAILED
|
||||
```
|
||||
|
||||

|
||||
|
||||
**Note** that each enumeration name and value must be unique.
|
||||
This means you should use different colour formats whenever you want to reuse the same colour for multiple entries.
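For instance, to reuse the same visual colour for two states, write it in two different formats so that the enum values remain unique. The enum below is purely illustrative:

```python
import pydase.components as pyc


class JobStatus(pyc.ColouredEnum):
    QUEUED = "green"       # colour name
    SUBMITTED = "#008000"  # the same green in hexadecimal, keeping the value unique
```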
|
||||
|
||||
### Extending with New Components
|
||||
|
||||
Users can also extend the library by creating custom components. This involves defining the behavior on the Python backend and the visual representation on the frontend. For those looking to introduce new components, the [guide on adding components](https://pydase.readthedocs.io/en/latest/dev-guide/Adding_Components/) provides detailed steps on achieving this.
|
||||
|
||||
|
||||
211
docs/user-guide/Configuration.md
Normal file
@@ -0,0 +1,211 @@
|
||||
|
||||
# Configuring `pydase`
|
||||
|
||||
## Do I Need to Configure My `pydase` Service?
|
||||
|
||||
`pydase` services work out of the box without requiring any configuration. However, you
|
||||
might want to change some options, such as the web server port or logging level. To
|
||||
accommodate such customizations, `pydase` allows configuration through environment
|
||||
variables - avoiding hard-coded settings in your service code.
|
||||
|
||||
Why should you avoid hard-coding configurations? Here are two reasons:
|
||||
|
||||
1. **Security**:
|
||||
Protect sensitive information, such as usernames and passwords. By using environment
|
||||
variables, your service code can remain public while keeping private information
|
||||
secure.
|
||||
|
||||
2. **Reusability**:
|
||||
Services often need to be reused in different environments. For example, you might
|
||||
deploy multiple instances of a service (e.g., for different sensors in a lab). By
|
||||
separating configuration from code, you can adapt the service to new requirements
|
||||
without modifying its codebase.
|
||||
|
||||
Next, we’ll walk you through the environment variables `pydase` supports and provide an
|
||||
example of how to separate service code from configuration.
|
||||
|
||||
## Configuring `pydase` Using Environment Variables
|
||||
|
||||
`pydase` provides the following environment variables for customization:
|
||||
|
||||
- **`ENVIRONMENT`**:
|
||||
Defines the operation mode (`"development"` or `"production"`), which influences
|
||||
behaviour such as logging (see [Logging in pydase](https://github.com/tiqi-group/pydase?tab=readme-ov-file#logging-in-pydase)).
|
||||
|
||||
- **`SERVICE_CONFIG_DIR`**:
|
||||
Specifies the directory for configuration files (e.g., `web_settings.json`). Defaults
|
||||
to the `config` folder in the service root. Access this programmatically using:
|
||||
|
||||
```python
|
||||
import pydase.config
|
||||
pydase.config.ServiceConfig().config_dir
|
||||
```
|
||||
|
||||
- **`SERVICE_WEB_PORT`**:
|
||||
Defines the web server’s port. Ensure each service on the same host uses a unique
|
||||
port. Default: `8001`.
|
||||
|
||||
- **`GENERATE_WEB_SETTINGS`**:
|
||||
When `true`, generates or updates the `web_settings.json` file. Existing entries are
|
||||
preserved, and new entries are appended.
|
||||
|
||||
### Configuring `pydase` via Keyword Arguments
|
||||
|
||||
Some settings can also be overridden directly in your service code using keyword
|
||||
arguments when initializing the server. This allows for flexibility in code-based
|
||||
configuration:
|
||||
|
||||
```python
|
||||
import pathlib
|
||||
from pydase import Server
|
||||
from your_service_module import YourService
|
||||
|
||||
server = Server(
|
||||
YourService(),
|
||||
web_port=8080, # Overrides SERVICE_WEB_PORT
|
||||
config_dir=pathlib.Path("custom_config"), # Overrides SERVICE_CONFIG_DIR
|
||||
generate_web_settings=True # Overrides GENERATE_WEB_SETTINGS
|
||||
).run()
|
||||
```
|
||||
|
||||
## Separating Service Code from Configuration
|
||||
|
||||
To decouple configuration from code, `pydase` utilizes `confz` for configuration
|
||||
management. Below is an example that demonstrates how to configure a `pydase` service
|
||||
for a sensor readout application.
|
||||
|
||||
### Scenario: Configuring a Sensor Service
|
||||
|
||||
Imagine you have multiple sensors distributed across your lab. You need to configure
|
||||
each service instance with:
|
||||
|
||||
1. **Hostname**: The hostname or IP address of the sensor.
|
||||
2. **Authentication Token**: A token or credentials to authenticate with the sensor.
|
||||
3. **Readout Interval**: A periodic interval to read sensor data and log it to a
|
||||
database.
|
||||
|
||||
Given the repository structure:
|
||||
|
||||
```bash title="Service Repository Structure"
|
||||
my_sensor
|
||||
├── pyproject.toml
|
||||
├── README.md
|
||||
└── src
|
||||
└── my_sensor
|
||||
├── my_sensor.py
|
||||
├── config.py
|
||||
├── __init__.py
|
||||
└── __main__.py
|
||||
```
|
||||
|
||||
Your service might look like this:
|
||||
|
||||
### Configuration
|
||||
|
||||
Define the configuration using `confz`:
|
||||
|
||||
```python title="src/my_sensor/config.py"
|
||||
import confz
|
||||
from pydase.config import ServiceConfig
|
||||
|
||||
class MySensorConfig(confz.BaseConfig):
|
||||
instance_name: str
|
||||
hostname: str
|
||||
auth_token: str
|
||||
readout_interval_s: float
|
||||
|
||||
CONFIG_SOURCES = confz.FileSource(file=ServiceConfig().config_dir / "config.yaml")
|
||||
```
|
||||
|
||||
This class defines configurable parameters and loads values from a `config.yaml` file
|
||||
located in the service’s configuration directory (which is configurable through an
|
||||
environment variable, see [above](#configuring-pydase-using-environment-variables)).
|
||||
A sample YAML file might look like this:
|
||||
|
||||
```yaml title="config.yaml"
|
||||
instance_name: my-sensor-service-01
|
||||
hostname: my-sensor-01.example.com
|
||||
auth_token: my-secret-authentication-token
|
||||
readout_interval_s: 5
|
||||
```
|
||||
|
||||
### Service Implementation
|
||||
|
||||
Your service implementation might look like this:
|
||||
|
||||
```python title="src/my_sensor/my_sensor.py"
|
||||
import asyncio
|
||||
import http.client
|
||||
import json
|
||||
import logging
|
||||
from typing import Any
|
||||
|
||||
import pydase.components
|
||||
import pydase.units as u
|
||||
from pydase.task.decorator import task
|
||||
|
||||
from my_sensor.config import MySensorConfig
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class MySensor(pydase.DataService):
|
||||
def __init__(self) -> None:
|
||||
super().__init__()
|
||||
self.readout_interval_s: u.Quantity = (
|
||||
MySensorConfig().readout_interval_s * u.units.s
|
||||
)
|
||||
|
||||
@property
|
||||
def hostname(self) -> str:
|
||||
"""Hostname of the sensor. Read-only."""
|
||||
return MySensorConfig().hostname
|
||||
|
||||
def _get_data(self) -> dict[str, Any]:
|
||||
"""Fetches sensor data via an HTTP GET request. It passes the authentication
|
||||
token as "Authorization" header."""
|
||||
|
||||
connection = http.client.HTTPConnection(self.hostname, timeout=10)
|
||||
connection.request(
|
||||
"GET", "/", headers={"Authorization": MySensorConfig().auth_token}
|
||||
)
|
||||
response = connection.getresponse()
|
||||
connection.close()
|
||||
|
||||
return json.loads(response.read())
|
||||
|
||||
@task(autostart=True)
|
||||
async def get_and_log_sensor_values(self) -> None:
|
||||
"""Periodically fetches and logs sensor data."""
|
||||
while True:
|
||||
try:
|
||||
data = self._get_data()
|
||||
# Write data to database using MySensorConfig().instance_name ...
|
||||
except Exception as e:
|
||||
logger.error(
|
||||
"Error occurred, retrying in %s seconds. Error: %s",
|
||||
self.readout_interval_s.m,
|
||||
e,
|
||||
)
|
||||
await asyncio.sleep(self.readout_interval_s.m)
|
||||
```
|
||||
|
||||
### Starting the Service
|
||||
|
||||
The service is launched via the `__main__.py` entry point:
|
||||
|
||||
```python title="src/my_sensor/__main__.py"
|
||||
import pydase
|
||||
from my_sensor.my_sensor import MySensor
|
||||
|
||||
pydase.Server(MySensor()).run()
|
||||
```
|
||||
|
||||
You can now start the service with:
|
||||
|
||||
```bash
|
||||
python -m my_sensor
|
||||
```
|
||||
|
||||
This approach ensures the service is fully configured via the `config.yaml` file,
|
||||
separating service logic from configuration.
|
||||
49
docs/user-guide/Service_Persistence.md
Normal file
@@ -0,0 +1,49 @@
|
||||
# Understanding Service Persistence
|
||||
|
||||
`pydase` allows you to easily persist the state of your service by saving it to a file. This is especially useful when you want to maintain the service's state across different runs.
|
||||
|
||||
To save the state of your service, pass a `filename` keyword argument to the constructor of the `pydase.Server` class. If the file specified by `filename` does not exist, the state manager will create this file and store its state in it when the service is shut down. If the file already exists, the state manager will load the state from this file, setting the values of its attributes to the values stored in the file.
|
||||
|
||||
Here's an example:
|
||||
|
||||
```python
|
||||
import pydase
|
||||
|
||||
class Device(pydase.DataService):
|
||||
# ... defining the Device class ...
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
service = Device()
|
||||
pydase.Server(service=service, filename="device_state.json").run()
|
||||
```
|
||||
|
||||
In this example, the state of the `Device` service will be saved to `device_state.json` when the service is shut down. If `device_state.json` exists when the server is started, the state manager will restore the state of the service from this file.
|
||||
|
||||
## Controlling Property State Loading with `@load_state`
|
||||
|
||||
By default, the state manager only restores values for public attributes of your service. If you also want a property to be restored, apply the `@load_state` decorator to its setter. This tells the state manager that the property's value should be loaded from the state file.
|
||||
|
||||
Here is how you can apply the `@load_state` decorator:
|
||||
|
||||
```python
|
||||
import pydase
|
||||
from pydase.data_service.state_manager import load_state
|
||||
|
||||
class Device(pydase.DataService):
|
||||
_name = "Default Device Name"
|
||||
|
||||
@property
|
||||
def name(self) -> str:
|
||||
return self._name
|
||||
|
||||
@name.setter
|
||||
@load_state
|
||||
def name(self, value: str) -> None:
|
||||
self._name = value
|
||||
```
|
||||
|
||||
With the `@load_state` decorator applied to the `name` property setter, the state manager will load and apply the `name` property's value from the file storing the state upon server startup, assuming it exists.
|
||||
|
||||
Note: If the service class structure has changed since the last time its state was saved, only the attributes and properties decorated with `@load_state` that have remained the same will be restored from the settings file.
|
||||
|
||||
38
docs/user-guide/Tasks.md
Normal file
@@ -0,0 +1,38 @@
|
||||
# Understanding Tasks
|
||||
|
||||
In `pydase`, a task is defined as an asynchronous function without arguments that is decorated with the `@task` decorator and contained in a class that inherits from `pydase.DataService`. These tasks usually contain a while loop and are designed to carry out periodic functions. For example, a task might be used to periodically read sensor data, update a database, or perform any other recurring job.
|
||||
|
||||
`pydase` allows you to control task execution via both the frontend and Python clients and can automatically start tasks upon initialization of the service. By using the `@task` decorator with the `autostart=True` argument in your service class, `pydase` will automatically start these tasks when the server is started. Here's an example:
|
||||
|
||||
```python
|
||||
import asyncio
from typing import Any

import pydase
|
||||
from pydase.task.decorator import task
|
||||
|
||||
|
||||
class SensorService(pydase.DataService):
|
||||
def __init__(self):
|
||||
super().__init__()
|
||||
self.readout_frequency = 1.0
|
||||
|
||||
def _process_data(self, data: ...) -> None:
|
||||
...
|
||||
|
||||
def _read_from_sensor(self) -> Any:
|
||||
...
|
||||
|
||||
@task(autostart=True)
|
||||
async def read_sensor_data(self):
|
||||
while True:
|
||||
data = self._read_from_sensor()
|
||||
self._process_data(data) # Process the data as needed
|
||||
await asyncio.sleep(self.readout_frequency)
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
service = SensorService()
|
||||
pydase.Server(service=service).run()
|
||||
```
|
||||
|
||||
In this example, `read_sensor_data` is a task that continuously reads data from a sensor. By decorating it with `@task(autostart=True)`, it will automatically start running when `pydase.Server(service).run()` is executed.
|
||||
|
||||
The `@task` decorator replaces the function with a task object that has `start()` and `stop()` methods. This means you can control the task execution directly using these methods. For instance, you can manually start or stop the task by calling `service.read_sensor_data.start()` and `service.read_sensor_data.stop()`, respectively.
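For example, assuming the `SensorService` above is served on `localhost:8001`, the task could be controlled through the proxy of a `pydase.Client` (see the Python RPC Client guide). The URL is illustrative:

```python
import pydase

# The proxy mirrors the service API, including the task's start()/stop() methods
client = pydase.Client(url="ws://localhost:8001").proxy

client.read_sensor_data.stop()   # stop the autostarted task
client.read_sensor_data.start()  # start it again
```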
|
||||
64
docs/user-guide/Understanding-Units.md
Normal file
@@ -0,0 +1,64 @@
|
||||
# Understanding Units
|
||||
|
||||
`pydase` integrates with the [`pint`](https://pint.readthedocs.io/en/stable/) package to allow you to work with physical quantities within your service. This enables you to define attributes with units, making your service more expressive and ensuring consistency in the handling of physical quantities.
|
||||
|
||||
You can define quantities in your `pydase.DataService` subclass using the `pydase.units` module.
|
||||
Here's an example:
|
||||
|
||||
```python
|
||||
from typing import Any
|
||||
|
||||
import pydase
|
||||
import pydase.units as u
|
||||
|
||||
|
||||
class ServiceClass(pydase.DataService):
|
||||
voltage = 1.0 * u.units.V
|
||||
_current: u.Quantity = 1.0 * u.units.mA
|
||||
|
||||
@property
|
||||
def current(self) -> u.Quantity:
|
||||
return self._current
|
||||
|
||||
@current.setter
|
||||
def current(self, value: u.Quantity) -> None:
|
||||
self._current = value
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
service = ServiceClass()
|
||||
|
||||
service.voltage = 10.0 * u.units.V
|
||||
service.current = 1.5 * u.units.mA
|
||||
|
||||
pydase.Server(service=service).run()
|
||||
```
|
||||
|
||||
In the frontend, quantities are rendered as floats, with the unit displayed as additional text. This allows you to maintain a clear and consistent representation of physical quantities across both the backend and frontend of your service.
|
||||

|
||||
|
||||
Should you need to access the magnitude or the unit of a quantity, you can use the `.m` attribute or the `.u` attribute of the variable, respectively. For example, this could be necessary to set the periodicity of a task:
|
||||
|
||||
```python
|
||||
import asyncio
|
||||
import pydase
|
||||
import pydase.units as u
|
||||
|
||||
|
||||
class ServiceClass(pydase.DataService):
|
||||
readout_wait_time = 1.0 * u.units.ms
|
||||
|
||||
async def read_sensor_data(self):
|
||||
while True:
|
||||
print("Reading out sensor ...")
|
||||
await asyncio.sleep(self.readout_wait_time.to("s").m)
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
service = ServiceClass()
|
||||
|
||||
pydase.Server(service=service).run()
|
||||
```
|
||||
|
||||
For more information about what you can do with the units, please consult the documentation of [`pint`](https://pint.readthedocs.io/en/stable/).
|
||||
|
||||
38
docs/user-guide/Validating-Property-Setters.md
Normal file
@@ -0,0 +1,38 @@
|
||||
# Using `validate_set` to Validate Property Setters
|
||||
|
||||
The `validate_set` decorator ensures that a property setter reads back the set value using the property getter and checks it against the desired value.
|
||||
This decorator can be used to validate that a parameter has been correctly set on a device within a specified precision and timeout.
|
||||
|
||||
The decorator takes two keyword arguments: `timeout` and `precision`. The `timeout` argument specifies the maximum time (in seconds) to wait for the value to be within the precision boundary.
|
||||
If the value is not within the precision boundary after this time, an exception is raised.
|
||||
The `precision` argument defines the acceptable deviation from the desired value.
|
||||
If `precision` is `None`, the value must be exact.
|
||||
For example, if `precision` is set to `1e-5`, the value read from the device must be within ±0.00001 of the desired value.
|
||||
|
||||
Here’s how to use the `validate_set` decorator in a `DataService` class:
|
||||
|
||||
```python
|
||||
import pydase
|
||||
from pydase.observer_pattern.observable.decorators import validate_set
|
||||
|
||||
|
||||
class Service(pydase.DataService):
|
||||
def __init__(self) -> None:
|
||||
super().__init__()
|
||||
self._device = RemoteDevice() # dummy class
|
||||
|
||||
@property
|
||||
def value(self) -> float:
|
||||
# Implement how to get the value from the remote device...
|
||||
return self._device.value
|
||||
|
||||
@value.setter
|
||||
@validate_set(timeout=1.0, precision=1e-5)
|
||||
def value(self, value: float) -> None:
|
||||
# Implement how to set the value on the remote device...
|
||||
self._device.value = value
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
pydase.Server(service=Service()).run()
|
||||
```
|
||||
59
docs/user-guide/advanced/Reverse-Proxy.md
Normal file
@@ -0,0 +1,59 @@
|
||||
# Deploying Services Behind a Reverse Proxy
|
||||
|
||||
In some environments, you may need to deploy your services behind a reverse proxy. Typically, this involves adding a CNAME record for your service that points to the reverse proxy in your DNS server. The proxy then routes requests to the `pydase` backend on the appropriate web server port.
|
||||
|
||||
However, in scenarios where you don’t control the DNS server, or where adding new CNAME records is time-consuming, `pydase` supports **service multiplexing** using a path prefix. This means multiple services can be hosted on a single CNAME (e.g., `services.example.com`), with each service accessible through a unique path such as `services.example.com/my-service`.
|
||||
|
||||
To ensure seamless operation, the reverse proxy must strip the path prefix (e.g., `/my-service`) from the request URL and forward it as the `X-Forwarded-Prefix` header. `pydase` then uses this header to dynamically adjust the frontend paths, ensuring all resources are correctly located.
|
||||
|
||||
## Example Deployment with Traefik
|
||||
|
||||
Below is an example setup using [Traefik](https://doc.traefik.io/traefik/), a widely-used reverse proxy. This configuration demonstrates how to forward requests for a `pydase` service using a path prefix.
|
||||
|
||||
### 1. Reverse Proxy Configuration
|
||||
|
||||
Save the following configuration to a file (e.g., `/etc/traefik/dynamic_conf/my-service-config.yml`):
|
||||
|
||||
```yaml
|
||||
http:
|
||||
routers:
|
||||
my-service-route:
|
||||
rule: PathPrefix(`/my-service`)
|
||||
entryPoints:
|
||||
- web
|
||||
service: my-service
|
||||
middlewares:
|
||||
- strip-prefix
|
||||
services:
|
||||
my-service:
|
||||
loadBalancer:
|
||||
servers:
|
||||
- url: http://127.0.0.1:8001
|
||||
middlewares:
|
||||
strip-prefix:
|
||||
stripprefix:
|
||||
prefixes: /my-service
|
||||
```
|
||||
|
||||
This configuration:
|
||||
|
||||
- Routes requests with the path prefix `/my-service` to the `pydase` backend.
|
||||
- Strips the prefix (`/my-service`) from the request URL using the `stripprefix` middleware.
|
||||
- Forwards the stripped prefix as the `X-Forwarded-Prefix` header.
|
||||
|
||||
### 2. Static Configuration for Traefik
|
||||
|
||||
Ensure Traefik is set up to use the dynamic configuration. Add this to your Traefik static configuration (e.g., `/etc/traefik/traefik.yml`):
|
||||
|
||||
```yaml
|
||||
providers:
|
||||
file:
|
||||
filename: /etc/traefik/dynamic_conf/my-service-config.yml
|
||||
entrypoints:
|
||||
web:
|
||||
address: ":80"
|
||||
```
|
||||
|
||||
### 3. Accessing the Service
|
||||
|
||||
Once configured, your `pydase` service will be accessible at `http://services.example.com/my-service`. The path prefix will be handled transparently by `pydase`, so you don’t need to make any changes to your application code or frontend resources.
|
||||
@@ -21,7 +21,8 @@ The frontend uses a component-based approach, representing various data types an
|
||||
`pydase` allows you to enhance the user experience by customizing the web interface's appearance through
|
||||
|
||||
1. a custom CSS file, and
|
||||
2. tailoring the frontend component layout and display style.
|
||||
2. a custom favicon image, and
|
||||
3. tailoring the frontend component layout and display style.
|
||||
|
||||
For more advanced customization, you can provide a completely custom frontend source.
|
||||
|
||||
@@ -51,6 +52,34 @@ This will apply the styles defined in `custom.css` to the web interface, allowin
|
||||
|
||||
Please ensure that the CSS file path is accessible from the server's running location. Relative or absolute paths can be used depending on your setup.
|
||||
|
||||
|
||||
### Custom favicon image
|
||||
|
||||
You can customize the favicon displayed in the browser tab by providing your own favicon image file during the server initialization.
|
||||
|
||||
Here's how you can use this feature:
|
||||
|
||||
1. Prepare your custom favicon image (e.g. a `.png` file).
|
||||
2. Pass the path to your favicon file as the `favicon_path` argument when initializing the `Server` class.
|
||||
|
||||
Here’s an example:
|
||||
|
||||
```python
|
||||
import pydase
|
||||
|
||||
|
||||
class MyService(pydase.DataService):
|
||||
# ... your service definition ...
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
service = MyService()
|
||||
pydase.Server(service, favicon_path="./my/local/my-favicon.png").run()
|
||||
```
|
||||
|
||||
This will serve the specified image instead of the default `pydase` logo.
|
||||
|
||||
|
||||
### Tailoring Frontend Component Layout
|
||||
|
||||
You can customize the display names, visibility, and order of components via the `web_settings.json` file.
|
||||
@@ -60,7 +89,7 @@ Each key in the file corresponds to the full access path of public attributes, p
|
||||
- **Control Component Visibility**: Utilize the `"display"` key-value pair to control whether a component is rendered in the frontend. Set the value to `true` to make the component visible or `false` to hide it.
|
||||
- **Adjustable Component Order**: The `"displayOrder"` values determine the order of components. Alter these values to rearrange the components as desired. The value defaults to [`Number.MAX_SAFE_INTEGER`](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Number/MAX_SAFE_INTEGER).
|
||||
|
||||
The `web_settings.json` file will be stored in the directory specified by `SERVICE_CONFIG_DIR`. You can generate a `web_settings.json` file by setting the `GENERATE_WEB_SETTINGS` to `True`. For more information, see the [configuration section](#configuring-pydase-via-environment-variables).
|
||||
The `web_settings.json` file will be stored in the directory specified by the `SERVICE_CONFIG_DIR` environment variable. You can generate a `web_settings.json` file by setting the `GENERATE_WEB_SETTINGS` to `True`. For more information, see the [configuration section](../Configuration).
|
||||
|
||||
For example, styling the following service
|
||||
|
||||
|
||||
@@ -1,45 +1,86 @@
|
||||
# Python Client
|
||||
# Python RPC Client
|
||||
|
||||
You can connect to the service using the `pydase.Client`. Below is an example of how to establish a connection to a service and interact with it:
|
||||
The [`pydase.Client`][pydase.Client] allows you to connect to a remote `pydase` service using socket.io, facilitating interaction with the service as though it were running locally.
|
||||
|
||||
## Basic Usage
|
||||
|
||||
```python
|
||||
import pydase
|
||||
|
||||
# Replace the hostname and port with the IP address and the port of the machine
|
||||
# where the service is running, respectively
|
||||
client_proxy = pydase.Client(hostname="<ip_addr>", port=8001).proxy
|
||||
# Replace <ip_addr> and <service_port> with the appropriate values for your service
|
||||
client_proxy = pydase.Client(url="ws://<ip_addr>:<service_port>").proxy
|
||||
# For SSL-encrypted services, use the wss protocol
|
||||
# client_proxy = pydase.Client(url="wss://your-domain.ch").proxy
|
||||
|
||||
# Interact with the service attributes as if they were local
|
||||
client_proxy.voltage = 5.0
|
||||
print(client_proxy.voltage) # Expected output: 5.0
|
||||
```
|
||||
|
||||
This example demonstrates setting and retrieving the `voltage` attribute through the client proxy.
|
||||
The proxy acts as a local representative of the remote service, enabling straightforward interaction.
|
||||
This example shows how to set and retrieve the `voltage` attribute through the client proxy.
|
||||
The proxy acts as a local representation of the remote service, enabling intuitive interaction.
|
||||
|
||||
The proxy class dynamically synchronizes with the server's exposed attributes. This synchronization allows the proxy to be automatically updated with any attributes or methods that the server exposes, essentially mirroring the server's API. This dynamic updating enables users to interact with the remote service as if they were working with a local object.
|
||||
The proxy class automatically synchronizes with the server's attributes and methods, keeping itself up-to-date with any changes. This dynamic synchronization essentially mirrors the server's API, making it feel like you're working with a local object.
|
||||
|
||||
## Context Manager Support
|
||||
|
||||
You can also use the client within a context manager, which automatically handles connection management (i.e., opening and closing the connection):
|
||||
|
||||
```python
|
||||
import pydase
|
||||
|
||||
|
||||
with pydase.Client(url="ws://localhost:8001") as client:
|
||||
client.proxy.my_method()
|
||||
```
|
||||
|
||||
Using the context manager ensures that connections are cleanly closed once the block of code finishes executing.
|
||||
|
||||
## Tab Completion Support
|
||||
|
||||
In interactive environments such as Python interpreters and Jupyter notebooks, the proxy class supports tab completion, which allows users to explore available methods and attributes.
|
||||
In interactive environments like Python interpreters or Jupyter notebooks, the proxy supports tab completion. This allows users to explore available methods and attributes.
|
||||
|
||||
## Integration within Other Services
|
||||
## Integrating the Client into Another Service
|
||||
|
||||
You can also integrate a client proxy within another service. Here's how you can set it up:
|
||||
You can integrate a `pydase` client proxy within another service. Here's an example of how to set this up:
|
||||
|
||||
```python
|
||||
import pydase
|
||||
|
||||
class MyService(pydase.DataService):
|
||||
# Initialize the client without blocking the constructor
|
||||
proxy = pydase.Client(hostname="<ip_addr>", port=8001, block_until_connected=False).proxy
|
||||
proxy = pydase.Client(
|
||||
url="ws://<ip_addr>:<service_port>",
|
||||
block_until_connected=False
|
||||
).proxy
|
||||
# For SSL-encrypted services, use the wss protocol
|
||||
# proxy = pydase.Client(
|
||||
# url="wss://your-domain.ch",
|
||||
# block_until_connected=False
|
||||
# ).proxy
|
||||
|
||||
if __name__ == "__main__":
|
||||
service = MyService()
|
||||
# Create a server that exposes this service; adjust the web_port as needed
|
||||
server = pydase.Server(service, web_port=8002). run()
|
||||
# Create a server that exposes this service
|
||||
server = pydase.Server(service, web_port=8002).run()
|
||||
```
|
||||
|
||||
In this setup, the `MyService` class has a `proxy` attribute that connects to a `pydase` service located at `<ip_addr>:8001`.
|
||||
The `block_until_connected=False` argument allows the service to start up even if the initial connection attempt fails.
|
||||
This configuration is particularly useful in distributed systems where services may start in any order.
|
||||
In this example:
|
||||
- The `MyService` class has a `proxy` attribute that connects to a `pydase` service at `<ip_addr>:<service_port>`.
|
||||
- By setting `block_until_connected=False`, the service can start without waiting for the connection to succeed, which is particularly useful in distributed systems where services may initialize in any order.
|
||||
|
||||
## Custom `socketio.AsyncClient` Connection Parameters
|
||||
|
||||
You can also configure advanced connection options by passing additional arguments to the underlying [`AsyncClient`][socketio.AsyncClient] via `sio_client_kwargs`. This allows you to fine-tune reconnection behaviour, delays, and other settings:
|
||||
|
||||
```python
|
||||
client = pydase.Client(
|
||||
url="ws://localhost:8001",
|
||||
sio_client_kwargs={
|
||||
"reconnection_attempts": 3,
|
||||
"reconnection_delay": 2,
|
||||
"reconnection_delay_max": 10,
|
||||
}
|
||||
).proxy
|
||||
```
|
||||
|
||||
In this setup, the client will attempt to reconnect three times, with an initial delay of 2 seconds (each successive attempt doubles this delay) and a maximum delay of 10 seconds between attempts.
|
||||
|
||||
@@ -3,11 +3,18 @@
|
||||
|
||||
<head>
|
||||
<meta charset="UTF-8" />
|
||||
<link rel="icon" type="image/svg+xml" href="/favicon.ico" />
|
||||
<meta name="viewport" content="width=device-width, initial-scale=1.0" />
|
||||
<meta name="theme-color" content="#000000" />
|
||||
<meta name="description" content="Web site displaying a pydase UI." />
|
||||
</head>
|
||||
|
||||
<script>
|
||||
// this will be set by the python backend if the service is behind a proxy which strips a prefix. The frontend can use this to build the paths to the resources.
|
||||
window.__FORWARDED_PREFIX__ = "";
|
||||
window.__FORWARDED_PROTO__ = "";
|
||||
</script>`
|
||||
|
||||
<body>
|
||||
<noscript>You need to enable JavaScript to run this app.</noscript>
|
||||
<div id="root"></div>
|
||||
|
||||
BIN
frontend/public/favicon.ico
Normal file
|
After Width: | Height: | Size: 77 KiB |
@@ -1,6 +1,6 @@
|
||||
import { useCallback, useEffect, useReducer, useState } from "react";
|
||||
import { Navbar, Form, Offcanvas, Container } from "react-bootstrap";
|
||||
import { hostname, port, socket } from "./socket";
|
||||
import { authority, socket, forwardedProto } from "./socket";
|
||||
import "./App.css";
|
||||
import {
|
||||
Notifications,
|
||||
@@ -68,12 +68,12 @@ const App = () => {
|
||||
|
||||
useEffect(() => {
|
||||
// Allow the user to add a custom css file
|
||||
fetch(`http://${hostname}:${port}/custom.css`)
|
||||
fetch(`${forwardedProto}://${authority}/custom.css`, { credentials: "include" })
|
||||
.then((response) => {
|
||||
if (response.ok) {
|
||||
// If the file exists, create a link element for the custom CSS
|
||||
const link = document.createElement("link");
|
||||
link.href = `http://${hostname}:${port}/custom.css`;
|
||||
link.href = `${forwardedProto}://${authority}/custom.css`;
|
||||
link.type = "text/css";
|
||||
link.rel = "stylesheet";
|
||||
document.head.appendChild(link);
|
||||
@@ -83,7 +83,9 @@ const App = () => {
|
||||
|
||||
socket.on("connect", () => {
|
||||
// Fetch data from the API when the client connects
|
||||
fetch(`http://${hostname}:${port}/service-properties`)
|
||||
fetch(`${forwardedProto}://${authority}/service-properties`, {
|
||||
credentials: "include",
|
||||
})
|
||||
.then((response) => response.json())
|
||||
.then((data: State) => {
|
||||
dispatch({ type: "SET_DATA", data });
|
||||
@@ -91,7 +93,7 @@ const App = () => {
|
||||
|
||||
document.title = data.name; // Setting browser tab title
|
||||
});
|
||||
fetch(`http://${hostname}:${port}/web-settings`)
|
||||
fetch(`${forwardedProto}://${authority}/web-settings`, { credentials: "include" })
|
||||
.then((response) => response.json())
|
||||
.then((data: Record<string, WebSetting>) => setWebSettings(data));
|
||||
setConnectionStatus("connected");
|
||||
|
||||
@@ -4,7 +4,6 @@ import { NumberComponent, NumberObject } from "./NumberComponent";
|
||||
import { SliderComponent } from "./SliderComponent";
|
||||
import { EnumComponent } from "./EnumComponent";
|
||||
import { MethodComponent } from "./MethodComponent";
|
||||
import { AsyncMethodComponent } from "./AsyncMethodComponent";
|
||||
import { StringComponent } from "./StringComponent";
|
||||
import { ListComponent } from "./ListComponent";
|
||||
import { DataServiceComponent, DataServiceJSON } from "./DataServiceComponent";
|
||||
@@ -17,6 +16,7 @@ import { updateValue } from "../socket";
|
||||
import { DictComponent } from "./DictComponent";
|
||||
import { parseFullAccessPath } from "../utils/stateUtils";
|
||||
import { SerializedEnum, SerializedObject } from "../types/SerializedObject";
|
||||
import { TaskComponent, TaskStatus } from "./TaskComponent";
|
||||
|
||||
interface GenericComponentProps {
|
||||
attribute: SerializedObject;
|
||||
@@ -144,30 +144,16 @@ export const GenericComponent = React.memo(
|
||||
/>
|
||||
);
|
||||
} else if (attribute.type === "method") {
|
||||
if (!attribute.async) {
|
||||
return (
|
||||
<MethodComponent
|
||||
fullAccessPath={fullAccessPath}
|
||||
docString={attribute.doc}
|
||||
addNotification={addNotification}
|
||||
displayName={displayName}
|
||||
id={id}
|
||||
render={attribute.frontend_render}
|
||||
/>
|
||||
);
|
||||
} else {
|
||||
return (
|
||||
<AsyncMethodComponent
|
||||
fullAccessPath={fullAccessPath}
|
||||
docString={attribute.doc}
|
||||
value={attribute.value as "RUNNING" | null}
|
||||
addNotification={addNotification}
|
||||
displayName={displayName}
|
||||
id={id}
|
||||
render={attribute.frontend_render}
|
||||
/>
|
||||
);
|
||||
}
|
||||
return (
|
||||
<MethodComponent
|
||||
fullAccessPath={fullAccessPath}
|
||||
docString={attribute.doc}
|
||||
addNotification={addNotification}
|
||||
displayName={displayName}
|
||||
id={id}
|
||||
render={attribute.frontend_render}
|
||||
/>
|
||||
);
|
||||
} else if (attribute.type === "str") {
|
||||
return (
|
||||
<StringComponent
|
||||
@@ -182,6 +168,17 @@ export const GenericComponent = React.memo(
|
||||
id={id}
|
||||
/>
|
||||
);
|
||||
} else if (attribute.type == "Task") {
|
||||
return (
|
||||
<TaskComponent
|
||||
fullAccessPath={fullAccessPath}
|
||||
docString={attribute.doc}
|
||||
status={attribute.value["status"].value as TaskStatus}
|
||||
addNotification={addNotification}
|
||||
displayName={displayName}
|
||||
id={id}
|
||||
/>
|
||||
);
|
||||
} else if (attribute.type === "DataService") {
|
||||
return (
|
||||
<DataServiceComponent
|
||||
|
||||
@@ -132,6 +132,8 @@ const handleNumericKey = (
|
||||
selectionStart: number,
|
||||
selectionEnd: number,
|
||||
) => {
|
||||
let newValue = value;
|
||||
|
||||
// Check if a number key or a decimal point key is pressed
|
||||
if (key === "." && value.includes(".")) {
|
||||
// Check if value already contains a decimal. If so, ignore input.
|
||||
@@ -139,14 +141,34 @@ const handleNumericKey = (
|
||||
return { value, selectionStart };
|
||||
}
|
||||
|
||||
let newValue = value;
|
||||
// Handle minus sign input
|
||||
if (key === "-") {
|
||||
if (selectionStart === 0 && selectionEnd > selectionStart) {
|
||||
// Replace selection with minus if selection starts at 0
|
||||
newValue = "-" + value.slice(selectionEnd);
|
||||
selectionStart = 1;
|
||||
} else if (selectionStart === 0 && !value.startsWith("-")) {
|
||||
// Add minus at the beginning if it doesn't exist
|
||||
newValue = "-" + value;
|
||||
selectionStart = 1;
|
||||
} else if (
|
||||
(selectionStart === 0 || selectionStart === 1) &&
|
||||
value.startsWith("-")
|
||||
) {
|
||||
// Remove minus if it exists
|
||||
newValue = value.slice(1);
|
||||
selectionStart = 0;
|
||||
}
|
||||
|
||||
return { value: newValue, selectionStart };
|
||||
}
|
||||
|
||||
// Add the new key at the cursor's position
|
||||
if (selectionEnd > selectionStart) {
|
||||
// If there is a selection, replace it with the key
|
||||
newValue = value.slice(0, selectionStart) + key + value.slice(selectionEnd);
|
||||
} else {
|
||||
// otherwise, append the key after the selection start
|
||||
// Otherwise, insert the key at the cursor position
|
||||
newValue = value.slice(0, selectionStart) + key + value.slice(selectionStart);
|
||||
}
|
||||
|
||||
@@ -201,17 +223,7 @@ export const NumberComponent = React.memo((props: NumberComponentProps) => {
|
||||
// Select everything when pressing Ctrl + a
|
||||
inputTarget.setSelectionRange(0, value.length);
|
||||
return;
|
||||
} else if (key === "-") {
|
||||
if (selectionStart === 0 && !value.startsWith("-")) {
|
||||
newValue = "-" + value;
|
||||
selectionStart++;
|
||||
} else if (value.startsWith("-") && selectionStart === 1) {
|
||||
newValue = value.substring(1); // remove minus sign
|
||||
selectionStart--;
|
||||
} else {
|
||||
return; // Ignore "-" pressed in other positions
|
||||
}
|
||||
} else if (key >= "0" && key <= "9") {
|
||||
} else if ((key >= "0" && key <= "9") || key === "-") {
|
||||
// Check if a number key or a decimal point key is pressed
|
||||
({ value: newValue, selectionStart } = handleNumericKey(
|
||||
key,
|
||||
|
||||
@@ -5,67 +5,51 @@ import { DocStringComponent } from "./DocStringComponent";
|
||||
import { LevelName } from "./NotificationsComponent";
|
||||
import useRenderCount from "../hooks/useRenderCount";
|
||||
|
||||
interface AsyncMethodProps {
|
||||
export type TaskStatus = "RUNNING" | "NOT_RUNNING";
|
||||
|
||||
interface TaskProps {
|
||||
fullAccessPath: string;
|
||||
value: "RUNNING" | null;
|
||||
docString: string | null;
|
||||
hideOutput?: boolean;
|
||||
status: TaskStatus;
|
||||
addNotification: (message: string, levelname?: LevelName) => void;
|
||||
displayName: string;
|
||||
id: string;
|
||||
render: boolean;
|
||||
}
|
||||
|
||||
export const AsyncMethodComponent = React.memo((props: AsyncMethodProps) => {
|
||||
const {
|
||||
fullAccessPath,
|
||||
docString,
|
||||
value: runningTask,
|
||||
addNotification,
|
||||
displayName,
|
||||
id,
|
||||
} = props;
|
||||
|
||||
// Conditional rendering based on the 'render' prop.
|
||||
if (!props.render) {
|
||||
return null;
|
||||
}
|
||||
export const TaskComponent = React.memo((props: TaskProps) => {
|
||||
const { fullAccessPath, docString, status, addNotification, displayName, id } = props;
|
||||
|
||||
const renderCount = useRenderCount();
|
||||
const formRef = useRef(null);
|
||||
const [spinning, setSpinning] = useState(false);
|
||||
const name = fullAccessPath.split(".").at(-1)!;
|
||||
const parentPath = fullAccessPath.slice(0, -(name.length + 1));
|
||||
|
||||
useEffect(() => {
|
||||
let message: string;
|
||||
|
||||
if (runningTask === null) {
|
||||
message = `${fullAccessPath} task was stopped.`;
|
||||
} else {
|
||||
if (status === "RUNNING") {
|
||||
message = `${fullAccessPath} was started.`;
|
||||
} else {
|
||||
message = `${fullAccessPath} was stopped.`;
|
||||
}
|
||||
|
||||
addNotification(message);
|
||||
setSpinning(false);
|
||||
}, [props.value]);
|
||||
}, [status]);
|
||||
|
||||
const execute = async (event: React.FormEvent) => {
|
||||
event.preventDefault();
|
||||
let method_name: string;
|
||||
|
||||
if (runningTask !== undefined && runningTask !== null) {
|
||||
method_name = `stop_${name}`;
|
||||
} else {
|
||||
method_name = `start_${name}`;
|
||||
}
|
||||
const method_name = status == "RUNNING" ? "stop" : "start";
|
||||
|
||||
const accessPath = [parentPath, method_name].filter((element) => element).join(".");
|
||||
const accessPath = [fullAccessPath, method_name]
|
||||
.filter((element) => element)
|
||||
.join(".");
|
||||
setSpinning(true);
|
||||
runMethod(accessPath);
|
||||
};
|
||||
|
||||
return (
|
||||
<div className="component asyncMethodComponent" id={id}>
|
||||
<div className="component taskComponent" id={id}>
|
||||
{process.env.NODE_ENV === "development" && <div>Render count: {renderCount}</div>}
|
||||
<Form onSubmit={execute} ref={formRef}>
|
||||
<InputGroup>
|
||||
@@ -76,7 +60,7 @@ export const AsyncMethodComponent = React.memo((props: AsyncMethodProps) => {
|
||||
<Button id={`button-${id}`} type="submit">
|
||||
{spinning ? (
|
||||
<Spinner size="sm" role="status" aria-hidden="true" />
|
||||
) : runningTask === "RUNNING" ? (
|
||||
) : status === "RUNNING" ? (
|
||||
"Stop "
|
||||
) : (
|
||||
"Start "
|
||||
@@ -88,4 +72,4 @@ export const AsyncMethodComponent = React.memo((props: AsyncMethodProps) => {
|
||||
);
|
||||
});
|
||||
|
||||
AsyncMethodComponent.displayName = "AsyncMethodComponent";
|
||||
TaskComponent.displayName = "TaskComponent";
|
||||
@@ -2,14 +2,29 @@ import { io } from "socket.io-client";
|
||||
import { serializeDict, serializeList } from "./utils/serializationUtils";
|
||||
import { SerializedObject } from "./types/SerializedObject";
|
||||
|
||||
export const hostname =
|
||||
const hostname =
|
||||
process.env.NODE_ENV === "development" ? `localhost` : window.location.hostname;
|
||||
export const port =
|
||||
process.env.NODE_ENV === "development" ? 8001 : window.location.port;
|
||||
const URL = `ws://${hostname}:${port}/`;
|
||||
console.debug("Websocket: ", URL);
|
||||
const port = process.env.NODE_ENV === "development" ? 8001 : window.location.port;
|
||||
|
||||
export const socket = io(URL, { path: "/ws/socket.io", transports: ["websocket"] });
|
||||
// Get the forwarded prefix from the global variable
|
||||
export const forwardedPrefix: string =
|
||||
(window as any) /* eslint-disable-line @typescript-eslint/no-explicit-any */
|
||||
.__FORWARDED_PREFIX__ || "";
|
||||
// Get the forwarded protocol type from the global variable
|
||||
export const forwardedProto: string =
|
||||
(window as any) /* eslint-disable-line @typescript-eslint/no-explicit-any */
|
||||
.__FORWARDED_PROTO__ || "http";
|
||||
|
||||
export const authority = `${hostname}:${port}${forwardedPrefix}`;
|
||||
|
||||
const wsProto = forwardedProto === "http" ? "ws" : "wss";
|
||||
|
||||
const URL = `${wsProto}://${hostname}:${port}/`;
|
||||
console.debug("Websocket: ", URL);
|
||||
export const socket = io(URL, {
|
||||
path: `${forwardedPrefix}/ws/socket.io`,
|
||||
transports: ["websocket"],
|
||||
});
|
||||
|
||||
export const updateValue = (
|
||||
serializedObject: SerializedObject,
|
||||
|
||||
@@ -77,7 +77,12 @@ type SerializedException = SerializedObjectBase & {
|
||||
type: "Exception";
|
||||
};
|
||||
|
||||
type DataServiceTypes = "DataService" | "Image" | "NumberSlider" | "DeviceConnection";
|
||||
type DataServiceTypes =
|
||||
| "DataService"
|
||||
| "Image"
|
||||
| "NumberSlider"
|
||||
| "DeviceConnection"
|
||||
| "Task";
|
||||
|
||||
type SerializedDataService = SerializedObjectBase & {
|
||||
name: string;
|
||||
|
||||
44
mkdocs.yml
@@ -6,7 +6,14 @@ nav:
|
||||
- Getting Started: getting-started.md
|
||||
- User Guide:
|
||||
- Components Guide: user-guide/Components.md
|
||||
- Interacting with pydase Services: user-guide/interaction/main.md
|
||||
- Interacting with pydase Services: user-guide/interaction/README.md
|
||||
- Achieving Service Persistence: user-guide/Service_Persistence.md
|
||||
- Understanding Tasks: user-guide/Tasks.md
|
||||
- Understanding Units: user-guide/Understanding-Units.md
|
||||
- Validating Property Setters: user-guide/Validating-Property-Setters.md
|
||||
- Configuring pydase: user-guide/Configuration.md
|
||||
- Advanced:
|
||||
- Deploying behind a Reverse Proxy: user-guide/advanced/Reverse-Proxy.md
|
||||
- Developer Guide:
|
||||
- Developer Guide: dev-guide/README.md
|
||||
- API Reference: dev-guide/api.md
|
||||
@@ -18,6 +25,7 @@ nav:
|
||||
- License: about/license.md
|
||||
|
||||
theme:
|
||||
logo: images/logo-colour.png
|
||||
name: material
|
||||
features:
|
||||
- content.code.copy
|
||||
@@ -40,10 +48,36 @@ markdown_extensions:
|
||||
|
||||
|
||||
plugins:
|
||||
- include-markdown
|
||||
- search
|
||||
- mkdocstrings
|
||||
- swagger-ui-tag
|
||||
- include-markdown
|
||||
- search
|
||||
- mkdocstrings:
|
||||
handlers:
|
||||
python:
|
||||
paths: [src] # search packages in the src folder
|
||||
import:
|
||||
- https://docs.python.org/3/objects.inv
|
||||
- https://docs.pydantic.dev/latest/objects.inv
|
||||
- https://confz.readthedocs.io/en/latest/objects.inv
|
||||
- https://python-socketio.readthedocs.io/en/stable/objects.inv
|
||||
options:
|
||||
show_source: true
|
||||
inherited_members: true
|
||||
merge_init_into_class: true
|
||||
show_signature_annotations: true
|
||||
signature_crossrefs: true
|
||||
separate_signature: true
|
||||
docstring_options:
|
||||
ignore_init_summary: true
|
||||
# docstring_section_style: list
|
||||
heading_level: 2
|
||||
parameter_headings: true
|
||||
show_root_heading: true
|
||||
show_root_full_path: true
|
||||
show_symbol_type_heading: true
|
||||
show_symbol_type_toc: true
|
||||
# summary: true
|
||||
unwrap_annotated: true
|
||||
- swagger-ui-tag
|
||||
|
||||
watch:
|
||||
- src/pydase
|
||||
|
||||
556
poetry.lock
generated
@@ -1,91 +1,103 @@
|
||||
# This file is automatically @generated by Poetry 1.8.2 and should not be changed by hand.
|
||||
# This file is automatically @generated by Poetry 1.8.3 and should not be changed by hand.
|
||||
|
||||
[[package]]
|
||||
name = "aiohappyeyeballs"
|
||||
version = "2.3.4"
|
||||
description = "Happy Eyeballs for asyncio"
|
||||
optional = false
|
||||
python-versions = "<4.0,>=3.8"
|
||||
files = [
|
||||
{file = "aiohappyeyeballs-2.3.4-py3-none-any.whl", hash = "sha256:40a16ceffcf1fc9e142fd488123b2e218abc4188cf12ac20c67200e1579baa42"},
|
||||
{file = "aiohappyeyeballs-2.3.4.tar.gz", hash = "sha256:7e1ae8399c320a8adec76f6c919ed5ceae6edd4c3672f4d9eae2b27e37c80ff6"},
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "aiohttp"
|
||||
version = "3.9.5"
|
||||
version = "3.10.1"
|
||||
description = "Async http client/server framework (asyncio)"
|
||||
optional = false
|
||||
python-versions = ">=3.8"
|
||||
files = [
|
||||
{file = "aiohttp-3.9.5-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:fcde4c397f673fdec23e6b05ebf8d4751314fa7c24f93334bf1f1364c1c69ac7"},
|
||||
{file = "aiohttp-3.9.5-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:5d6b3f1fabe465e819aed2c421a6743d8debbde79b6a8600739300630a01bf2c"},
|
||||
{file = "aiohttp-3.9.5-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:6ae79c1bc12c34082d92bf9422764f799aee4746fd7a392db46b7fd357d4a17a"},
|
||||
{file = "aiohttp-3.9.5-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4d3ebb9e1316ec74277d19c5f482f98cc65a73ccd5430540d6d11682cd857430"},
|
||||
{file = "aiohttp-3.9.5-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:84dabd95154f43a2ea80deffec9cb44d2e301e38a0c9d331cc4aa0166fe28ae3"},
|
||||
{file = "aiohttp-3.9.5-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c8a02fbeca6f63cb1f0475c799679057fc9268b77075ab7cf3f1c600e81dd46b"},
|
||||
{file = "aiohttp-3.9.5-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c26959ca7b75ff768e2776d8055bf9582a6267e24556bb7f7bd29e677932be72"},
|
||||
{file = "aiohttp-3.9.5-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:714d4e5231fed4ba2762ed489b4aec07b2b9953cf4ee31e9871caac895a839c0"},
|
||||
{file = "aiohttp-3.9.5-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:e7a6a8354f1b62e15d48e04350f13e726fa08b62c3d7b8401c0a1314f02e3558"},
|
||||
{file = "aiohttp-3.9.5-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:c413016880e03e69d166efb5a1a95d40f83d5a3a648d16486592c49ffb76d0db"},
|
||||
{file = "aiohttp-3.9.5-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:ff84aeb864e0fac81f676be9f4685f0527b660f1efdc40dcede3c251ef1e867f"},
|
||||
{file = "aiohttp-3.9.5-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:ad7f2919d7dac062f24d6f5fe95d401597fbb015a25771f85e692d043c9d7832"},
|
||||
{file = "aiohttp-3.9.5-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:702e2c7c187c1a498a4e2b03155d52658fdd6fda882d3d7fbb891a5cf108bb10"},
|
||||
{file = "aiohttp-3.9.5-cp310-cp310-win32.whl", hash = "sha256:67c3119f5ddc7261d47163ed86d760ddf0e625cd6246b4ed852e82159617b5fb"},
|
||||
{file = "aiohttp-3.9.5-cp310-cp310-win_amd64.whl", hash = "sha256:471f0ef53ccedec9995287f02caf0c068732f026455f07db3f01a46e49d76bbb"},
|
||||
{file = "aiohttp-3.9.5-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:e0ae53e33ee7476dd3d1132f932eeb39bf6125083820049d06edcdca4381f342"},
|
||||
{file = "aiohttp-3.9.5-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:c088c4d70d21f8ca5c0b8b5403fe84a7bc8e024161febdd4ef04575ef35d474d"},
|
||||
{file = "aiohttp-3.9.5-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:639d0042b7670222f33b0028de6b4e2fad6451462ce7df2af8aee37dcac55424"},
|
||||
{file = "aiohttp-3.9.5-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f26383adb94da5e7fb388d441bf09c61e5e35f455a3217bfd790c6b6bc64b2ee"},
|
||||
{file = "aiohttp-3.9.5-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:66331d00fb28dc90aa606d9a54304af76b335ae204d1836f65797d6fe27f1ca2"},
|
||||
{file = "aiohttp-3.9.5-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:4ff550491f5492ab5ed3533e76b8567f4b37bd2995e780a1f46bca2024223233"},
|
||||
{file = "aiohttp-3.9.5-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f22eb3a6c1080d862befa0a89c380b4dafce29dc6cd56083f630073d102eb595"},
|
||||
{file = "aiohttp-3.9.5-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a81b1143d42b66ffc40a441379387076243ef7b51019204fd3ec36b9f69e77d6"},
|
||||
{file = "aiohttp-3.9.5-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:f64fd07515dad67f24b6ea4a66ae2876c01031de91c93075b8093f07c0a2d93d"},
|
||||
{file = "aiohttp-3.9.5-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:93e22add827447d2e26d67c9ac0161756007f152fdc5210277d00a85f6c92323"},
|
||||
{file = "aiohttp-3.9.5-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:55b39c8684a46e56ef8c8d24faf02de4a2b2ac60d26cee93bc595651ff545de9"},
|
||||
{file = "aiohttp-3.9.5-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:4715a9b778f4293b9f8ae7a0a7cef9829f02ff8d6277a39d7f40565c737d3771"},
|
||||
{file = "aiohttp-3.9.5-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:afc52b8d969eff14e069a710057d15ab9ac17cd4b6753042c407dcea0e40bf75"},
|
||||
{file = "aiohttp-3.9.5-cp311-cp311-win32.whl", hash = "sha256:b3df71da99c98534be076196791adca8819761f0bf6e08e07fd7da25127150d6"},
|
||||
{file = "aiohttp-3.9.5-cp311-cp311-win_amd64.whl", hash = "sha256:88e311d98cc0bf45b62fc46c66753a83445f5ab20038bcc1b8a1cc05666f428a"},
|
||||
{file = "aiohttp-3.9.5-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:c7a4b7a6cf5b6eb11e109a9755fd4fda7d57395f8c575e166d363b9fc3ec4678"},
|
||||
{file = "aiohttp-3.9.5-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:0a158704edf0abcac8ac371fbb54044f3270bdbc93e254a82b6c82be1ef08f3c"},
|
||||
{file = "aiohttp-3.9.5-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:d153f652a687a8e95ad367a86a61e8d53d528b0530ef382ec5aaf533140ed00f"},
|
||||
{file = "aiohttp-3.9.5-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:82a6a97d9771cb48ae16979c3a3a9a18b600a8505b1115cfe354dfb2054468b4"},
|
||||
{file = "aiohttp-3.9.5-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:60cdbd56f4cad9f69c35eaac0fbbdf1f77b0ff9456cebd4902f3dd1cf096464c"},
|
||||
{file = "aiohttp-3.9.5-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8676e8fd73141ded15ea586de0b7cda1542960a7b9ad89b2b06428e97125d4fa"},
|
||||
{file = "aiohttp-3.9.5-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:da00da442a0e31f1c69d26d224e1efd3a1ca5bcbf210978a2ca7426dfcae9f58"},
|
||||
{file = "aiohttp-3.9.5-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:18f634d540dd099c262e9f887c8bbacc959847cfe5da7a0e2e1cf3f14dbf2daf"},
|
||||
{file = "aiohttp-3.9.5-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:320e8618eda64e19d11bdb3bd04ccc0a816c17eaecb7e4945d01deee2a22f95f"},
|
||||
{file = "aiohttp-3.9.5-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:2faa61a904b83142747fc6a6d7ad8fccff898c849123030f8e75d5d967fd4a81"},
|
||||
{file = "aiohttp-3.9.5-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:8c64a6dc3fe5db7b1b4d2b5cb84c4f677768bdc340611eca673afb7cf416ef5a"},
|
||||
{file = "aiohttp-3.9.5-cp312-cp312-musllinux_1_1_s390x.whl", hash = "sha256:393c7aba2b55559ef7ab791c94b44f7482a07bf7640d17b341b79081f5e5cd1a"},
|
||||
{file = "aiohttp-3.9.5-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:c671dc117c2c21a1ca10c116cfcd6e3e44da7fcde37bf83b2be485ab377b25da"},
|
||||
{file = "aiohttp-3.9.5-cp312-cp312-win32.whl", hash = "sha256:5a7ee16aab26e76add4afc45e8f8206c95d1d75540f1039b84a03c3b3800dd59"},
|
||||
{file = "aiohttp-3.9.5-cp312-cp312-win_amd64.whl", hash = "sha256:5ca51eadbd67045396bc92a4345d1790b7301c14d1848feaac1d6a6c9289e888"},
|
||||
{file = "aiohttp-3.9.5-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:694d828b5c41255e54bc2dddb51a9f5150b4eefa9886e38b52605a05d96566e8"},
|
||||
{file = "aiohttp-3.9.5-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:0605cc2c0088fcaae79f01c913a38611ad09ba68ff482402d3410bf59039bfb8"},
|
||||
{file = "aiohttp-3.9.5-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:4558e5012ee03d2638c681e156461d37b7a113fe13970d438d95d10173d25f78"},
|
||||
{file = "aiohttp-3.9.5-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9dbc053ac75ccc63dc3a3cc547b98c7258ec35a215a92bd9f983e0aac95d3d5b"},
|
||||
{file = "aiohttp-3.9.5-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:4109adee842b90671f1b689901b948f347325045c15f46b39797ae1bf17019de"},
|
||||
{file = "aiohttp-3.9.5-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a6ea1a5b409a85477fd8e5ee6ad8f0e40bf2844c270955e09360418cfd09abac"},
|
||||
{file = "aiohttp-3.9.5-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f3c2890ca8c59ee683fd09adf32321a40fe1cf164e3387799efb2acebf090c11"},
|
||||
{file = "aiohttp-3.9.5-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3916c8692dbd9d55c523374a3b8213e628424d19116ac4308e434dbf6d95bbdd"},
|
||||
{file = "aiohttp-3.9.5-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:8d1964eb7617907c792ca00b341b5ec3e01ae8c280825deadbbd678447b127e1"},
|
||||
{file = "aiohttp-3.9.5-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:d5ab8e1f6bee051a4bf6195e38a5c13e5e161cb7bad83d8854524798bd9fcd6e"},
|
||||
{file = "aiohttp-3.9.5-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:52c27110f3862a1afbcb2af4281fc9fdc40327fa286c4625dfee247c3ba90156"},
|
||||
{file = "aiohttp-3.9.5-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:7f64cbd44443e80094309875d4f9c71d0401e966d191c3d469cde4642bc2e031"},
|
||||
{file = "aiohttp-3.9.5-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:8b4f72fbb66279624bfe83fd5eb6aea0022dad8eec62b71e7bf63ee1caadeafe"},
|
||||
{file = "aiohttp-3.9.5-cp38-cp38-win32.whl", hash = "sha256:6380c039ec52866c06d69b5c7aad5478b24ed11696f0e72f6b807cfb261453da"},
|
||||
{file = "aiohttp-3.9.5-cp38-cp38-win_amd64.whl", hash = "sha256:da22dab31d7180f8c3ac7c7635f3bcd53808f374f6aa333fe0b0b9e14b01f91a"},
|
||||
{file = "aiohttp-3.9.5-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:1732102949ff6087589408d76cd6dea656b93c896b011ecafff418c9661dc4ed"},
|
||||
{file = "aiohttp-3.9.5-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:c6021d296318cb6f9414b48e6a439a7f5d1f665464da507e8ff640848ee2a58a"},
|
||||
{file = "aiohttp-3.9.5-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:239f975589a944eeb1bad26b8b140a59a3a320067fb3cd10b75c3092405a1372"},
|
||||
{file = "aiohttp-3.9.5-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3b7b30258348082826d274504fbc7c849959f1989d86c29bc355107accec6cfb"},
|
||||
{file = "aiohttp-3.9.5-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:cd2adf5c87ff6d8b277814a28a535b59e20bfea40a101db6b3bdca7e9926bc24"},
|
||||
{file = "aiohttp-3.9.5-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e9a3d838441bebcf5cf442700e3963f58b5c33f015341f9ea86dcd7d503c07e2"},
|
||||
{file = "aiohttp-3.9.5-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9e3a1ae66e3d0c17cf65c08968a5ee3180c5a95920ec2731f53343fac9bad106"},
|
||||
{file = "aiohttp-3.9.5-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9c69e77370cce2d6df5d12b4e12bdcca60c47ba13d1cbbc8645dd005a20b738b"},
|
||||
{file = "aiohttp-3.9.5-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:0cbf56238f4bbf49dab8c2dc2e6b1b68502b1e88d335bea59b3f5b9f4c001475"},
|
||||
{file = "aiohttp-3.9.5-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:d1469f228cd9ffddd396d9948b8c9cd8022b6d1bf1e40c6f25b0fb90b4f893ed"},
|
||||
{file = "aiohttp-3.9.5-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:45731330e754f5811c314901cebdf19dd776a44b31927fa4b4dbecab9e457b0c"},
|
||||
{file = "aiohttp-3.9.5-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:3fcb4046d2904378e3aeea1df51f697b0467f2aac55d232c87ba162709478c46"},
|
||||
{file = "aiohttp-3.9.5-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:8cf142aa6c1a751fcb364158fd710b8a9be874b81889c2bd13aa8893197455e2"},
|
||||
{file = "aiohttp-3.9.5-cp39-cp39-win32.whl", hash = "sha256:7b179eea70833c8dee51ec42f3b4097bd6370892fa93f510f76762105568cf09"},
|
||||
{file = "aiohttp-3.9.5-cp39-cp39-win_amd64.whl", hash = "sha256:38d80498e2e169bc61418ff36170e0aad0cd268da8b38a17c4cf29d254a8b3f1"},
|
||||
{file = "aiohttp-3.9.5.tar.gz", hash = "sha256:edea7d15772ceeb29db4aff55e482d4bcfb6ae160ce144f2682de02f6d693551"},
|
||||
{file = "aiohttp-3.10.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:47b4c2412960e64d97258f40616efddaebcb34ff664c8a972119ed38fac2a62c"},
|
||||
{file = "aiohttp-3.10.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:e7dbf637f87dd315fa1f36aaed8afa929ee2c607454fb7791e74c88a0d94da59"},
|
||||
{file = "aiohttp-3.10.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:c8fb76214b5b739ce59e2236a6489d9dc3483649cfd6f563dbf5d8e40dbdd57d"},
|
||||
{file = "aiohttp-3.10.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1c577cdcf8f92862363b3d598d971c6a84ed8f0bf824d4cc1ce70c2fb02acb4a"},
|
||||
{file = "aiohttp-3.10.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:777e23609899cb230ad2642b4bdf1008890f84968be78de29099a8a86f10b261"},
|
||||
{file = "aiohttp-3.10.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b07286a1090483799599a2f72f76ac396993da31f6e08efedb59f40876c144fa"},
|
||||
{file = "aiohttp-3.10.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b9db600a86414a9a653e3c1c7f6a2f6a1894ab8f83d11505247bd1b90ad57157"},
|
||||
{file = "aiohttp-3.10.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:01c3f1eb280008e51965a8d160a108c333136f4a39d46f516c64d2aa2e6a53f2"},
|
||||
{file = "aiohttp-3.10.1-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:f5dd109a925fee4c9ac3f6a094900461a2712df41745f5d04782ebcbe6479ccb"},
|
||||
{file = "aiohttp-3.10.1-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:8c81ff4afffef9b1186639506d70ea90888218f5ddfff03870e74ec80bb59970"},
|
||||
{file = "aiohttp-3.10.1-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:2a384dfbe8bfebd203b778a30a712886d147c61943675f4719b56725a8bbe803"},
|
||||
{file = "aiohttp-3.10.1-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:b9fb6508893dc31cfcbb8191ef35abd79751db1d6871b3e2caee83959b4d91eb"},
|
||||
{file = "aiohttp-3.10.1-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:88596384c3bec644a96ae46287bb646d6a23fa6014afe3799156aef42669c6bd"},
|
||||
{file = "aiohttp-3.10.1-cp310-cp310-win32.whl", hash = "sha256:68164d43c580c2e8bf8e0eb4960142919d304052ccab92be10250a3a33b53268"},
|
||||
{file = "aiohttp-3.10.1-cp310-cp310-win_amd64.whl", hash = "sha256:d6bbe2c90c10382ca96df33b56e2060404a4f0f88673e1e84b44c8952517e5f3"},
|
||||
{file = "aiohttp-3.10.1-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:f6979b4f20d3e557a867da9d9227de4c156fcdcb348a5848e3e6190fd7feb972"},
|
||||
{file = "aiohttp-3.10.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:03c0c380c83f8a8d4416224aafb88d378376d6f4cadebb56b060688251055cd4"},
|
||||
{file = "aiohttp-3.10.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:1c2b104e81b3c3deba7e6f5bc1a9a0e9161c380530479970766a6655b8b77c7c"},
|
||||
{file = "aiohttp-3.10.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b023b68c61ab0cd48bd38416b421464a62c381e32b9dc7b4bdfa2905807452a4"},
|
||||
{file = "aiohttp-3.10.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1a07c76a82390506ca0eabf57c0540cf5a60c993c442928fe4928472c4c6e5e6"},
|
||||
{file = "aiohttp-3.10.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:41d8dab8c64ded1edf117d2a64f353efa096c52b853ef461aebd49abae979f16"},
|
||||
{file = "aiohttp-3.10.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:615348fab1a9ef7d0960a905e83ad39051ae9cb0d2837da739b5d3a7671e497a"},
|
||||
{file = "aiohttp-3.10.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:256ee6044214ee9d66d531bb374f065ee94e60667d6bbeaa25ca111fc3997158"},
|
||||
{file = "aiohttp-3.10.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:b7d5bb926805022508b7ddeaad957f1fce7a8d77532068d7bdb431056dc630cd"},
|
||||
{file = "aiohttp-3.10.1-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:028faf71b338f069077af6315ad54281612705d68889f5d914318cbc2aab0d50"},
|
||||
{file = "aiohttp-3.10.1-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:5c12310d153b27aa630750be44e79313acc4e864c421eb7d2bc6fa3429c41bf8"},
|
||||
{file = "aiohttp-3.10.1-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:de1a91d5faded9054957ed0a9e01b9d632109341942fc123947ced358c5d9009"},
|
||||
{file = "aiohttp-3.10.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:9c186b270979fb1dee3ababe2d12fb243ed7da08b30abc83ebac3a928a4ddb15"},
|
||||
{file = "aiohttp-3.10.1-cp311-cp311-win32.whl", hash = "sha256:4a9ce70f5e00380377aac0e568abd075266ff992be2e271765f7b35d228a990c"},
|
||||
{file = "aiohttp-3.10.1-cp311-cp311-win_amd64.whl", hash = "sha256:a77c79bac8d908d839d32c212aef2354d2246eb9deb3e2cb01ffa83fb7a6ea5d"},
|
||||
{file = "aiohttp-3.10.1-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:2212296cdb63b092e295c3e4b4b442e7b7eb41e8a30d0f53c16d5962efed395d"},
|
||||
{file = "aiohttp-3.10.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:4dcb127ca3eb0a61205818a606393cbb60d93b7afb9accd2fd1e9081cc533144"},
|
||||
{file = "aiohttp-3.10.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:cb8b79a65332e1a426ccb6290ce0409e1dc16b4daac1cc5761e059127fa3d134"},
|
||||
{file = "aiohttp-3.10.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:68cc24f707ed9cb961f6ee04020ca01de2c89b2811f3cf3361dc7c96a14bfbcc"},
|
||||
{file = "aiohttp-3.10.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9cb54f5725b4b37af12edf6c9e834df59258c82c15a244daa521a065fbb11717"},
|
||||
{file = "aiohttp-3.10.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:51d03e948e53b3639ce4d438f3d1d8202898ec6655cadcc09ec99229d4adc2a9"},
|
||||
{file = "aiohttp-3.10.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:786299d719eb5d868f161aeec56d589396b053925b7e0ce36e983d30d0a3e55c"},
|
||||
{file = "aiohttp-3.10.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:abda4009a30d51d3f06f36bc7411a62b3e647fa6cc935ef667e3e3d3a7dd09b1"},
|
||||
{file = "aiohttp-3.10.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:67f7639424c313125213954e93a6229d3a1d386855d70c292a12628f600c7150"},
|
||||
{file = "aiohttp-3.10.1-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:8e5a26d7aac4c0d8414a347da162696eea0629fdce939ada6aedf951abb1d745"},
|
||||
{file = "aiohttp-3.10.1-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:120548d89f14b76a041088b582454d89389370632ee12bf39d919cc5c561d1ca"},
|
||||
{file = "aiohttp-3.10.1-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:f5293726943bdcea24715b121d8c4ae12581441d22623b0e6ab12d07ce85f9c4"},
|
||||
{file = "aiohttp-3.10.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:1f8605e573ed6c44ec689d94544b2c4bb1390aaa723a8b5a2cc0a5a485987a68"},
|
||||
{file = "aiohttp-3.10.1-cp312-cp312-win32.whl", hash = "sha256:e7168782621be4448d90169a60c8b37e9b0926b3b79b6097bc180c0a8a119e73"},
|
||||
{file = "aiohttp-3.10.1-cp312-cp312-win_amd64.whl", hash = "sha256:8fbf8c0ded367c5c8eaf585f85ca8dd85ff4d5b73fb8fe1e6ac9e1b5e62e11f7"},
|
||||
{file = "aiohttp-3.10.1-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:54b7f4a20d7cc6bfa4438abbde069d417bb7a119f870975f78a2b99890226d55"},
|
||||
{file = "aiohttp-3.10.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:2fa643ca990323db68911b92f3f7a0ca9ae300ae340d0235de87c523601e58d9"},
|
||||
{file = "aiohttp-3.10.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:d8311d0d690487359fe2247ec5d2cac9946e70d50dced8c01ce9e72341c21151"},
|
||||
{file = "aiohttp-3.10.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:222821c60b8f6a64c5908cb43d69c0ee978a1188f6a8433d4757d39231b42cdb"},
|
||||
{file = "aiohttp-3.10.1-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e7b55d9ede66af7feb6de87ff277e0ccf6d51c7db74cc39337fe3a0e31b5872d"},
|
||||
{file = "aiohttp-3.10.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5a95151a5567b3b00368e99e9c5334a919514f60888a6b6d2054fea5e66e527e"},
|
||||
{file = "aiohttp-3.10.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4e9e9171d2fe6bfd9d3838a6fe63b1e91b55e0bf726c16edf265536e4eafed19"},
|
||||
{file = "aiohttp-3.10.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a57e73f9523e980f6101dc9a83adcd7ac0006ea8bf7937ca3870391c7bb4f8ff"},
|
||||
{file = "aiohttp-3.10.1-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:0df51a3d70a2bfbb9c921619f68d6d02591f24f10e9c76de6f3388c89ed01de6"},
|
||||
{file = "aiohttp-3.10.1-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:b0de63ff0307eac3961b4af74382d30220d4813f36b7aaaf57f063a1243b4214"},
|
||||
{file = "aiohttp-3.10.1-cp38-cp38-musllinux_1_2_ppc64le.whl", hash = "sha256:8db9b749f589b5af8e4993623dbda6716b2b7a5fcb0fa2277bf3ce4b278c7059"},
|
||||
{file = "aiohttp-3.10.1-cp38-cp38-musllinux_1_2_s390x.whl", hash = "sha256:6b14c19172eb53b63931d3e62a9749d6519f7c121149493e6eefca055fcdb352"},
|
||||
{file = "aiohttp-3.10.1-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:5cd57ad998e3038aa87c38fe85c99ed728001bf5dde8eca121cadee06ee3f637"},
|
||||
{file = "aiohttp-3.10.1-cp38-cp38-win32.whl", hash = "sha256:df31641e3f02b77eb3c5fb63c0508bee0fc067cf153da0e002ebbb0db0b6d91a"},
|
||||
{file = "aiohttp-3.10.1-cp38-cp38-win_amd64.whl", hash = "sha256:93094eba50bc2ad4c40ff4997ead1fdcd41536116f2e7d6cfec9596a8ecb3615"},
|
||||
{file = "aiohttp-3.10.1-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:440954ddc6b77257e67170d57b1026aa9545275c33312357472504eef7b4cc0b"},
|
||||
{file = "aiohttp-3.10.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:f9f8beed277488a52ee2b459b23c4135e54d6a819eaba2e120e57311015b58e9"},
|
||||
{file = "aiohttp-3.10.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:d8a8221a63602008550022aa3a4152ca357e1dde7ab3dd1da7e1925050b56863"},
|
||||
{file = "aiohttp-3.10.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a702bd3663b5cbf3916e84bf332400d24cdb18399f0877ca6b313ce6c08bfb43"},
|
||||
{file = "aiohttp-3.10.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1988b370536eb14f0ce7f3a4a5b422ab64c4e255b3f5d7752c5f583dc8c967fc"},
|
||||
{file = "aiohttp-3.10.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7ccf1f0a304352c891d124ac1a9dea59b14b2abed1704aaa7689fc90ef9c5be1"},
|
||||
{file = "aiohttp-3.10.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bc3ea6ef2a83edad84bbdb5d96e22f587b67c68922cd7b6f9d8f24865e655bcf"},
|
||||
{file = "aiohttp-3.10.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:89b47c125ab07f0831803b88aeb12b04c564d5f07a1c1a225d4eb4d2f26e8b5e"},
|
||||
{file = "aiohttp-3.10.1-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:21778552ef3d44aac3278cc6f6d13a6423504fa5f09f2df34bfe489ed9ded7f5"},
|
||||
{file = "aiohttp-3.10.1-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:bde0693073fd5e542e46ea100aa6c1a5d36282dbdbad85b1c3365d5421490a92"},
|
||||
{file = "aiohttp-3.10.1-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:bf66149bb348d8e713f3a8e0b4f5b952094c2948c408e1cfef03b49e86745d60"},
|
||||
{file = "aiohttp-3.10.1-cp39-cp39-musllinux_1_2_s390x.whl", hash = "sha256:587237571a85716d6f71f60d103416c9df7d5acb55d96d3d3ced65f39bff9c0c"},
|
||||
{file = "aiohttp-3.10.1-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:bfe33cba6e127d0b5b417623c9aa621f0a69f304742acdca929a9fdab4593693"},
|
||||
{file = "aiohttp-3.10.1-cp39-cp39-win32.whl", hash = "sha256:9fbff00646cf8211b330690eb2fd64b23e1ce5b63a342436c1d1d6951d53d8dd"},
|
||||
{file = "aiohttp-3.10.1-cp39-cp39-win_amd64.whl", hash = "sha256:5951c328f9ac42d7bce7a6ded535879bc9ae13032818d036749631fa27777905"},
|
||||
{file = "aiohttp-3.10.1.tar.gz", hash = "sha256:8b0d058e4e425d3b45e8ec70d49b402f4d6b21041e674798b1f91ba027c73f28"},
|
||||
]

[package.dependencies]
aiohappyeyeballs = ">=2.3.0"
aiosignal = ">=1.1.2"
async-timeout = {version = ">=4.0,<5.0", markers = "python_version < \"3.11\""}
attrs = ">=17.3.0"
@@ -94,7 +106,7 @@ multidict = ">=4.5,<7.0"
yarl = ">=1.0,<2.0"

[package.extras]
speedups = ["Brotli", "aiodns", "brotlicffi"]
speedups = ["Brotli", "aiodns (>=3.2.0)", "brotlicffi"]

[[package]]
name = "aiohttp-middlewares"
@@ -137,6 +149,28 @@ files = [
    {file = "annotated_types-0.7.0.tar.gz", hash = "sha256:aff07c09a53a08bc8cfccb9c85b05f1aa9a2a6f23728d790723543408344ce89"},
]

[[package]]
name = "anyio"
version = "4.6.0"
description = "High level compatibility layer for multiple asynchronous event loop implementations"
optional = false
python-versions = ">=3.9"
files = [
    {file = "anyio-4.6.0-py3-none-any.whl", hash = "sha256:c7d2e9d63e31599eeb636c8c5c03a7e108d73b345f064f1c19fdc87b79036a9a"},
    {file = "anyio-4.6.0.tar.gz", hash = "sha256:137b4559cbb034c477165047febb6ff83f390fc3b20bf181c1fc0a728cb8beeb"},
]

[package.dependencies]
exceptiongroup = {version = ">=1.0.2", markers = "python_version < \"3.11\""}
idna = ">=2.8"
sniffio = ">=1.1"
typing-extensions = {version = ">=4.1", markers = "python_version < \"3.11\""}

[package.extras]
doc = ["Sphinx (>=7.4,<8.0)", "packaging", "sphinx-autodoc-typehints (>=1.2.0)", "sphinx-rtd-theme"]
test = ["anyio[trio]", "coverage[toml] (>=7)", "exceptiongroup (>=1.2.0)", "hypothesis (>=4.0)", "psutil (>=5.9)", "pytest (>=7.0)", "pytest-mock (>=3.6.1)", "trustme", "uvloop (>=0.21.0b1)"]
trio = ["trio (>=0.26.1)"]

[[package]]
name = "appdirs"
version = "1.4.4"
@@ -161,22 +195,22 @@ files = [

[[package]]
name = "attrs"
version = "23.2.0"
version = "24.1.0"
description = "Classes Without Boilerplate"
optional = false
python-versions = ">=3.7"
files = [
    {file = "attrs-23.2.0-py3-none-any.whl", hash = "sha256:99b87a485a5820b23b879f04c2305b44b951b502fd64be915879d77a7e8fc6f1"},
    {file = "attrs-23.2.0.tar.gz", hash = "sha256:935dc3b529c262f6cf76e50877d35a4bd3c1de194fd41f47a2b7ae8f19971f30"},
    {file = "attrs-24.1.0-py3-none-any.whl", hash = "sha256:377b47448cb61fea38533f671fba0d0f8a96fd58facd4dc518e3dac9dbea0905"},
    {file = "attrs-24.1.0.tar.gz", hash = "sha256:adbdec84af72d38be7628e353a09b6a6790d15cd71819f6e9d7b0faa8a125745"},
]

[package.extras]
cov = ["attrs[tests]", "coverage[toml] (>=5.3)"]
dev = ["attrs[tests]", "pre-commit"]
docs = ["furo", "myst-parser", "sphinx", "sphinx-notfound-page", "sphinxcontrib-towncrier", "towncrier", "zope-interface"]
tests = ["attrs[tests-no-zope]", "zope-interface"]
tests-mypy = ["mypy (>=1.6)", "pytest-mypy-plugins"]
tests-no-zope = ["attrs[tests-mypy]", "cloudpickle", "hypothesis", "pympler", "pytest (>=4.3.0)", "pytest-xdist[psutil]"]
benchmark = ["cloudpickle", "hypothesis", "mypy (>=1.11.1)", "pympler", "pytest (>=4.3.0)", "pytest-codspeed", "pytest-mypy-plugins", "pytest-xdist[psutil]"]
cov = ["cloudpickle", "coverage[toml] (>=5.3)", "hypothesis", "mypy (>=1.11.1)", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins", "pytest-xdist[psutil]"]
dev = ["cloudpickle", "hypothesis", "mypy (>=1.11.1)", "pre-commit", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins", "pytest-xdist[psutil]"]
docs = ["cogapp", "furo", "myst-parser", "sphinx", "sphinx-notfound-page", "sphinxcontrib-towncrier", "towncrier (<24.7)"]
tests = ["cloudpickle", "hypothesis", "mypy (>=1.11.1)", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins", "pytest-xdist[psutil]"]
tests-mypy = ["mypy (>=1.11.1)", "pytest-mypy-plugins"]

[[package]]
name = "babel"
@@ -441,63 +475,83 @@ test-no-images = ["pytest", "pytest-cov", "pytest-xdist", "wurlitzer"]

[[package]]
name = "coverage"
version = "7.6.0"
version = "7.6.1"
description = "Code coverage measurement for Python"
optional = false
python-versions = ">=3.8"
files = [
{file = "coverage-7.6.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:dff044f661f59dace805eedb4a7404c573b6ff0cdba4a524141bc63d7be5c7fd"},
|
||||
{file = "coverage-7.6.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:a8659fd33ee9e6ca03950cfdcdf271d645cf681609153f218826dd9805ab585c"},
|
||||
{file = "coverage-7.6.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7792f0ab20df8071d669d929c75c97fecfa6bcab82c10ee4adb91c7a54055463"},
|
||||
{file = "coverage-7.6.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d4b3cd1ca7cd73d229487fa5caca9e4bc1f0bca96526b922d61053ea751fe791"},
|
||||
{file = "coverage-7.6.0-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e7e128f85c0b419907d1f38e616c4f1e9f1d1b37a7949f44df9a73d5da5cd53c"},
|
||||
{file = "coverage-7.6.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:a94925102c89247530ae1dab7dc02c690942566f22e189cbd53579b0693c0783"},
|
||||
{file = "coverage-7.6.0-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:dcd070b5b585b50e6617e8972f3fbbee786afca71b1936ac06257f7e178f00f6"},
|
||||
{file = "coverage-7.6.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:d50a252b23b9b4dfeefc1f663c568a221092cbaded20a05a11665d0dbec9b8fb"},
|
||||
{file = "coverage-7.6.0-cp310-cp310-win32.whl", hash = "sha256:0e7b27d04131c46e6894f23a4ae186a6a2207209a05df5b6ad4caee6d54a222c"},
|
||||
{file = "coverage-7.6.0-cp310-cp310-win_amd64.whl", hash = "sha256:54dece71673b3187c86226c3ca793c5f891f9fc3d8aa183f2e3653da18566169"},
|
||||
{file = "coverage-7.6.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:c7b525ab52ce18c57ae232ba6f7010297a87ced82a2383b1afd238849c1ff933"},
|
||||
{file = "coverage-7.6.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:4bea27c4269234e06f621f3fac3925f56ff34bc14521484b8f66a580aacc2e7d"},
|
||||
{file = "coverage-7.6.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ed8d1d1821ba5fc88d4a4f45387b65de52382fa3ef1f0115a4f7a20cdfab0e94"},
|
||||
{file = "coverage-7.6.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:01c322ef2bbe15057bc4bf132b525b7e3f7206f071799eb8aa6ad1940bcf5fb1"},
|
||||
{file = "coverage-7.6.0-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:03cafe82c1b32b770a29fd6de923625ccac3185a54a5e66606da26d105f37dac"},
|
||||
{file = "coverage-7.6.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:0d1b923fc4a40c5832be4f35a5dab0e5ff89cddf83bb4174499e02ea089daf57"},
|
||||
{file = "coverage-7.6.0-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:4b03741e70fb811d1a9a1d75355cf391f274ed85847f4b78e35459899f57af4d"},
|
||||
{file = "coverage-7.6.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:a73d18625f6a8a1cbb11eadc1d03929f9510f4131879288e3f7922097a429f63"},
|
||||
{file = "coverage-7.6.0-cp311-cp311-win32.whl", hash = "sha256:65fa405b837060db569a61ec368b74688f429b32fa47a8929a7a2f9b47183713"},
|
||||
{file = "coverage-7.6.0-cp311-cp311-win_amd64.whl", hash = "sha256:6379688fb4cfa921ae349c76eb1a9ab26b65f32b03d46bb0eed841fd4cb6afb1"},
|
||||
{file = "coverage-7.6.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:f7db0b6ae1f96ae41afe626095149ecd1b212b424626175a6633c2999eaad45b"},
|
||||
{file = "coverage-7.6.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:bbdf9a72403110a3bdae77948b8011f644571311c2fb35ee15f0f10a8fc082e8"},
|
||||
{file = "coverage-7.6.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9cc44bf0315268e253bf563f3560e6c004efe38f76db03a1558274a6e04bf5d5"},
|
||||
{file = "coverage-7.6.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:da8549d17489cd52f85a9829d0e1d91059359b3c54a26f28bec2c5d369524807"},
|
||||
{file = "coverage-7.6.0-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0086cd4fc71b7d485ac93ca4239c8f75732c2ae3ba83f6be1c9be59d9e2c6382"},
|
||||
{file = "coverage-7.6.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:1fad32ee9b27350687035cb5fdf9145bc9cf0a094a9577d43e909948ebcfa27b"},
|
||||
{file = "coverage-7.6.0-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:044a0985a4f25b335882b0966625270a8d9db3d3409ddc49a4eb00b0ef5e8cee"},
|
||||
{file = "coverage-7.6.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:76d5f82213aa78098b9b964ea89de4617e70e0d43e97900c2778a50856dac605"},
|
||||
{file = "coverage-7.6.0-cp312-cp312-win32.whl", hash = "sha256:3c59105f8d58ce500f348c5b56163a4113a440dad6daa2294b5052a10db866da"},
|
||||
{file = "coverage-7.6.0-cp312-cp312-win_amd64.whl", hash = "sha256:ca5d79cfdae420a1d52bf177de4bc2289c321d6c961ae321503b2ca59c17ae67"},
|
||||
{file = "coverage-7.6.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:d39bd10f0ae453554798b125d2f39884290c480f56e8a02ba7a6ed552005243b"},
|
||||
{file = "coverage-7.6.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:beb08e8508e53a568811016e59f3234d29c2583f6b6e28572f0954a6b4f7e03d"},
|
||||
{file = "coverage-7.6.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b2e16f4cd2bc4d88ba30ca2d3bbf2f21f00f382cf4e1ce3b1ddc96c634bc48ca"},
|
||||
{file = "coverage-7.6.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6616d1c9bf1e3faea78711ee42a8b972367d82ceae233ec0ac61cc7fec09fa6b"},
|
||||
{file = "coverage-7.6.0-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ad4567d6c334c46046d1c4c20024de2a1c3abc626817ae21ae3da600f5779b44"},
|
||||
{file = "coverage-7.6.0-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:d17c6a415d68cfe1091d3296ba5749d3d8696e42c37fca5d4860c5bf7b729f03"},
|
||||
{file = "coverage-7.6.0-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:9146579352d7b5f6412735d0f203bbd8d00113a680b66565e205bc605ef81bc6"},
|
||||
{file = "coverage-7.6.0-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:cdab02a0a941af190df8782aafc591ef3ad08824f97850b015c8c6a8b3877b0b"},
|
||||
{file = "coverage-7.6.0-cp38-cp38-win32.whl", hash = "sha256:df423f351b162a702c053d5dddc0fc0ef9a9e27ea3f449781ace5f906b664428"},
|
||||
{file = "coverage-7.6.0-cp38-cp38-win_amd64.whl", hash = "sha256:f2501d60d7497fd55e391f423f965bbe9e650e9ffc3c627d5f0ac516026000b8"},
|
||||
{file = "coverage-7.6.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:7221f9ac9dad9492cecab6f676b3eaf9185141539d5c9689d13fd6b0d7de840c"},
|
||||
{file = "coverage-7.6.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:ddaaa91bfc4477d2871442bbf30a125e8fe6b05da8a0015507bfbf4718228ab2"},
|
||||
{file = "coverage-7.6.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c4cbe651f3904e28f3a55d6f371203049034b4ddbce65a54527a3f189ca3b390"},
|
||||
{file = "coverage-7.6.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:831b476d79408ab6ccfadaaf199906c833f02fdb32c9ab907b1d4aa0713cfa3b"},
|
||||
{file = "coverage-7.6.0-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:46c3d091059ad0b9c59d1034de74a7f36dcfa7f6d3bde782c49deb42438f2450"},
|
||||
{file = "coverage-7.6.0-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:4d5fae0a22dc86259dee66f2cc6c1d3e490c4a1214d7daa2a93d07491c5c04b6"},
|
||||
{file = "coverage-7.6.0-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:07ed352205574aad067482e53dd606926afebcb5590653121063fbf4e2175166"},
|
||||
{file = "coverage-7.6.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:49c76cdfa13015c4560702574bad67f0e15ca5a2872c6a125f6327ead2b731dd"},
|
||||
{file = "coverage-7.6.0-cp39-cp39-win32.whl", hash = "sha256:482855914928c8175735a2a59c8dc5806cf7d8f032e4820d52e845d1f731dca2"},
|
||||
{file = "coverage-7.6.0-cp39-cp39-win_amd64.whl", hash = "sha256:543ef9179bc55edfd895154a51792b01c017c87af0ebaae092720152e19e42ca"},
|
||||
{file = "coverage-7.6.0-pp38.pp39.pp310-none-any.whl", hash = "sha256:6fe885135c8a479d3e37a7aae61cbd3a0fb2deccb4dda3c25f92a49189f766d6"},
|
||||
{file = "coverage-7.6.0.tar.gz", hash = "sha256:289cc803fa1dc901f84701ac10c9ee873619320f2f9aff38794db4a4a0268d51"},
|
||||
{file = "coverage-7.6.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:b06079abebbc0e89e6163b8e8f0e16270124c154dc6e4a47b413dd538859af16"},
|
||||
{file = "coverage-7.6.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:cf4b19715bccd7ee27b6b120e7e9dd56037b9c0681dcc1adc9ba9db3d417fa36"},
|
||||
{file = "coverage-7.6.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e61c0abb4c85b095a784ef23fdd4aede7a2628478e7baba7c5e3deba61070a02"},
|
||||
{file = "coverage-7.6.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:fd21f6ae3f08b41004dfb433fa895d858f3f5979e7762d052b12aef444e29afc"},
|
||||
{file = "coverage-7.6.1-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8f59d57baca39b32db42b83b2a7ba6f47ad9c394ec2076b084c3f029b7afca23"},
|
||||
{file = "coverage-7.6.1-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:a1ac0ae2b8bd743b88ed0502544847c3053d7171a3cff9228af618a068ed9c34"},
|
||||
{file = "coverage-7.6.1-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:e6a08c0be454c3b3beb105c0596ebdc2371fab6bb90c0c0297f4e58fd7e1012c"},
|
||||
{file = "coverage-7.6.1-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:f5796e664fe802da4f57a168c85359a8fbf3eab5e55cd4e4569fbacecc903959"},
|
||||
{file = "coverage-7.6.1-cp310-cp310-win32.whl", hash = "sha256:7bb65125fcbef8d989fa1dd0e8a060999497629ca5b0efbca209588a73356232"},
|
||||
{file = "coverage-7.6.1-cp310-cp310-win_amd64.whl", hash = "sha256:3115a95daa9bdba70aea750db7b96b37259a81a709223c8448fa97727d546fe0"},
|
||||
{file = "coverage-7.6.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:7dea0889685db8550f839fa202744652e87c60015029ce3f60e006f8c4462c93"},
|
||||
{file = "coverage-7.6.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:ed37bd3c3b063412f7620464a9ac1314d33100329f39799255fb8d3027da50d3"},
|
||||
{file = "coverage-7.6.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d85f5e9a5f8b73e2350097c3756ef7e785f55bd71205defa0bfdaf96c31616ff"},
|
||||
{file = "coverage-7.6.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9bc572be474cafb617672c43fe989d6e48d3c83af02ce8de73fff1c6bb3c198d"},
|
||||
{file = "coverage-7.6.1-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0c0420b573964c760df9e9e86d1a9a622d0d27f417e1a949a8a66dd7bcee7bc6"},
|
||||
{file = "coverage-7.6.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:1f4aa8219db826ce6be7099d559f8ec311549bfc4046f7f9fe9b5cea5c581c56"},
|
||||
{file = "coverage-7.6.1-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:fc5a77d0c516700ebad189b587de289a20a78324bc54baee03dd486f0855d234"},
|
||||
{file = "coverage-7.6.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:b48f312cca9621272ae49008c7f613337c53fadca647d6384cc129d2996d1133"},
|
||||
{file = "coverage-7.6.1-cp311-cp311-win32.whl", hash = "sha256:1125ca0e5fd475cbbba3bb67ae20bd2c23a98fac4e32412883f9bcbaa81c314c"},
|
||||
{file = "coverage-7.6.1-cp311-cp311-win_amd64.whl", hash = "sha256:8ae539519c4c040c5ffd0632784e21b2f03fc1340752af711f33e5be83a9d6c6"},
|
||||
{file = "coverage-7.6.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:95cae0efeb032af8458fc27d191f85d1717b1d4e49f7cb226cf526ff28179778"},
|
||||
{file = "coverage-7.6.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:5621a9175cf9d0b0c84c2ef2b12e9f5f5071357c4d2ea6ca1cf01814f45d2391"},
|
||||
{file = "coverage-7.6.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:260933720fdcd75340e7dbe9060655aff3af1f0c5d20f46b57f262ab6c86a5e8"},
|
||||
{file = "coverage-7.6.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:07e2ca0ad381b91350c0ed49d52699b625aab2b44b65e1b4e02fa9df0e92ad2d"},
|
||||
{file = "coverage-7.6.1-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c44fee9975f04b33331cb8eb272827111efc8930cfd582e0320613263ca849ca"},
|
||||
{file = "coverage-7.6.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:877abb17e6339d96bf08e7a622d05095e72b71f8afd8a9fefc82cf30ed944163"},
|
||||
{file = "coverage-7.6.1-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:3e0cadcf6733c09154b461f1ca72d5416635e5e4ec4e536192180d34ec160f8a"},
|
||||
{file = "coverage-7.6.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:c3c02d12f837d9683e5ab2f3d9844dc57655b92c74e286c262e0fc54213c216d"},
|
||||
{file = "coverage-7.6.1-cp312-cp312-win32.whl", hash = "sha256:e05882b70b87a18d937ca6768ff33cc3f72847cbc4de4491c8e73880766718e5"},
|
||||
{file = "coverage-7.6.1-cp312-cp312-win_amd64.whl", hash = "sha256:b5d7b556859dd85f3a541db6a4e0167b86e7273e1cdc973e5b175166bb634fdb"},
|
||||
{file = "coverage-7.6.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:a4acd025ecc06185ba2b801f2de85546e0b8ac787cf9d3b06e7e2a69f925b106"},
|
||||
{file = "coverage-7.6.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:a6d3adcf24b624a7b778533480e32434a39ad8fa30c315208f6d3e5542aeb6e9"},
|
||||
{file = "coverage-7.6.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d0c212c49b6c10e6951362f7c6df3329f04c2b1c28499563d4035d964ab8e08c"},
|
||||
{file = "coverage-7.6.1-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6e81d7a3e58882450ec4186ca59a3f20a5d4440f25b1cff6f0902ad890e6748a"},
|
||||
{file = "coverage-7.6.1-cp313-cp313-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:78b260de9790fd81e69401c2dc8b17da47c8038176a79092a89cb2b7d945d060"},
|
||||
{file = "coverage-7.6.1-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:a78d169acd38300060b28d600344a803628c3fd585c912cacc9ea8790fe96862"},
|
||||
{file = "coverage-7.6.1-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:2c09f4ce52cb99dd7505cd0fc8e0e37c77b87f46bc9c1eb03fe3bc9991085388"},
|
||||
{file = "coverage-7.6.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:6878ef48d4227aace338d88c48738a4258213cd7b74fd9a3d4d7582bb1d8a155"},
|
||||
{file = "coverage-7.6.1-cp313-cp313-win32.whl", hash = "sha256:44df346d5215a8c0e360307d46ffaabe0f5d3502c8a1cefd700b34baf31d411a"},
|
||||
{file = "coverage-7.6.1-cp313-cp313-win_amd64.whl", hash = "sha256:8284cf8c0dd272a247bc154eb6c95548722dce90d098c17a883ed36e67cdb129"},
|
||||
{file = "coverage-7.6.1-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:d3296782ca4eab572a1a4eca686d8bfb00226300dcefdf43faa25b5242ab8a3e"},
|
||||
{file = "coverage-7.6.1-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:502753043567491d3ff6d08629270127e0c31d4184c4c8d98f92c26f65019962"},
|
||||
{file = "coverage-7.6.1-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6a89ecca80709d4076b95f89f308544ec8f7b4727e8a547913a35f16717856cb"},
|
||||
{file = "coverage-7.6.1-cp313-cp313t-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a318d68e92e80af8b00fa99609796fdbcdfef3629c77c6283566c6f02c6d6704"},
|
||||
{file = "coverage-7.6.1-cp313-cp313t-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:13b0a73a0896988f053e4fbb7de6d93388e6dd292b0d87ee51d106f2c11b465b"},
|
||||
{file = "coverage-7.6.1-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:4421712dbfc5562150f7554f13dde997a2e932a6b5f352edcce948a815efee6f"},
|
||||
{file = "coverage-7.6.1-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:166811d20dfea725e2e4baa71fffd6c968a958577848d2131f39b60043400223"},
|
||||
{file = "coverage-7.6.1-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:225667980479a17db1048cb2bf8bfb39b8e5be8f164b8f6628b64f78a72cf9d3"},
|
||||
{file = "coverage-7.6.1-cp313-cp313t-win32.whl", hash = "sha256:170d444ab405852903b7d04ea9ae9b98f98ab6d7e63e1115e82620807519797f"},
|
||||
{file = "coverage-7.6.1-cp313-cp313t-win_amd64.whl", hash = "sha256:b9f222de8cded79c49bf184bdbc06630d4c58eec9459b939b4a690c82ed05657"},
|
||||
{file = "coverage-7.6.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:6db04803b6c7291985a761004e9060b2bca08da6d04f26a7f2294b8623a0c1a0"},
|
||||
{file = "coverage-7.6.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:f1adfc8ac319e1a348af294106bc6a8458a0f1633cc62a1446aebc30c5fa186a"},
|
||||
{file = "coverage-7.6.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a95324a9de9650a729239daea117df21f4b9868ce32e63f8b650ebe6cef5595b"},
|
||||
{file = "coverage-7.6.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b43c03669dc4618ec25270b06ecd3ee4fa94c7f9b3c14bae6571ca00ef98b0d3"},
|
||||
{file = "coverage-7.6.1-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8929543a7192c13d177b770008bc4e8119f2e1f881d563fc6b6305d2d0ebe9de"},
|
||||
{file = "coverage-7.6.1-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:a09ece4a69cf399510c8ab25e0950d9cf2b42f7b3cb0374f95d2e2ff594478a6"},
|
||||
{file = "coverage-7.6.1-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:9054a0754de38d9dbd01a46621636689124d666bad1936d76c0341f7d71bf569"},
|
||||
{file = "coverage-7.6.1-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:0dbde0f4aa9a16fa4d754356a8f2e36296ff4d83994b2c9d8398aa32f222f989"},
|
||||
{file = "coverage-7.6.1-cp38-cp38-win32.whl", hash = "sha256:da511e6ad4f7323ee5702e6633085fb76c2f893aaf8ce4c51a0ba4fc07580ea7"},
|
||||
{file = "coverage-7.6.1-cp38-cp38-win_amd64.whl", hash = "sha256:3f1156e3e8f2872197af3840d8ad307a9dd18e615dc64d9ee41696f287c57ad8"},
|
||||
{file = "coverage-7.6.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:abd5fd0db5f4dc9289408aaf34908072f805ff7792632250dcb36dc591d24255"},
|
||||
{file = "coverage-7.6.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:547f45fa1a93154bd82050a7f3cddbc1a7a4dd2a9bf5cb7d06f4ae29fe94eaf8"},
|
||||
{file = "coverage-7.6.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:645786266c8f18a931b65bfcefdbf6952dd0dea98feee39bd188607a9d307ed2"},
|
||||
{file = "coverage-7.6.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9e0b2df163b8ed01d515807af24f63de04bebcecbd6c3bfeff88385789fdf75a"},
|
||||
{file = "coverage-7.6.1-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:609b06f178fe8e9f89ef676532760ec0b4deea15e9969bf754b37f7c40326dbc"},
|
||||
{file = "coverage-7.6.1-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:702855feff378050ae4f741045e19a32d57d19f3e0676d589df0575008ea5004"},
|
||||
{file = "coverage-7.6.1-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:2bdb062ea438f22d99cba0d7829c2ef0af1d768d1e4a4f528087224c90b132cb"},
|
||||
{file = "coverage-7.6.1-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:9c56863d44bd1c4fe2abb8a4d6f5371d197f1ac0ebdee542f07f35895fc07f36"},
|
||||
{file = "coverage-7.6.1-cp39-cp39-win32.whl", hash = "sha256:6e2cd258d7d927d09493c8df1ce9174ad01b381d4729a9d8d4e38670ca24774c"},
|
||||
{file = "coverage-7.6.1-cp39-cp39-win_amd64.whl", hash = "sha256:06a737c882bd26d0d6ee7269b20b12f14a8704807a01056c80bb881a4b2ce6ca"},
|
||||
{file = "coverage-7.6.1-pp38.pp39.pp310-none-any.whl", hash = "sha256:e9a6e0eb86070e8ccaedfbd9d38fec54864f3125ab95419970575b42af7541df"},
|
||||
{file = "coverage-7.6.1.tar.gz", hash = "sha256:953510dfb7b12ab69d20135a0662397f077c59b1e6379a768e97c59d852ee51d"},
|
||||
]

[package.dependencies]
@@ -737,6 +791,20 @@ python-dateutil = ">=2.8.1"
[package.extras]
dev = ["flake8", "markdown", "twine", "wheel"]

[[package]]
name = "griffe"
version = "1.1.0"
description = "Signatures for entire Python programs. Extract the structure, the frame, the skeleton of your project, to generate API documentation or find breaking changes in your API."
optional = false
python-versions = ">=3.8"
files = [
    {file = "griffe-1.1.0-py3-none-any.whl", hash = "sha256:38ccc5721571c95ae427123074cf0dc0d36bce7c9701ab2ada9fe0566ff50c10"},
    {file = "griffe-1.1.0.tar.gz", hash = "sha256:c6328cbdec0d449549c1cc332f59227cd5603f903479d73e4425d828b782ffc3"},
]

[package.dependencies]
colorama = ">=0.4"

[[package]]
name = "h11"
version = "0.14.0"
@@ -986,40 +1054,40 @@ files = [

[[package]]
name = "matplotlib"
version = "3.9.1"
version = "3.9.0"
description = "Python plotting package"
optional = false
python-versions = ">=3.9"
files = [
{file = "matplotlib-3.9.1-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:7ccd6270066feb9a9d8e0705aa027f1ff39f354c72a87efe8fa07632f30fc6bb"},
|
||||
{file = "matplotlib-3.9.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:591d3a88903a30a6d23b040c1e44d1afdd0d778758d07110eb7596f811f31842"},
|
||||
{file = "matplotlib-3.9.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:dd2a59ff4b83d33bca3b5ec58203cc65985367812cb8c257f3e101632be86d92"},
|
||||
{file = "matplotlib-3.9.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0fc001516ffcf1a221beb51198b194d9230199d6842c540108e4ce109ac05cc0"},
|
||||
{file = "matplotlib-3.9.1-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:83c6a792f1465d174c86d06f3ae85a8fe36e6f5964633ae8106312ec0921fdf5"},
|
||||
{file = "matplotlib-3.9.1-cp310-cp310-win_amd64.whl", hash = "sha256:421851f4f57350bcf0811edd754a708d2275533e84f52f6760b740766c6747a7"},
|
||||
{file = "matplotlib-3.9.1-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:b3fce58971b465e01b5c538f9d44915640c20ec5ff31346e963c9e1cd66fa812"},
|
||||
{file = "matplotlib-3.9.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:a973c53ad0668c53e0ed76b27d2eeeae8799836fd0d0caaa4ecc66bf4e6676c0"},
|
||||
{file = "matplotlib-3.9.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:82cd5acf8f3ef43f7532c2f230249720f5dc5dd40ecafaf1c60ac8200d46d7eb"},
|
||||
{file = "matplotlib-3.9.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ab38a4f3772523179b2f772103d8030215b318fef6360cb40558f585bf3d017f"},
|
||||
{file = "matplotlib-3.9.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:2315837485ca6188a4b632c5199900e28d33b481eb083663f6a44cfc8987ded3"},
|
||||
{file = "matplotlib-3.9.1-cp311-cp311-win_amd64.whl", hash = "sha256:a0c977c5c382f6696caf0bd277ef4f936da7e2aa202ff66cad5f0ac1428ee15b"},
|
||||
{file = "matplotlib-3.9.1-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:565d572efea2b94f264dd86ef27919515aa6d629252a169b42ce5f570db7f37b"},
|
||||
{file = "matplotlib-3.9.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:6d397fd8ccc64af2ec0af1f0efc3bacd745ebfb9d507f3f552e8adb689ed730a"},
|
||||
{file = "matplotlib-3.9.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:26040c8f5121cd1ad712abffcd4b5222a8aec3a0fe40bc8542c94331deb8780d"},
|
||||
{file = "matplotlib-3.9.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d12cb1837cffaac087ad6b44399d5e22b78c729de3cdae4629e252067b705e2b"},
|
||||
{file = "matplotlib-3.9.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:0e835c6988edc3d2d08794f73c323cc62483e13df0194719ecb0723b564e0b5c"},
|
||||
{file = "matplotlib-3.9.1-cp312-cp312-win_amd64.whl", hash = "sha256:44a21d922f78ce40435cb35b43dd7d573cf2a30138d5c4b709d19f00e3907fd7"},
|
||||
{file = "matplotlib-3.9.1-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:0c584210c755ae921283d21d01f03a49ef46d1afa184134dd0f95b0202ee6f03"},
|
||||
{file = "matplotlib-3.9.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:11fed08f34fa682c2b792942f8902e7aefeed400da71f9e5816bea40a7ce28fe"},
|
||||
{file = "matplotlib-3.9.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0000354e32efcfd86bda75729716b92f5c2edd5b947200be9881f0a671565c33"},
|
||||
{file = "matplotlib-3.9.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4db17fea0ae3aceb8e9ac69c7e3051bae0b3d083bfec932240f9bf5d0197a049"},
|
||||
{file = "matplotlib-3.9.1-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:208cbce658b72bf6a8e675058fbbf59f67814057ae78165d8a2f87c45b48d0ff"},
|
||||
{file = "matplotlib-3.9.1-cp39-cp39-win_amd64.whl", hash = "sha256:dc23f48ab630474264276be156d0d7710ac6c5a09648ccdf49fef9200d8cbe80"},
|
||||
{file = "matplotlib-3.9.1-pp39-pypy39_pp73-macosx_10_15_x86_64.whl", hash = "sha256:3fda72d4d472e2ccd1be0e9ccb6bf0d2eaf635e7f8f51d737ed7e465ac020cb3"},
|
||||
{file = "matplotlib-3.9.1-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:84b3ba8429935a444f1fdc80ed930babbe06725bcf09fbeb5c8757a2cd74af04"},
|
||||
{file = "matplotlib-3.9.1-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b918770bf3e07845408716e5bbda17eadfc3fcbd9307dc67f37d6cf834bb3d98"},
|
||||
{file = "matplotlib-3.9.1-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:f1f2e5d29e9435c97ad4c36fb6668e89aee13d48c75893e25cef064675038ac9"},
|
||||
{file = "matplotlib-3.9.1.tar.gz", hash = "sha256:de06b19b8db95dd33d0dc17c926c7c9ebed9f572074b6fac4f65068a6814d010"},
|
||||
{file = "matplotlib-3.9.0-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:2bcee1dffaf60fe7656183ac2190bd630842ff87b3153afb3e384d966b57fe56"},
|
||||
{file = "matplotlib-3.9.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:3f988bafb0fa39d1074ddd5bacd958c853e11def40800c5824556eb630f94d3b"},
|
||||
{file = "matplotlib-3.9.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fe428e191ea016bb278758c8ee82a8129c51d81d8c4bc0846c09e7e8e9057241"},
|
||||
{file = "matplotlib-3.9.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:eaf3978060a106fab40c328778b148f590e27f6fa3cd15a19d6892575bce387d"},
|
||||
{file = "matplotlib-3.9.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:2e7f03e5cbbfacdd48c8ea394d365d91ee8f3cae7e6ec611409927b5ed997ee4"},
|
||||
{file = "matplotlib-3.9.0-cp310-cp310-win_amd64.whl", hash = "sha256:13beb4840317d45ffd4183a778685e215939be7b08616f431c7795276e067463"},
|
||||
{file = "matplotlib-3.9.0-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:063af8587fceeac13b0936c42a2b6c732c2ab1c98d38abc3337e430e1ff75e38"},
|
||||
{file = "matplotlib-3.9.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:9a2fa6d899e17ddca6d6526cf6e7ba677738bf2a6a9590d702c277204a7c6152"},
|
||||
{file = "matplotlib-3.9.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:550cdda3adbd596078cca7d13ed50b77879104e2e46392dcd7c75259d8f00e85"},
|
||||
{file = "matplotlib-3.9.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:76cce0f31b351e3551d1f3779420cf8f6ec0d4a8cf9c0237a3b549fd28eb4abb"},
|
||||
{file = "matplotlib-3.9.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:c53aeb514ccbbcbab55a27f912d79ea30ab21ee0531ee2c09f13800efb272674"},
|
||||
{file = "matplotlib-3.9.0-cp311-cp311-win_amd64.whl", hash = "sha256:a5be985db2596d761cdf0c2eaf52396f26e6a64ab46bd8cd810c48972349d1be"},
|
||||
{file = "matplotlib-3.9.0-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:c79f3a585f1368da6049318bdf1f85568d8d04b2e89fc24b7e02cc9b62017382"},
|
||||
{file = "matplotlib-3.9.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:bdd1ecbe268eb3e7653e04f451635f0fb0f77f07fd070242b44c076c9106da84"},
|
||||
{file = "matplotlib-3.9.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d38e85a1a6d732f645f1403ce5e6727fd9418cd4574521d5803d3d94911038e5"},
|
||||
{file = "matplotlib-3.9.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0a490715b3b9984fa609116481b22178348c1a220a4499cda79132000a79b4db"},
|
||||
{file = "matplotlib-3.9.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:8146ce83cbc5dc71c223a74a1996d446cd35cfb6a04b683e1446b7e6c73603b7"},
|
||||
{file = "matplotlib-3.9.0-cp312-cp312-win_amd64.whl", hash = "sha256:d91a4ffc587bacf5c4ce4ecfe4bcd23a4b675e76315f2866e588686cc97fccdf"},
|
||||
{file = "matplotlib-3.9.0-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:616fabf4981a3b3c5a15cd95eba359c8489c4e20e03717aea42866d8d0465956"},
|
||||
{file = "matplotlib-3.9.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:cd53c79fd02f1c1808d2cfc87dd3cf4dbc63c5244a58ee7944497107469c8d8a"},
|
||||
{file = "matplotlib-3.9.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:06a478f0d67636554fa78558cfbcd7b9dba85b51f5c3b5a0c9be49010cf5f321"},
|
||||
{file = "matplotlib-3.9.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:81c40af649d19c85f8073e25e5806926986806fa6d54be506fbf02aef47d5a89"},
|
||||
{file = "matplotlib-3.9.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:52146fc3bd7813cc784562cb93a15788be0b2875c4655e2cc6ea646bfa30344b"},
|
||||
{file = "matplotlib-3.9.0-cp39-cp39-win_amd64.whl", hash = "sha256:0fc51eaa5262553868461c083d9adadb11a6017315f3a757fc45ec6ec5f02888"},
|
||||
{file = "matplotlib-3.9.0-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:bd4f2831168afac55b881db82a7730992aa41c4f007f1913465fb182d6fb20c0"},
|
||||
{file = "matplotlib-3.9.0-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:290d304e59be2b33ef5c2d768d0237f5bd132986bdcc66f80bc9bcc300066a03"},
|
||||
{file = "matplotlib-3.9.0-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7ff2e239c26be4f24bfa45860c20ffccd118d270c5b5d081fa4ea409b5469fcd"},
|
||||
{file = "matplotlib-3.9.0-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:af4001b7cae70f7eaacfb063db605280058246de590fa7874f00f62259f2df7e"},
|
||||
{file = "matplotlib-3.9.0.tar.gz", hash = "sha256:e6d29ea6c19e34b30fb7d88b7081f869a03014f66fe06d62cc77d5a6ea88ed7a"},
|
||||
]
|
||||
|
||||
[package.dependencies]
|
||||
@@ -1126,13 +1194,13 @@ test = ["mkdocs (==1.4.0)", "pytest (==7.1.3)", "pytest-cov (==3.0.0)"]
|
||||
|
||||
[[package]]
|
||||
name = "mkdocs-material"
|
||||
version = "9.5.30"
|
||||
version = "9.5.31"
|
||||
description = "Documentation that simply works"
|
||||
optional = false
|
||||
python-versions = ">=3.8"
|
||||
files = [
|
||||
{file = "mkdocs_material-9.5.30-py3-none-any.whl", hash = "sha256:fc070689c5250a180e9b9d79d8491ef9a3a7acb240db0728728d6c31eeb131d4"},
|
||||
{file = "mkdocs_material-9.5.30.tar.gz", hash = "sha256:3fd417dd42d679e3ba08b9e2d72cd8b8af142cc4a3969676ad6b00993dd182ec"},
|
||||
{file = "mkdocs_material-9.5.31-py3-none-any.whl", hash = "sha256:1b1f49066fdb3824c1e96d6bacd2d4375de4ac74580b47e79ff44c4d835c5fcb"},
|
||||
{file = "mkdocs_material-9.5.31.tar.gz", hash = "sha256:31833ec664772669f5856f4f276bf3fdf0e642a445e64491eda459249c3a1ca8"},
|
||||
]
|
||||
|
||||
[package.dependencies]
|
||||
@@ -1180,21 +1248,24 @@ beautifulsoup4 = ">=4.11.1"
|
||||
|
||||
[[package]]
|
||||
name = "mkdocstrings"
|
||||
version = "0.22.0"
|
||||
version = "0.25.2"
|
||||
description = "Automatic documentation from sources, for MkDocs."
|
||||
optional = false
|
||||
python-versions = ">=3.7"
|
||||
python-versions = ">=3.8"
|
||||
files = [
|
||||
{file = "mkdocstrings-0.22.0-py3-none-any.whl", hash = "sha256:2d4095d461554ff6a778fdabdca3c00c468c2f1459d469f7a7f622a2b23212ba"},
|
||||
{file = "mkdocstrings-0.22.0.tar.gz", hash = "sha256:82a33b94150ebb3d4b5c73bab4598c3e21468c79ec072eff6931c8f3bfc38256"},
|
||||
{file = "mkdocstrings-0.25.2-py3-none-any.whl", hash = "sha256:9e2cda5e2e12db8bb98d21e3410f3f27f8faab685a24b03b06ba7daa5b92abfc"},
|
||||
{file = "mkdocstrings-0.25.2.tar.gz", hash = "sha256:5cf57ad7f61e8be3111a2458b4e49c2029c9cb35525393b179f9c916ca8042dc"},
|
||||
]
|
||||
|
||||
[package.dependencies]
|
||||
click = ">=7.0"
|
||||
Jinja2 = ">=2.11.1"
|
||||
Markdown = ">=3.3"
|
||||
MarkupSafe = ">=1.1"
|
||||
mkdocs = ">=1.2"
|
||||
mkdocs = ">=1.4"
|
||||
mkdocs-autorefs = ">=0.3.1"
|
||||
mkdocstrings-python = {version = ">=0.5.2", optional = true, markers = "extra == \"python\""}
|
||||
platformdirs = ">=2.2.0"
|
||||
pymdown-extensions = ">=6.3"
|
||||
|
||||
[package.extras]
|
||||
@@ -1202,6 +1273,21 @@ crystal = ["mkdocstrings-crystal (>=0.3.4)"]
|
||||
python = ["mkdocstrings-python (>=0.5.2)"]
|
||||
python-legacy = ["mkdocstrings-python-legacy (>=0.2.1)"]
|
||||
|
||||
[[package]]
|
||||
name = "mkdocstrings-python"
|
||||
version = "1.10.8"
|
||||
description = "A Python handler for mkdocstrings."
|
||||
optional = false
|
||||
python-versions = ">=3.8"
|
||||
files = [
|
||||
{file = "mkdocstrings_python-1.10.8-py3-none-any.whl", hash = "sha256:bb12e76c8b071686617f824029cb1dfe0e9afe89f27fb3ad9a27f95f054dcd89"},
|
||||
{file = "mkdocstrings_python-1.10.8.tar.gz", hash = "sha256:5856a59cbebbb8deb133224a540de1ff60bded25e54d8beacc375bb133d39016"},
|
||||
]
|
||||
|
||||
[package.dependencies]
|
||||
griffe = ">=0.49"
|
||||
mkdocstrings = ">=0.25"
|
||||
|
||||
[[package]]
|
||||
name = "multidict"
|
||||
version = "6.0.5"
|
||||
@@ -1303,38 +1389,38 @@ files = [
|
||||
|
||||
[[package]]
|
||||
name = "mypy"
|
||||
version = "1.11.0"
|
||||
version = "1.11.1"
|
||||
description = "Optional static typing for Python"
|
||||
optional = false
|
||||
python-versions = ">=3.8"
|
||||
files = [
|
||||
{file = "mypy-1.11.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:a3824187c99b893f90c845bab405a585d1ced4ff55421fdf5c84cb7710995229"},
|
||||
{file = "mypy-1.11.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:96f8dbc2c85046c81bcddc246232d500ad729cb720da4e20fce3b542cab91287"},
|
||||
{file = "mypy-1.11.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:1a5d8d8dd8613a3e2be3eae829ee891b6b2de6302f24766ff06cb2875f5be9c6"},
|
||||
{file = "mypy-1.11.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:72596a79bbfb195fd41405cffa18210af3811beb91ff946dbcb7368240eed6be"},
|
||||
{file = "mypy-1.11.0-cp310-cp310-win_amd64.whl", hash = "sha256:35ce88b8ed3a759634cb4eb646d002c4cef0a38f20565ee82b5023558eb90c00"},
|
||||
{file = "mypy-1.11.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:98790025861cb2c3db8c2f5ad10fc8c336ed2a55f4daf1b8b3f877826b6ff2eb"},
|
||||
{file = "mypy-1.11.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:25bcfa75b9b5a5f8d67147a54ea97ed63a653995a82798221cca2a315c0238c1"},
|
||||
{file = "mypy-1.11.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:0bea2a0e71c2a375c9fa0ede3d98324214d67b3cbbfcbd55ac8f750f85a414e3"},
|
||||
{file = "mypy-1.11.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:d2b3d36baac48e40e3064d2901f2fbd2a2d6880ec6ce6358825c85031d7c0d4d"},
|
||||
{file = "mypy-1.11.0-cp311-cp311-win_amd64.whl", hash = "sha256:d8e2e43977f0e09f149ea69fd0556623919f816764e26d74da0c8a7b48f3e18a"},
|
||||
{file = "mypy-1.11.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:1d44c1e44a8be986b54b09f15f2c1a66368eb43861b4e82573026e04c48a9e20"},
|
||||
{file = "mypy-1.11.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:cea3d0fb69637944dd321f41bc896e11d0fb0b0aa531d887a6da70f6e7473aba"},
|
||||
{file = "mypy-1.11.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:a83ec98ae12d51c252be61521aa5731f5512231d0b738b4cb2498344f0b840cd"},
|
||||
{file = "mypy-1.11.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:c7b73a856522417beb78e0fb6d33ef89474e7a622db2653bc1285af36e2e3e3d"},
|
||||
{file = "mypy-1.11.0-cp312-cp312-win_amd64.whl", hash = "sha256:f2268d9fcd9686b61ab64f077be7ffbc6fbcdfb4103e5dd0cc5eaab53a8886c2"},
|
||||
{file = "mypy-1.11.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:940bfff7283c267ae6522ef926a7887305945f716a7704d3344d6d07f02df850"},
|
||||
{file = "mypy-1.11.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:14f9294528b5f5cf96c721f231c9f5b2733164e02c1c018ed1a0eff8a18005ac"},
|
||||
{file = "mypy-1.11.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:d7b54c27783991399046837df5c7c9d325d921394757d09dbcbf96aee4649fe9"},
|
||||
{file = "mypy-1.11.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:65f190a6349dec29c8d1a1cd4aa71284177aee5949e0502e6379b42873eddbe7"},
|
||||
{file = "mypy-1.11.0-cp38-cp38-win_amd64.whl", hash = "sha256:dbe286303241fea8c2ea5466f6e0e6a046a135a7e7609167b07fd4e7baf151bf"},
|
||||
{file = "mypy-1.11.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:104e9c1620c2675420abd1f6c44bab7dd33cc85aea751c985006e83dcd001095"},
|
||||
{file = "mypy-1.11.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:f006e955718ecd8d159cee9932b64fba8f86ee6f7728ca3ac66c3a54b0062abe"},
|
||||
{file = "mypy-1.11.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:becc9111ca572b04e7e77131bc708480cc88a911adf3d0239f974c034b78085c"},
|
||||
{file = "mypy-1.11.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:6801319fe76c3f3a3833f2b5af7bd2c17bb93c00026a2a1b924e6762f5b19e13"},
|
||||
{file = "mypy-1.11.0-cp39-cp39-win_amd64.whl", hash = "sha256:c1a184c64521dc549324ec6ef7cbaa6b351912be9cb5edb803c2808a0d7e85ac"},
|
||||
{file = "mypy-1.11.0-py3-none-any.whl", hash = "sha256:56913ec8c7638b0091ef4da6fcc9136896914a9d60d54670a75880c3e5b99ace"},
|
||||
{file = "mypy-1.11.0.tar.gz", hash = "sha256:93743608c7348772fdc717af4aeee1997293a1ad04bc0ea6efa15bf65385c538"},
|
||||
{file = "mypy-1.11.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:a32fc80b63de4b5b3e65f4be82b4cfa362a46702672aa6a0f443b4689af7008c"},
|
||||
{file = "mypy-1.11.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:c1952f5ea8a5a959b05ed5f16452fddadbaae48b5d39235ab4c3fc444d5fd411"},
|
||||
{file = "mypy-1.11.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:e1e30dc3bfa4e157e53c1d17a0dad20f89dc433393e7702b813c10e200843b03"},
|
||||
{file = "mypy-1.11.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:2c63350af88f43a66d3dfeeeb8d77af34a4f07d760b9eb3a8697f0386c7590b4"},
|
||||
{file = "mypy-1.11.1-cp310-cp310-win_amd64.whl", hash = "sha256:a831671bad47186603872a3abc19634f3011d7f83b083762c942442d51c58d58"},
|
||||
{file = "mypy-1.11.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:7b6343d338390bb946d449677726edf60102a1c96079b4f002dedff375953fc5"},
|
||||
{file = "mypy-1.11.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:e4fe9f4e5e521b458d8feb52547f4bade7ef8c93238dfb5bbc790d9ff2d770ca"},
|
||||
{file = "mypy-1.11.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:886c9dbecc87b9516eff294541bf7f3655722bf22bb898ee06985cd7269898de"},
|
||||
{file = "mypy-1.11.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:fca4a60e1dd9fd0193ae0067eaeeb962f2d79e0d9f0f66223a0682f26ffcc809"},
|
||||
{file = "mypy-1.11.1-cp311-cp311-win_amd64.whl", hash = "sha256:0bd53faf56de9643336aeea1c925012837432b5faf1701ccca7fde70166ccf72"},
|
||||
{file = "mypy-1.11.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:f39918a50f74dc5969807dcfaecafa804fa7f90c9d60506835036cc1bc891dc8"},
|
||||
{file = "mypy-1.11.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:0bc71d1fb27a428139dd78621953effe0d208aed9857cb08d002280b0422003a"},
|
||||
{file = "mypy-1.11.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:b868d3bcff720dd7217c383474008ddabaf048fad8d78ed948bb4b624870a417"},
|
||||
{file = "mypy-1.11.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:a707ec1527ffcdd1c784d0924bf5cb15cd7f22683b919668a04d2b9c34549d2e"},
|
||||
{file = "mypy-1.11.1-cp312-cp312-win_amd64.whl", hash = "sha256:64f4a90e3ea07f590c5bcf9029035cf0efeae5ba8be511a8caada1a4893f5525"},
|
||||
{file = "mypy-1.11.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:749fd3213916f1751fff995fccf20c6195cae941dc968f3aaadf9bb4e430e5a2"},
|
||||
{file = "mypy-1.11.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:b639dce63a0b19085213ec5fdd8cffd1d81988f47a2dec7100e93564f3e8fb3b"},
|
||||
{file = "mypy-1.11.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:4c956b49c5d865394d62941b109728c5c596a415e9c5b2be663dd26a1ff07bc0"},
|
||||
{file = "mypy-1.11.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:45df906e8b6804ef4b666af29a87ad9f5921aad091c79cc38e12198e220beabd"},
|
||||
{file = "mypy-1.11.1-cp38-cp38-win_amd64.whl", hash = "sha256:d44be7551689d9d47b7abc27c71257adfdb53f03880841a5db15ddb22dc63edb"},
|
||||
{file = "mypy-1.11.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:2684d3f693073ab89d76da8e3921883019ea8a3ec20fa5d8ecca6a2db4c54bbe"},
|
||||
{file = "mypy-1.11.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:79c07eb282cb457473add5052b63925e5cc97dfab9812ee65a7c7ab5e3cb551c"},
|
||||
{file = "mypy-1.11.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:11965c2f571ded6239977b14deebd3f4c3abd9a92398712d6da3a772974fad69"},
|
||||
{file = "mypy-1.11.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:a2b43895a0f8154df6519706d9bca8280cda52d3d9d1514b2d9c3e26792a0b74"},
|
||||
{file = "mypy-1.11.1-cp39-cp39-win_amd64.whl", hash = "sha256:1a81cf05975fd61aec5ae16501a091cfb9f605dc3e3c878c0da32f250b74760b"},
|
||||
{file = "mypy-1.11.1-py3-none-any.whl", hash = "sha256:0624bdb940255d2dd24e829d99a13cfeb72e4e9031f9492148f410ed30bcab54"},
|
||||
{file = "mypy-1.11.1.tar.gz", hash = "sha256:f404a0b069709f18bbdb702eb3dcfe51910602995de00bd39cea3050b5772d08"},
|
||||
]
|
||||
|
||||
[package.dependencies]
|
||||
@@ -1784,13 +1870,13 @@ diagrams = ["jinja2", "railroad-diagrams"]

[[package]]
name = "pyright"
version = "1.1.373"
version = "1.1.374"
description = "Command line wrapper for pyright"
optional = false
python-versions = ">=3.7"
files = [
    {file = "pyright-1.1.373-py3-none-any.whl", hash = "sha256:b805413227f2c209f27b14b55da27fe5e9fb84129c9f1eb27708a5d12f6f000e"},
    {file = "pyright-1.1.373.tar.gz", hash = "sha256:f41bcfc8b9d1802b09921a394d6ae1ce19694957b628bc657629688daf8a83ff"},
    {file = "pyright-1.1.374-py3-none-any.whl", hash = "sha256:55752bcf7a3646d293cd76710a983b71e16f6128aab2d42468e6eb7e46c0a70d"},
    {file = "pyright-1.1.374.tar.gz", hash = "sha256:d01b2daf864ba5e0362e56b844984865970d7204158e61eb685e2dab7804cb82"},
]

[package.dependencies]
@@ -2127,28 +2213,29 @@ use-chardet-on-py3 = ["chardet (>=3.0.2,<6)"]
|
||||
|
||||
[[package]]
|
||||
name = "ruff"
|
||||
version = "0.2.2"
|
||||
version = "0.5.6"
|
||||
description = "An extremely fast Python linter and code formatter, written in Rust."
|
||||
optional = false
|
||||
python-versions = ">=3.7"
|
||||
files = [
|
||||
{file = "ruff-0.2.2-py3-none-macosx_10_12_x86_64.macosx_11_0_arm64.macosx_10_12_universal2.whl", hash = "sha256:0a9efb032855ffb3c21f6405751d5e147b0c6b631e3ca3f6b20f917572b97eb6"},
|
||||
{file = "ruff-0.2.2-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:d450b7fbff85913f866a5384d8912710936e2b96da74541c82c1b458472ddb39"},
|
||||
{file = "ruff-0.2.2-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ecd46e3106850a5c26aee114e562c329f9a1fbe9e4821b008c4404f64ff9ce73"},
|
||||
{file = "ruff-0.2.2-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:5e22676a5b875bd72acd3d11d5fa9075d3a5f53b877fe7b4793e4673499318ba"},
|
||||
{file = "ruff-0.2.2-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1695700d1e25a99d28f7a1636d85bafcc5030bba9d0578c0781ba1790dbcf51c"},
|
||||
{file = "ruff-0.2.2-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:b0c232af3d0bd8f521806223723456ffebf8e323bd1e4e82b0befb20ba18388e"},
|
||||
{file = "ruff-0.2.2-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f63d96494eeec2fc70d909393bcd76c69f35334cdbd9e20d089fb3f0640216ca"},
|
||||
{file = "ruff-0.2.2-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:6a61ea0ff048e06de273b2e45bd72629f470f5da8f71daf09fe481278b175001"},
|
||||
{file = "ruff-0.2.2-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5e1439c8f407e4f356470e54cdecdca1bd5439a0673792dbe34a2b0a551a2fe3"},
|
||||
{file = "ruff-0.2.2-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:940de32dc8853eba0f67f7198b3e79bc6ba95c2edbfdfac2144c8235114d6726"},
|
||||
{file = "ruff-0.2.2-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:0c126da55c38dd917621552ab430213bdb3273bb10ddb67bc4b761989210eb6e"},
|
||||
{file = "ruff-0.2.2-py3-none-musllinux_1_2_i686.whl", hash = "sha256:3b65494f7e4bed2e74110dac1f0d17dc8e1f42faaa784e7c58a98e335ec83d7e"},
|
||||
{file = "ruff-0.2.2-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:1ec49be4fe6ddac0503833f3ed8930528e26d1e60ad35c2446da372d16651ce9"},
|
||||
{file = "ruff-0.2.2-py3-none-win32.whl", hash = "sha256:d920499b576f6c68295bc04e7b17b6544d9d05f196bb3aac4358792ef6f34325"},
|
||||
{file = "ruff-0.2.2-py3-none-win_amd64.whl", hash = "sha256:cc9a91ae137d687f43a44c900e5d95e9617cb37d4c989e462980ba27039d239d"},
|
||||
{file = "ruff-0.2.2-py3-none-win_arm64.whl", hash = "sha256:c9d15fc41e6054bfc7200478720570078f0b41c9ae4f010bcc16bd6f4d1aacdd"},
|
||||
{file = "ruff-0.2.2.tar.gz", hash = "sha256:e62ed7f36b3068a30ba39193a14274cd706bc486fad521276458022f7bccb31d"},
|
||||
{file = "ruff-0.5.6-py3-none-linux_armv6l.whl", hash = "sha256:a0ef5930799a05522985b9cec8290b185952f3fcd86c1772c3bdbd732667fdcd"},
|
||||
{file = "ruff-0.5.6-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:b652dc14f6ef5d1552821e006f747802cc32d98d5509349e168f6bf0ee9f8f42"},
|
||||
{file = "ruff-0.5.6-py3-none-macosx_11_0_arm64.whl", hash = "sha256:80521b88d26a45e871f31e4b88938fd87db7011bb961d8afd2664982dfc3641a"},
|
||||
{file = "ruff-0.5.6-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d9bc8f328a9f1309ae80e4d392836e7dbc77303b38ed4a7112699e63d3b066ab"},
|
||||
{file = "ruff-0.5.6-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:4d394940f61f7720ad371ddedf14722ee1d6250fd8d020f5ea5a86e7be217daf"},
|
||||
{file = "ruff-0.5.6-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:111a99cdb02f69ddb2571e2756e017a1496c2c3a2aeefe7b988ddab38b416d36"},
|
||||
{file = "ruff-0.5.6-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:e395daba77a79f6dc0d07311f94cc0560375ca20c06f354c7c99af3bf4560c5d"},
|
||||
{file = "ruff-0.5.6-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c476acb43c3c51e3c614a2e878ee1589655fa02dab19fe2db0423a06d6a5b1b6"},
|
||||
{file = "ruff-0.5.6-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e2ff8003f5252fd68425fd53d27c1f08b201d7ed714bb31a55c9ac1d4c13e2eb"},
|
||||
{file = "ruff-0.5.6-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c94e084ba3eaa80c2172918c2ca2eb2230c3f15925f4ed8b6297260c6ef179ad"},
|
||||
{file = "ruff-0.5.6-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:1f77c1c3aa0669fb230b06fb24ffa3e879391a3ba3f15e3d633a752da5a3e670"},
|
||||
{file = "ruff-0.5.6-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:f908148c93c02873210a52cad75a6eda856b2cbb72250370ce3afef6fb99b1ed"},
|
||||
{file = "ruff-0.5.6-py3-none-musllinux_1_2_i686.whl", hash = "sha256:563a7ae61ad284187d3071d9041c08019975693ff655438d8d4be26e492760bd"},
|
||||
{file = "ruff-0.5.6-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:94fe60869bfbf0521e04fd62b74cbca21cbc5beb67cbb75ab33fe8c174f54414"},
|
||||
{file = "ruff-0.5.6-py3-none-win32.whl", hash = "sha256:e6a584c1de6f8591c2570e171cc7ce482bb983d49c70ddf014393cd39e9dfaed"},
|
||||
{file = "ruff-0.5.6-py3-none-win_amd64.whl", hash = "sha256:d7fe7dccb1a89dc66785d7aa0ac283b2269712d8ed19c63af908fdccca5ccc1a"},
|
||||
{file = "ruff-0.5.6-py3-none-win_arm64.whl", hash = "sha256:57c6c0dd997b31b536bff49b9eee5ed3194d60605a4427f735eeb1f9c1b8d264"},
|
||||
{file = "ruff-0.5.6.tar.gz", hash = "sha256:07c9e3c2a8e1fe377dd460371c3462671a728c981c3205a5217291422209f642"},
|
||||
]
|
||||
|
||||
[[package]]
|
||||
@@ -2179,6 +2266,17 @@ files = [
|
||||
{file = "six-1.16.0.tar.gz", hash = "sha256:1e61c37477a1626458e36f7b1d82aa5c9b094fa4802892072e49de9c60c4c926"},
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "sniffio"
|
||||
version = "1.3.1"
|
||||
description = "Sniff out which async library your code is running under"
|
||||
optional = false
|
||||
python-versions = ">=3.7"
|
||||
files = [
|
||||
{file = "sniffio-1.3.1-py3-none-any.whl", hash = "sha256:2f6da418d1f1e0fddd844478f41680e794e6051915791a034ff65e5f100525a2"},
|
||||
{file = "sniffio-1.3.1.tar.gz", hash = "sha256:f4324edc670a0f49750a81b895f35c3adb843cca46f0530f79fc1babb23789dc"},
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "soupsieve"
|
||||
version = "2.5"
|
||||
@@ -2431,4 +2529,4 @@ multidict = ">=4.0"
[metadata]
lock-version = "2.0"
python-versions = "^3.10"
content-hash = "80dda5533cf7111fd83407e0cce9228a99af644a6ffab4fa1abe085f4272cabf"
content-hash = "011b118225386513fc1c953c02bc1d58e40c198313de2a1f76183dd61ab9eec6"

@@ -1,6 +1,6 @@
[tool.poetry]
name = "pydase"
version = "0.8.5"
version = "0.10.7"
description = "A flexible and robust Python library for creating, managing, and interacting with data services, with built-in support for web and RPC servers, and customizable features for diverse use cases."
authors = ["Mose Mueller <mosmuell@ethz.ch>"]
readme = "README.md"

@@ -17,6 +17,7 @@ websocket-client = "^1.7.0"
aiohttp = "^3.9.3"
click = "^8.1.7"
aiohttp-middlewares = "^2.3.0"
anyio = "^4.6.0"

[tool.poetry.group.dev]
optional = true

@@ -29,9 +30,8 @@ mypy = "^1.4.1"
matplotlib = "^3.7.2"
pyright = "^1.1.323"
pytest-mock = "^3.11.1"
ruff = "^0.2.0"
ruff = "^0.5.0"
pytest-asyncio = "^0.23.2"
requests = "^2.32.3"

[tool.poetry.group.docs]
optional = true

@@ -39,7 +39,7 @@ optional = true
[tool.poetry.group.docs.dependencies]
mkdocs-material = "^9.5.30"
mkdocs-include-markdown-plugin = "^3.9.1"
mkdocstrings = "^0.22.0"
mkdocstrings = {extras = ["python"], version = "^0.25.2"}
pymdown-extensions = "^10.1"
mkdocs-swagger-ui-tag = "^0.6.10"

@@ -1,15 +1,24 @@
import asyncio
import logging
import sys
import threading
from typing import TypedDict, cast
import urllib.parse
from types import TracebackType
from typing import TYPE_CHECKING, Any, TypedDict, cast

import socketio  # type: ignore

import pydase.components
from pydase.client.proxy_loader import ProxyClassMixin, ProxyLoader
from pydase.client.proxy_class import ProxyClass
from pydase.client.proxy_loader import ProxyLoader
from pydase.utils.serialization.deserializer import loads
from pydase.utils.serialization.types import SerializedDataService, SerializedObject

if sys.version_info < (3, 11):
    from typing_extensions import Self
else:
    from typing import Self


logger = logging.getLogger(__name__)


@@ -27,46 +36,6 @@ def asyncio_loop_thread(loop: asyncio.AbstractEventLoop) -> None:
    loop.run_forever()


class ProxyClass(ProxyClassMixin, pydase.components.DeviceConnection):
    """
    A proxy class that serves as the interface for interacting with device connections
    via a socket.io client in an asyncio environment.

    Args:
        sio_client (socketio.AsyncClient):
            The socket.io client instance used for asynchronous communication with the
            pydase service server.
        loop (asyncio.AbstractEventLoop):
            The event loop in which the client operations are managed and executed.

    This class is used to create a proxy object that behaves like a local representation
    of a remote pydase service, facilitating direct interaction as if it were local
    while actually communicating over network protocols.
    It can also be used as an attribute of a pydase service itself, e.g.

    ```python
    import pydase


    class MyService(pydase.DataService):
        proxy = pydase.Client(
            hostname="...", port=8001, block_until_connected=False
        ).proxy


    if __name__ == "__main__":
        service = MyService()
        server = pydase.Server(service, web_port=8002).run()
    ```
    """

    def __init__(
        self, sio_client: socketio.AsyncClient, loop: asyncio.AbstractEventLoop
    ) -> None:
        super().__init__()
        self._initialise(sio_client=sio_client, loop=loop)


class Client:
    """
    A client for connecting to a remote pydase service using socket.io. This client
@@ -74,62 +43,116 @@ class Client:
    connection, disconnection, and updates, and ensures that the proxy object is
    up-to-date with the server state.

    Attributes:
        proxy (ProxyClass):
            A proxy object representing the remote service, facilitating interaction as
            if it were local.

    Args:
        hostname (str):
            Hostname of the exposed service this client attempts to connect to.
            Default is "localhost".
        port (int):
            Port of the exposed service this client attempts to connect on.
            Default is 8001.
        block_until_connected (bool):
        url:
            The URL of the pydase Socket.IO server. This should always contain the
            protocol and the hostname.
        block_until_connected:
            If set to True, the constructor will block until the connection to the
            service has been established. This is useful for ensuring the client is
            ready to use immediately after instantiation. Default is True.
        sio_client_kwargs:
            Additional keyword arguments passed to the underlying
            [`AsyncClient`][socketio.AsyncClient]. This allows fine-tuning of the
            client's behaviour (e.g., reconnection attempts or reconnection delay).
            Default is an empty dictionary.

    Example:
        The following example demonstrates a `Client` instance that connects to another
        pydase service, while customising some of the connection settings for the
        underlying [`AsyncClient`][socketio.AsyncClient].

        ```python
        pydase.Client(url="ws://localhost:8001", sio_client_kwargs={
            "reconnection_attempts": 2,
            "reconnection_delay": 2,
            "reconnection_delay_max": 8,
        })
        ```

        When connecting to a server over a secure connection (i.e., the server is using
        SSL/TLS encryption), make sure that the `wss` protocol is used instead of `ws`:

        ```python
        pydase.Client(url="wss://my-service.example.com")
        ```
    """

    def __init__(
        self,
        hostname: str,
        port: int,
        *,
        url: str,
        block_until_connected: bool = True,
        sio_client_kwargs: dict[str, Any] = {},
    ):
        self._hostname = hostname
        self._port = port
        self._sio = socketio.AsyncClient()
        # Parse the URL to separate base URL and path prefix
        parsed_url = urllib.parse.urlparse(url)

        # Construct the base URL without the path
        self._base_url = urllib.parse.urlunparse(
            (parsed_url.scheme, parsed_url.netloc, "", "", "", "")
        )

        # Store the path prefix (e.g., "/service" in "ws://localhost:8081/service")
        self._path_prefix = parsed_url.path.rstrip("/")  # Remove trailing slash if any
        self._url = url
        self._sio = socketio.AsyncClient(**sio_client_kwargs)
        self._loop = asyncio.new_event_loop()
        self.proxy = ProxyClass(sio_client=self._sio, loop=self._loop)
        self.proxy = ProxyClass(
            sio_client=self._sio, loop=self._loop, reconnect=self.connect
        )
        """A proxy object representing the remote service, facilitating interaction as
        if it were local."""
        self._thread = threading.Thread(
            target=asyncio_loop_thread, args=(self._loop,), daemon=True
        )
        self._thread.start()
        self.connect(block_until_connected=block_until_connected)

    def __enter__(self) -> Self:
        return self

    def __exit__(
        self,
        exc_type: type[BaseException] | None,
        exc_val: BaseException | None,
        exc_tb: TracebackType | None,
    ) -> None:
        self.disconnect()

    def connect(self, block_until_connected: bool = True) -> None:
        connection_future = asyncio.run_coroutine_threadsafe(
            self._connect(), self._loop
        )
        if block_until_connected:
            connection_future.result()

    def disconnect(self) -> None:
        connection_future = asyncio.run_coroutine_threadsafe(
            self._disconnect(), self._loop
        )
        connection_future.result()

    async def _connect(self) -> None:
        logger.debug("Connecting to server '%s:%s' ...", self._hostname, self._port)
        logger.debug("Connecting to server '%s' ...", self._url)
        await self._setup_events()
        await self._sio.connect(
            f"ws://{self._hostname}:{self._port}",
            socketio_path="/ws/socket.io",
            self._base_url,
            socketio_path=f"{self._path_prefix}/ws/socket.io",
            transports=["websocket"],
            retry=True,
        )

    async def _disconnect(self) -> None:
        await self._sio.disconnect()

    async def _setup_events(self) -> None:
        self._sio.on("connect", self._handle_connect)
        self._sio.on("disconnect", self._handle_disconnect)
        self._sio.on("notify", self._handle_update)

    async def _handle_connect(self) -> None:
        logger.debug("Connected to '%s:%s' ...", self._hostname, self._port)
        logger.debug("Connected to '%s' ...", self._url)
        serialized_object = cast(
            SerializedDataService, await self._sio.call("service_serialization")
        )

@@ -137,11 +160,17 @@ class Client:
            self.proxy, serialized_object=serialized_object
        )
        serialized_object["type"] = "DeviceConnection"
        self.proxy._notify_changed("", loads(serialized_object))
        if self.proxy._service_representation is not None:
            # need to use object.__setattr__ to not trigger an observer notification
            object.__setattr__(self.proxy, "_service_representation", serialized_object)

        if TYPE_CHECKING:
            self.proxy._service_representation = serialized_object  # type: ignore
        self.proxy._notify_changed("", self.proxy)
        self.proxy._connected = True

    async def _handle_disconnect(self) -> None:
        logger.debug("Disconnected from '%s:%s' ...", self._hostname, self._port)
        logger.debug("Disconnected from '%s' ...", self._url)
        self.proxy._connected = False

    async def _handle_update(self, data: NotifyDict) -> None:
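Taken together, these hunks replace the `hostname`/`port` pair with a single `url` argument, add context-manager support (`__enter__`/`__exit__`), and forward any URL path prefix to the socket.io endpoint. A minimal usage sketch, assuming a pydase service is reachable at the URLs shown and that the remote attribute `value` exists (both are placeholders, not part of the diff):

```python
import pydase

# Context-manager support added above: disconnect() is called on exit.
with pydase.Client(url="ws://localhost:8001") as client:
    print(client.proxy.value)  # read a remote attribute through the proxy
    client.proxy.value = 42    # write it back

# Behind a reverse proxy, the path prefix is part of the URL and ends up in
# the socket.io path ("/service/ws/socket.io" here); reconnection behaviour
# can be tuned through sio_client_kwargs.
client = pydase.Client(
    url="ws://localhost:8081/service",
    block_until_connected=False,
    sio_client_kwargs={"reconnection_attempts": 2},
)
```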
src/pydase/client/proxy_class.py (new file)
@@ -0,0 +1,112 @@
import asyncio
import logging
from collections.abc import Callable
from copy import deepcopy
from typing import TYPE_CHECKING, cast

import socketio  # type: ignore

import pydase.components
from pydase.client.proxy_loader import ProxyClassMixin
from pydase.utils.helpers import get_attribute_doc
from pydase.utils.serialization.types import SerializedDataService, SerializedObject

logger = logging.getLogger(__name__)


class ProxyClass(ProxyClassMixin, pydase.components.DeviceConnection):
    """
    A proxy class that serves as the interface for interacting with device connections
    via a socket.io client in an asyncio environment.

    Args:
        sio_client:
            The socket.io client instance used for asynchronous communication with the
            pydase service server.
        loop:
            The event loop in which the client operations are managed and executed.
        reconnect:
            The method that is called periodically when the client is not connected.

    This class is used to create a proxy object that behaves like a local representation
    of a remote pydase service, facilitating direct interaction as if it were local
    while actually communicating over network protocols.
    It can also be used as an attribute of a pydase service itself, e.g.

    ```python
    import pydase


    class MyService(pydase.DataService):
        proxy = pydase.Client(
            hostname="...", port=8001, block_until_connected=False
        ).proxy


    if __name__ == "__main__":
        service = MyService()
        server = pydase.Server(service, web_port=8002).run()
    ```
    """

    def __init__(
        self,
        sio_client: socketio.AsyncClient,
        loop: asyncio.AbstractEventLoop,
        reconnect: Callable[..., None],
    ) -> None:
        if TYPE_CHECKING:
            self._service_representation: None | SerializedObject = None

        super().__init__()
        pydase.components.DeviceConnection.__init__(self)
        self._initialise(sio_client=sio_client, loop=loop)
        object.__setattr__(self, "_service_representation", None)
        self.reconnect = reconnect

    def serialize(self) -> SerializedObject:
        if self._service_representation is None:
            serialization_future = cast(
                asyncio.Future[SerializedDataService],
                asyncio.run_coroutine_threadsafe(
                    self._sio.call("service_serialization"), self._loop
                ),
            )
            # need to use object.__setattr__ to not trigger an observer notification
            object.__setattr__(
                self, "_service_representation", serialization_future.result()
            )
            if TYPE_CHECKING:
                self._service_representation = serialization_future.result()

        device_connection_value = cast(
            dict[str, SerializedObject],
            pydase.components.DeviceConnection().serialize()["value"],
        )

        readonly = False
        doc = get_attribute_doc(self)
        obj_name = self.__class__.__name__

        value = {
            **cast(
                dict[str, SerializedObject],
                # need to deepcopy to not overwrite the _service_representation dict
                # when adding a prefix with add_prefix_to_full_access_path
                deepcopy(self._service_representation["value"]),
            ),
            **device_connection_value,
        }

        return {
            "full_access_path": "",
            "name": obj_name,
            "type": "DeviceConnection",
            "value": value,
            "readonly": readonly,
            "doc": doc,
        }

    def connect(self) -> None:
        if not self._sio.reconnection or self._sio.reconnection_attempts > 0:
            self.reconnect(block_until_connected=False)
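The `serialize()` method above caches the remote service's serialization and merges it into the `DeviceConnection` representation, which is what lets a proxy be mounted as an attribute of another service. A short sketch of that pattern using the new `url`-based constructor (addresses and ports are placeholders):

```python
import pydase


class MyService(pydase.DataService):
    # ProxyClass.serialize() exposes the remote service's attributes under
    # `proxy` in this service's own serialization and web frontend.
    proxy = pydase.Client(url="ws://localhost:8001", block_until_connected=False).proxy


if __name__ == "__main__":
    pydase.Server(MyService(), web_port=8002).run()
```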
@@ -1,7 +1,6 @@
import asyncio
import logging
from collections.abc import Iterable
from copy import copy
from typing import TYPE_CHECKING, Any, cast

import socketio  # type: ignore

@@ -202,25 +201,8 @@ class ProxyClassMixin:
    def _handle_serialized_method(
        self, attr_name: str, serialized_object: SerializedObject
    ) -> None:
        def add_prefix_to_last_path_element(s: str, prefix: str) -> str:
            parts = s.split(".")
            parts[-1] = f"{prefix}_{parts[-1]}"
            return ".".join(parts)

        if serialized_object["type"] == "method":
            if serialized_object["async"] is True:
                start_method = copy(serialized_object)
                start_method["full_access_path"] = add_prefix_to_last_path_element(
                    start_method["full_access_path"], "start"
                )
                stop_method = copy(serialized_object)
                stop_method["full_access_path"] = add_prefix_to_last_path_element(
                    stop_method["full_access_path"], "stop"
                )
                self._add_method_proxy(f"start_{attr_name}", start_method)
                self._add_method_proxy(f"stop_{attr_name}", stop_method)
            else:
                self._add_method_proxy(attr_name, serialized_object)
            self._add_method_proxy(attr_name, serialized_object)

    def _add_method_proxy(
        self, attr_name: str, serialized_object: SerializedObject

@@ -351,7 +333,7 @@ class ProxyLoader:
    ) -> Any:
        # Custom types like Components or DataService classes
        component_class = cast(
            type, Deserializer.get_component_class(serialized_object["type"])
            type, Deserializer.get_service_base_class(serialized_object["type"])
        )
        class_bases = (
            ProxyClassMixin,
@@ -7,58 +7,59 @@ class ColouredEnum(Enum):
|
||||
|
||||
This class extends the standard Enum but requires its values to be valid CSS
|
||||
colour codes. Supported colour formats include:
|
||||
- Hexadecimal colours
|
||||
- Hexadecimal colours with transparency
|
||||
- RGB colours
|
||||
- RGBA colours
|
||||
- HSL colours
|
||||
- HSLA colours
|
||||
- Predefined/Cross-browser colour names
|
||||
|
||||
- Hexadecimal colours
|
||||
- Hexadecimal colours with transparency
|
||||
- RGB colours
|
||||
- RGBA colours
|
||||
- HSL colours
|
||||
- HSLA colours
|
||||
- Predefined/Cross-browser colour names
|
||||
|
||||
Refer to the this website for more details on colour formats:
|
||||
(https://www.w3schools.com/cssref/css_colours_legal.php)
|
||||
|
||||
The behavior of this component in the UI depends on how it's defined in the data
|
||||
service:
|
||||
- As property with a setter or as attribute: Renders as a dropdown menu,
|
||||
allowing users to select and change its value from the frontend.
|
||||
- As property without a setter: Displays as a coloured box with the key of the
|
||||
`ColouredEnum` as text inside, serving as a visual indicator without user
|
||||
interaction.
|
||||
|
||||
- As property with a setter or as attribute: Renders as a dropdown menu, allowing
|
||||
users to select and change its value from the frontend.
|
||||
- As property without a setter: Displays as a coloured box with the key of the
|
||||
`ColouredEnum` as text inside, serving as a visual indicator without user
|
||||
interaction.
|
||||
|
||||
Example:
|
||||
--------
|
||||
```python
|
||||
import pydase.components as pyc
|
||||
import pydase
|
||||
```python
|
||||
import pydase.components as pyc
|
||||
import pydase
|
||||
|
||||
class MyStatus(pyc.ColouredEnum):
|
||||
PENDING = "#FFA500" # Orange
|
||||
RUNNING = "#0000FF80" # Transparent Blue
|
||||
PAUSED = "rgb(169, 169, 169)" # Dark Gray
|
||||
RETRYING = "rgba(255, 255, 0, 0.3)" # Transparent Yellow
|
||||
COMPLETED = "hsl(120, 100%, 50%)" # Green
|
||||
FAILED = "hsla(0, 100%, 50%, 0.7)" # Transparent Red
|
||||
CANCELLED = "SlateGray" # Slate Gray
|
||||
class MyStatus(pyc.ColouredEnum):
|
||||
PENDING = "#FFA500" # Orange
|
||||
RUNNING = "#0000FF80" # Transparent Blue
|
||||
PAUSED = "rgb(169, 169, 169)" # Dark Gray
|
||||
RETRYING = "rgba(255, 255, 0, 0.3)" # Transparent Yellow
|
||||
COMPLETED = "hsl(120, 100%, 50%)" # Green
|
||||
FAILED = "hsla(0, 100%, 50%, 0.7)" # Transparent Red
|
||||
CANCELLED = "SlateGray" # Slate Gray
|
||||
|
||||
class StatusExample(pydase.DataService):
|
||||
_status = MyStatus.RUNNING
|
||||
class StatusExample(pydase.DataService):
|
||||
_status = MyStatus.RUNNING
|
||||
|
||||
@property
|
||||
def status(self) -> MyStatus:
|
||||
return self._status
|
||||
@property
|
||||
def status(self) -> MyStatus:
|
||||
return self._status
|
||||
|
||||
@status.setter
|
||||
def status(self, value: MyStatus) -> None:
|
||||
# Custom logic here...
|
||||
self._status = value
|
||||
@status.setter
|
||||
def status(self, value: MyStatus) -> None:
|
||||
# Custom logic here...
|
||||
self._status = value
|
||||
|
||||
# Example usage:
|
||||
my_service = StatusExample()
|
||||
my_service.status = MyStatus.FAILED
|
||||
```
|
||||
# Example usage:
|
||||
my_service = StatusExample()
|
||||
my_service.status = MyStatus.FAILED
|
||||
```
|
||||
|
||||
Note
|
||||
----
|
||||
Each enumeration name and value must be unique. This means that you should use
|
||||
different colour formats when you want to use a colour multiple times.
|
||||
Note:
|
||||
Each enumeration name and value must be unique. This means that you should use
|
||||
different colour formats when you want to use a colour multiple times.
|
||||
"""
|
||||
|
||||
@@ -1,6 +1,7 @@
import asyncio

import pydase.data_service
import pydase.task.decorator


class DeviceConnection(pydase.data_service.DataService):

@@ -19,22 +20,26 @@ class DeviceConnection(pydase.data_service.DataService):
    to the device. This method should update the `self._connected` attribute to reflect
    the connection status:

    >>> class MyDeviceConnection(DeviceConnection):
    ...     def connect(self) -> None:
    ...         # Implementation to connect to the device
    ...         # Update self._connected to `True` if connection is successful,
    ...         # `False` otherwise
    ...         ...
    ```python
    class MyDeviceConnection(DeviceConnection):
        def connect(self) -> None:
            # Implementation to connect to the device
            # Update self._connected to `True` if connection is successful,
            # `False` otherwise
            ...
    ```

    Optionally, if additional logic is needed to determine the connection status,
    the `connected` property can also be overridden:

    >>> class MyDeviceConnection(DeviceConnection):
    ...     @property
    ...     def connected(self) -> bool:
    ...         # Custom logic to determine connection status
    ...         return some_custom_condition
    ...
    ```python
    class MyDeviceConnection(DeviceConnection):
        @property
        def connected(self) -> bool:
            # Custom logic to determine connection status
            return some_custom_condition

    ```

    Frontend Representation
    -----------------------

@@ -48,7 +53,6 @@ class DeviceConnection(pydase.data_service.DataService):
    def __init__(self) -> None:
        super().__init__()
        self._connected = False
        self._autostart_tasks["_handle_connection"] = ()  # type: ignore
        self._reconnection_wait_time = 10.0

    def connect(self) -> None:

@@ -66,6 +70,7 @@ class DeviceConnection(pydase.data_service.DataService):
        """
        return self._connected

    @pydase.task.decorator.task(autostart=True)
    async def _handle_connection(self) -> None:
        """Automatically tries reconnecting to the device if it is not connected.
        This method leverages the `connect` method and the `connected` property to
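`_handle_connection` is now registered through `@pydase.task.decorator.task(autostart=True)` instead of the `_autostart_tasks` dict, but it still calls `connect()` periodically while the device is unreachable. A compact sketch combining the two docstring examples above; `FakeTransport` is a made-up stand-in, not a pydase API:

```python
from pydase.components import DeviceConnection


class FakeTransport:
    """Illustrative placeholder for a real device driver."""

    def open(self) -> bool:
        return True  # pretend the device answered

    @property
    def is_open(self) -> bool:
        return True


class MyDeviceConnection(DeviceConnection):
    def __init__(self) -> None:
        super().__init__()
        self._transport = FakeTransport()

    def connect(self) -> None:
        # called by the autostart task whenever `connected` is False
        self._connected = self._transport.open()

    @property
    def connected(self) -> bool:
        return self._transport.is_open
```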
@@ -11,76 +11,74 @@ class NumberSlider(DataService):
|
||||
This class models a UI slider for a data service, allowing for adjustments of a
|
||||
parameter within a specified range and increments.
|
||||
|
||||
Parameters:
|
||||
-----------
|
||||
value (float, optional):
|
||||
The initial value of the slider. Defaults to 0.
|
||||
min (float, optional):
|
||||
The minimum value of the slider. Defaults to 0.
|
||||
max (float, optional):
|
||||
The maximum value of the slider. Defaults to 100.
|
||||
step_size (float, optional):
|
||||
The increment/decrement step size of the slider. Defaults to 1.0.
|
||||
Args:
|
||||
value:
|
||||
The initial value of the slider. Defaults to 0.
|
||||
min_:
|
||||
The minimum value of the slider. Defaults to 0.
|
||||
max_:
|
||||
The maximum value of the slider. Defaults to 100.
|
||||
step_size:
|
||||
The increment/decrement step size of the slider. Defaults to 1.0.
|
||||
|
||||
Example:
|
||||
--------
|
||||
```python
|
||||
class MySlider(pydase.components.NumberSlider):
|
||||
def __init__(
|
||||
self,
|
||||
value: float = 0.0,
|
||||
min_: float = 0.0,
|
||||
max_: float = 100.0,
|
||||
step_size: float = 1.0,
|
||||
) -> None:
|
||||
super().__init__(value, min_, max_, step_size)
|
||||
```python
|
||||
class MySlider(pydase.components.NumberSlider):
|
||||
def __init__(
|
||||
self,
|
||||
value: float = 0.0,
|
||||
min_: float = 0.0,
|
||||
max_: float = 100.0,
|
||||
step_size: float = 1.0,
|
||||
) -> None:
|
||||
super().__init__(value, min_, max_, step_size)
|
||||
|
||||
@property
|
||||
def min(self) -> float:
|
||||
return self._min
|
||||
@property
|
||||
def min(self) -> float:
|
||||
return self._min
|
||||
|
||||
@min.setter
|
||||
def min(self, value: float) -> None:
|
||||
self._min = value
|
||||
@min.setter
|
||||
def min(self, value: float) -> None:
|
||||
self._min = value
|
||||
|
||||
@property
|
||||
def max(self) -> float:
|
||||
return self._max
|
||||
@property
|
||||
def max(self) -> float:
|
||||
return self._max
|
||||
|
||||
@max.setter
|
||||
def max(self, value: float) -> None:
|
||||
self._max = value
|
||||
@max.setter
|
||||
def max(self, value: float) -> None:
|
||||
self._max = value
|
||||
|
||||
@property
|
||||
def step_size(self) -> float:
|
||||
return self._step_size
|
||||
@property
|
||||
def step_size(self) -> float:
|
||||
return self._step_size
|
||||
|
||||
@step_size.setter
|
||||
def step_size(self, value: float) -> None:
|
||||
self._step_size = value
|
||||
@step_size.setter
|
||||
def step_size(self, value: float) -> None:
|
||||
self._step_size = value
|
||||
|
||||
@property
|
||||
def value(self) -> float:
|
||||
return self._value
|
||||
@property
|
||||
def value(self) -> float:
|
||||
return self._value
|
||||
|
||||
@value.setter
|
||||
def value(self, value: float) -> None:
|
||||
if value < self._min or value > self._max:
|
||||
raise ValueError(
|
||||
"Value is either below allowed min or above max value."
|
||||
)
|
||||
@value.setter
|
||||
def value(self, value: float) -> None:
|
||||
if value < self._min or value > self._max:
|
||||
raise ValueError(
|
||||
"Value is either below allowed min or above max value."
|
||||
)
|
||||
|
||||
self._value = value
|
||||
self._value = value
|
||||
|
||||
class MyService(pydase.DataService):
|
||||
def __init__(self) -> None:
|
||||
self.voltage = MyService()
|
||||
class MyService(pydase.DataService):
|
||||
def __init__(self) -> None:
|
||||
self.voltage = MyService()
|
||||
|
||||
# Modifying or accessing the voltage value:
|
||||
my_service = MyService()
|
||||
my_service.voltage.value = 5
|
||||
print(my_service.voltage.value) # Output: 5
|
||||
```
|
||||
# Modifying or accessing the voltage value:
|
||||
my_service = MyService()
|
||||
my_service.voltage.value = 5
|
||||
print(my_service.voltage.value) # Output: 5
|
||||
```
|
||||
"""
|
||||
|
||||
def __init__(
|
||||
|
||||
@@ -5,19 +5,31 @@ from confz import BaseConfig, EnvSource


class OperationMode(BaseConfig):  # type: ignore[misc]
    environment: Literal["development", "production"] = "development"
    environment: Literal["testing", "development", "production"] = "development"
    """The service's operation mode."""

    CONFIG_SOURCES = EnvSource(allow=["ENVIRONMENT"])


class ServiceConfig(BaseConfig):  # type: ignore[misc]
    """Service configuration.

    Variables can be set through environment variables prefixed with `SERVICE_` or an
    `.env` file containing those variables.
    """

    config_dir: Path = Path("config")
    """Configuration directory"""
    web_port: int = 8001
    """Web server port"""

    CONFIG_SOURCES = EnvSource(allow_all=True, prefix="SERVICE_", file=".env")


class WebServerConfig(BaseConfig):  # type: ignore[misc]
    """The service's web server configuration."""

    generate_web_settings: bool = False
    """Should generate web_settings.json file"""

    CONFIG_SOURCES = EnvSource(allow=["GENERATE_WEB_SETTINGS"])
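These confz-based config classes read their values from the environment (with the `SERVICE_` prefix for `ServiceConfig`) or from an `.env` file. A rough illustration, assuming the classes live in `pydase.config` and that confz resolves the sources at instantiation time:

```python
import os

from pydase.config import OperationMode, ServiceConfig  # module path assumed

os.environ["SERVICE_WEB_PORT"] = "8080"  # picked up via the SERVICE_ prefix
os.environ["ENVIRONMENT"] = "testing"    # newly allowed literal above

print(ServiceConfig().web_port)       # -> 8080
print(OperationMode().environment)    # -> "testing"
```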
@@ -1,15 +1,7 @@
|
||||
from __future__ import annotations
|
||||
|
||||
from typing import TYPE_CHECKING, Any
|
||||
|
||||
from pydase.observer_pattern.observable.observable import Observable
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from pydase.data_service.data_service import DataService
|
||||
from pydase.data_service.task_manager import TaskManager
|
||||
|
||||
|
||||
class AbstractDataService(Observable):
|
||||
__root__: DataService
|
||||
_task_manager: TaskManager
|
||||
_autostart_tasks: dict[str, tuple[Any]]
|
||||
pass
|
||||
|
||||
@@ -1,16 +1,17 @@
import inspect
import logging
from collections.abc import Callable
from enum import Enum
from typing import Any

import pydase.units as u
from pydase.data_service.abstract_data_service import AbstractDataService
from pydase.data_service.task_manager import TaskManager
from pydase.observer_pattern.observable.observable import (
    Observable,
)
from pydase.utils.helpers import (
    get_class_and_instance_attributes,
    is_descriptor,
    is_property_attribute,
)
from pydase.utils.serialization.serializer import (

@@ -24,11 +25,6 @@ logger = logging.getLogger(__name__)
class DataService(AbstractDataService):
    def __init__(self) -> None:
        super().__init__()
        self._task_manager = TaskManager(self)

        if not hasattr(self, "_autostart_tasks"):
            self._autostart_tasks = {}

        self.__check_instance_classes()

    def __setattr__(self, __name: str, __value: Any) -> None:

@@ -73,8 +69,19 @@ class DataService(AbstractDataService):

        if not issubclass(
            value_class,
            (int | float | bool | str | list | dict | Enum | u.Quantity | Observable),
        ):
            (
                int
                | float
                | bool
                | str
                | list
                | dict
                | Enum
                | u.Quantity
                | Observable
                | Callable
            ),
        ) and not is_descriptor(__value):
            logger.warning(
                "Class '%s' does not inherit from DataService. This may lead to"
                " unexpected behaviour!",
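With `Callable` added to the `issubclass` tuple and the extra `is_descriptor` guard, plain callables and descriptor objects assigned to a service no longer trigger the warning. A hedged sketch of attribute kinds that now pass the check silently (the class and attribute names are made up):

```python
import pydase


class Gain:
    """A toy non-data descriptor (only defines __get__)."""

    def __get__(self, obj, objtype=None) -> float:
        return 1.5


class MyService(pydase.DataService):
    scale = Gain()  # descriptor attribute: skipped by the is_descriptor guard

    def __init__(self) -> None:
        super().__init__()
        self.transform = lambda x: 2 * x  # Callable attribute: now in the allowed tuple
```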
@@ -8,7 +8,10 @@ from pydase.observer_pattern.observable.observable_object import ObservableObjec
|
||||
from pydase.observer_pattern.observer.property_observer import (
|
||||
PropertyObserver,
|
||||
)
|
||||
from pydase.utils.helpers import get_object_attr_from_path
|
||||
from pydase.utils.helpers import (
|
||||
get_object_attr_from_path,
|
||||
normalize_full_access_path_string,
|
||||
)
|
||||
from pydase.utils.serialization.serializer import (
|
||||
SerializationPathError,
|
||||
SerializedObject,
|
||||
@@ -53,7 +56,7 @@ class DataServiceObserver(PropertyObserver):
|
||||
cached_value = cached_value_dict.get("value")
|
||||
if (
|
||||
all(part[0] != "_" for part in full_access_path.split("."))
|
||||
and cached_value != value
|
||||
and cached_value != dump(value)["value"]
|
||||
):
|
||||
logger.debug("'%s' changed to '%s'", full_access_path, value)
|
||||
|
||||
@@ -99,7 +102,8 @@ class DataServiceObserver(PropertyObserver):
|
||||
)
|
||||
|
||||
def _notify_dependent_property_changes(self, changed_attr_path: str) -> None:
|
||||
changed_props = self.property_deps_dict.get(changed_attr_path, [])
|
||||
normalized_attr_path = normalize_full_access_path_string(changed_attr_path)
|
||||
changed_props = self.property_deps_dict.get(normalized_attr_path, [])
|
||||
for prop in changed_props:
|
||||
# only notify about changing attribute if it is not currently being
|
||||
# "changed" e.g. when calling the getter of a property within another
|
||||
@@ -124,8 +128,10 @@ class DataServiceObserver(PropertyObserver):
|
||||
object.
|
||||
|
||||
Args:
|
||||
callback (Callable[[str, Any, dict[str, Any]]): The callback function to be
|
||||
registered. The function should have the following signature:
|
||||
callback:
|
||||
The callback function to be registered. The function should have the
|
||||
following signature:
|
||||
|
||||
- full_access_path (str): The full dot-notation access path of the
|
||||
changed attribute. This path indicates the location of the changed
|
||||
attribute within the observable object's structure.
|
||||
|
||||
@@ -33,17 +33,19 @@ def load_state(func: Callable[..., Any]) -> Callable[..., Any]:
|
||||
the value should be loaded from the JSON file.
|
||||
|
||||
Example:
|
||||
>>> class Service(pydase.DataService):
|
||||
... _name = "Service"
|
||||
...
|
||||
... @property
|
||||
... def name(self) -> str:
|
||||
... return self._name
|
||||
...
|
||||
... @name.setter
|
||||
... @load_state
|
||||
... def name(self, value: str) -> None:
|
||||
... self._name = value
|
||||
```python
|
||||
class Service(pydase.DataService):
|
||||
_name = "Service"
|
||||
|
||||
@property
|
||||
def name(self) -> str:
|
||||
return self._name
|
||||
|
||||
@name.setter
|
||||
@load_state
|
||||
def name(self, value: str) -> None:
|
||||
self._name = value
|
||||
```
|
||||
"""
|
||||
|
||||
func._load_state = True # type: ignore[attr-defined]
|
||||
@@ -85,13 +87,11 @@ class StateManager:
|
||||
StateManager provides a snapshot of the DataService's state that is sufficiently
|
||||
accurate for initial rendering and interaction.
|
||||
|
||||
Attributes:
|
||||
cache (dict[str, Any]):
|
||||
A dictionary cache of the DataService's state.
|
||||
filename (str):
|
||||
The file name used for storing the DataService's state.
|
||||
service (DataService):
|
||||
Args:
|
||||
service:
|
||||
The DataService instance whose state is being managed.
|
||||
filename:
|
||||
The file name used for storing the DataService's state.
|
||||
|
||||
Note:
|
||||
The StateManager's cache updates are triggered by notifications and do not
|
||||
@@ -200,9 +200,11 @@ class StateManager:
|
||||
It also handles type-specific conversions for the new value before setting it.
|
||||
|
||||
Args:
|
||||
path: A dot-separated string indicating the hierarchical path to the
|
||||
path:
|
||||
A dot-separated string indicating the hierarchical path to the
|
||||
attribute.
|
||||
value: The new value to set for the attribute.
|
||||
serialized_value:
|
||||
The serialized representation of the new value to set for the attribute.
|
||||
"""
|
||||
|
||||
try:
|
||||
|
||||
@@ -1,225 +0,0 @@
|
||||
from __future__ import annotations
|
||||
|
||||
import asyncio
|
||||
import inspect
|
||||
import logging
|
||||
from enum import Enum
|
||||
from typing import TYPE_CHECKING, Any
|
||||
|
||||
from pydase.data_service.abstract_data_service import AbstractDataService
|
||||
from pydase.utils.helpers import (
|
||||
function_has_arguments,
|
||||
get_class_and_instance_attributes,
|
||||
is_property_attribute,
|
||||
)
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from collections.abc import Callable
|
||||
|
||||
from .data_service import DataService
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class TaskStatus(Enum):
|
||||
RUNNING = "running"
|
||||
|
||||
|
||||
class TaskManager:
|
||||
"""
|
||||
The TaskManager class is a utility designed to manage asynchronous tasks. It
|
||||
provides functionality for starting, stopping, and tracking these tasks. The class
|
||||
is primarily used by the DataService class to manage its tasks.
|
||||
|
||||
A task in TaskManager is any asynchronous function. To add a task, you simply need
|
||||
to define an async function within your class that extends TaskManager. For example:
|
||||
|
||||
```python
|
||||
class MyService(DataService):
|
||||
async def my_task(self):
|
||||
# Your task implementation here
|
||||
pass
|
||||
```
|
||||
|
||||
With the above definition, TaskManager automatically creates `start_my_task` and
|
||||
`stop_my_task` methods that can be used to control the task.
|
||||
|
||||
TaskManager also supports auto-starting tasks. If there are tasks that should start
|
||||
running as soon as an instance of your class is created, you can define them in
|
||||
`self._autostart_tasks` in your class constructor (__init__ method). Here's how:
|
||||
|
||||
```python
|
||||
class MyService(DataService):
|
||||
def __init__(self):
|
||||
self._autostart_tasks = {
|
||||
"my_task": (*args) # Replace with actual arguments
|
||||
}
|
||||
self.wait_time = 1
|
||||
super().__init__()
|
||||
|
||||
async def my_task(self, *args):
|
||||
while True:
|
||||
# Your task implementation here
|
||||
await asyncio.sleep(self.wait_time)
|
||||
```
|
||||
|
||||
In the above example, `my_task` will start running as soon as
|
||||
`_start_autostart_tasks` is called which is done when the DataService instance is
|
||||
passed to the `pydase.Server` class.
|
||||
|
||||
The responsibilities of the TaskManager class are:
|
||||
|
||||
- Track all running tasks: Keeps track of all the tasks that are currently running.
|
||||
This allows for monitoring of task statuses and for making sure tasks do not
|
||||
overlap.
|
||||
- Provide the ability to start and stop tasks: Automatically creates methods to
|
||||
start and stop each task.
|
||||
- Emit notifications when the status of a task changes: Has a built-in mechanism for
|
||||
emitting notifications when a task starts or stops. This is used to update the user
|
||||
interfaces, but can also be used to write logs, etc.
|
||||
"""
|
||||
|
||||
def __init__(self, service: DataService) -> None:
|
||||
self.service = service
|
||||
|
||||
self.tasks: dict[str, asyncio.Task[None]] = {}
|
||||
"""A dictionary to keep track of running tasks. The keys are the names of the
|
||||
tasks and the values are TaskDict instances which include the task itself and
|
||||
its kwargs.
|
||||
"""
|
||||
|
||||
self._set_start_and_stop_for_async_methods()
|
||||
|
||||
@property
|
||||
def _loop(self) -> asyncio.AbstractEventLoop:
|
||||
return asyncio.get_running_loop()
|
||||
|
||||
def _set_start_and_stop_for_async_methods(self) -> None:
|
||||
for name in dir(self.service):
|
||||
# circumvents calling properties
|
||||
if is_property_attribute(self.service, name):
|
||||
continue
|
||||
|
||||
method = getattr(self.service, name)
|
||||
if inspect.iscoroutinefunction(method):
|
||||
if function_has_arguments(method):
|
||||
logger.info(
|
||||
"Async function %a is defined with at least one argument. If "
|
||||
"you want to use it as a task, remove the argument(s) from the "
|
||||
"function definition.",
|
||||
method.__name__,
|
||||
)
|
||||
continue
|
||||
|
||||
# create start and stop methods for each coroutine
|
||||
setattr(
|
||||
self.service, f"start_{name}", self._make_start_task(name, method)
|
||||
)
|
||||
setattr(self.service, f"stop_{name}", self._make_stop_task(name))
|
||||
|
||||
def _initiate_task_startup(self) -> None:
|
||||
if self.service._autostart_tasks is not None:
|
||||
for service_name, args in self.service._autostart_tasks.items():
|
||||
start_method = getattr(self.service, f"start_{service_name}", None)
|
||||
if start_method is not None and callable(start_method):
|
||||
start_method(*args)
|
||||
else:
|
||||
logger.warning(
|
||||
"No start method found for service '%s'", service_name
|
||||
)
|
||||
|
||||
def start_autostart_tasks(self) -> None:
|
||||
self._initiate_task_startup()
|
||||
attrs = get_class_and_instance_attributes(self.service)
|
||||
|
||||
for attr_value in attrs.values():
|
||||
if isinstance(attr_value, AbstractDataService):
|
||||
attr_value._task_manager.start_autostart_tasks()
|
||||
elif isinstance(attr_value, list):
|
||||
for item in attr_value:
|
||||
if isinstance(item, AbstractDataService):
|
||||
item._task_manager.start_autostart_tasks()
|
||||
|
||||
def _make_stop_task(self, name: str) -> Callable[..., Any]:
|
||||
"""
|
||||
Factory function to create a 'stop_task' function for a running task.
|
||||
|
||||
The generated function cancels the associated asyncio task using 'name' for
|
||||
identification, ensuring proper cleanup. Avoids closure and late binding issues.
|
||||
|
||||
Args:
|
||||
name (str): The name of the coroutine task, used for its identification.
|
||||
"""
|
||||
|
||||
def stop_task() -> None:
|
||||
# cancel the task
|
||||
task = self.tasks.get(name, None)
|
||||
if task is not None:
|
||||
self._loop.call_soon_threadsafe(task.cancel)
|
||||
|
||||
return stop_task
|
||||
|
||||
def _make_start_task(
|
||||
self, name: str, method: Callable[..., Any]
|
||||
) -> Callable[..., Any]:
|
||||
"""
|
||||
Factory function to create a 'start_task' function for a coroutine.
|
||||
|
||||
The generated function starts the coroutine as an asyncio task, handling
|
||||
registration and monitoring.
|
||||
It uses 'name' and 'method' to avoid the closure and late binding issue.
|
||||
|
||||
Args:
|
||||
name (str): The name of the coroutine, used for task management.
|
||||
method (callable): The coroutine to be turned into an asyncio task.
|
||||
"""
|
||||
|
||||
def start_task() -> None:
|
||||
def task_done_callback(task: asyncio.Task[None], name: str) -> None:
|
||||
"""Handles tasks that have finished.
|
||||
|
||||
Removes a task from the tasks dictionary, calls the defined
|
||||
callbacks, and logs and re-raises exceptions."""
|
||||
|
||||
# remove the finished task from the tasks dictionary
|
||||
self.tasks.pop(name, None)
|
||||
|
||||
# emit the notification that the task was stopped
|
||||
self.service._notify_changed(name, None)
|
||||
|
||||
exception = task.exception()
|
||||
if exception is not None:
|
||||
# Handle the exception, or you can re-raise it.
|
||||
logger.error(
|
||||
"Task '%s' encountered an exception: %s: %s",
|
||||
name,
|
||||
type(exception).__name__,
|
||||
exception,
|
||||
)
|
||||
raise exception
|
||||
|
||||
async def task() -> None:
|
||||
try:
|
||||
await method()
|
||||
except asyncio.CancelledError:
|
||||
logger.info("Task '%s' was cancelled", name)
|
||||
|
||||
if not self.tasks.get(name):
|
||||
# creating the task and adding the task_done_callback which checks
|
||||
# if an exception has occurred during the task execution
|
||||
task_object = self._loop.create_task(task())
|
||||
task_object.add_done_callback(
|
||||
lambda task: task_done_callback(task, name)
|
||||
)
|
||||
|
||||
# Store the task in the 'tasks' dictionary. The key is the name of
# the method, and the value is the task object itself.
|
||||
self.tasks[name] = task_object
|
||||
|
||||
# emit the notification that the task was started
|
||||
self.service._notify_changed(name, TaskStatus.RUNNING)
|
||||
else:
|
||||
logger.error("Task '%s' is already running!", name)
|
||||
|
||||
return start_task
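With the factory above in place, the generated `start_<name>`/`stop_<name>` methods live on the service instance itself. A minimal sketch of how they might be used; whether this API remains once the `@task` decorator introduced later in this changeset takes over is not settled by this hunk, and the attribute accesses are dynamic (hence the type-ignore comments):

```python
import asyncio

import pydase


class MyService(pydase.DataService):
    async def my_task(self) -> None:
        while True:
            await asyncio.sleep(1)  # placeholder work


async def control(service: MyService) -> None:
    # TaskManager attached these methods when the service was constructed.
    service.start_my_task()  # type: ignore[attr-defined]  # schedules the coroutine
    await asyncio.sleep(5)
    service.stop_my_task()  # type: ignore[attr-defined]  # cancels the running task
```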
|
||||
BIN
src/pydase/frontend/favicon.ico
Normal file
|
After: 77 KiB
@@ -3,13 +3,20 @@
|
||||
|
||||
<head>
|
||||
<meta charset="UTF-8" />
|
||||
<link rel="icon" type="image/svg+xml" href="/favicon.ico" />
|
||||
<meta name="viewport" content="width=device-width, initial-scale=1.0" />
|
||||
<meta name="theme-color" content="#000000" />
|
||||
<meta name="description" content="Web site displaying a pydase UI." />
|
||||
<script type="module" crossorigin src="/assets/index-D7tStNHJ.js"></script>
|
||||
<script type="module" crossorigin src="/assets/index-BqF7l_R8.js"></script>
|
||||
<link rel="stylesheet" crossorigin href="/assets/index-D2aktF3W.css">
|
||||
</head>
|
||||
|
||||
<script>
|
||||
// this will be set by the python backend if the service is behind a proxy which strips a prefix. The frontend can use this to build the paths to the resources.
|
||||
window.__FORWARDED_PREFIX__ = "";
|
||||
window.__FORWARDED_PROTO__ = "";
|
||||
</script>
|
||||
|
||||
<body>
|
||||
<noscript>You need to enable JavaScript to run this app.</noscript>
|
||||
<div id="root"></div>
|
||||
|
||||
@@ -17,10 +17,10 @@ def validate_set(
|
||||
getter and check against the desired value.
|
||||
|
||||
Args:
|
||||
timeout (float):
|
||||
timeout:
|
||||
The maximum time (in seconds) to wait for the value to be within the
|
||||
precision boundary.
|
||||
precision (float | None):
|
||||
precision:
|
||||
The acceptable deviation from the desired value. If None, the value must be
|
||||
exact.
|
||||
"""
|
||||
@@ -44,13 +44,11 @@ def has_validate_set_decorator(prop: property) -> bool:
|
||||
Checks if a property setter has been decorated with the `validate_set` decorator.
|
||||
|
||||
Args:
|
||||
prop (property):
|
||||
prop:
|
||||
The property to check.
|
||||
|
||||
Returns:
|
||||
bool:
|
||||
True if the property setter has the `validate_set` decorator, False
|
||||
otherwise.
|
||||
True if the property setter has the `validate_set` decorator, False otherwise.
|
||||
"""
|
||||
|
||||
property_setter = prop.fset
|
||||
@@ -68,11 +66,11 @@ def _validate_value_was_correctly_set(
|
||||
specified `precision` and time `timeout`.
|
||||
|
||||
Args:
|
||||
obj (Observable):
|
||||
obj:
|
||||
The instance of the class containing the property.
|
||||
name (str):
|
||||
name:
|
||||
The name of the property to validate.
|
||||
value (Any):
|
||||
value:
|
||||
The desired value to check against.
|
||||
|
||||
Raises:
|
||||
|
||||
@@ -6,7 +6,7 @@ from pydase.observer_pattern.observable.decorators import (
|
||||
has_validate_set_decorator,
|
||||
)
|
||||
from pydase.observer_pattern.observable.observable_object import ObservableObject
|
||||
from pydase.utils.helpers import is_property_attribute
|
||||
from pydase.utils.helpers import is_descriptor, is_property_attribute
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
@@ -22,7 +22,9 @@ class Observable(ObservableObject):
|
||||
- {"__annotations__"}
|
||||
}
|
||||
for name, value in class_attrs.items():
|
||||
if isinstance(value, property) or callable(value):
|
||||
if isinstance(value, property) or callable(value) or is_descriptor(value):
|
||||
# Properties, methods and descriptors have to be stored as class
|
||||
# attributes to work properly. So don't make it an instance attribute.
|
||||
continue
|
||||
self.__dict__[name] = self._initialise_new_objects(name, value)
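The added `is_descriptor(value)` check means custom descriptors now stay class-level attributes, just like properties and methods, instead of being copied into the instance `__dict__` where the descriptor protocol would stop working. A toy illustration; `LoggedField` is made up for this sketch and is not part of pydase:

```python
from pydase.observer_pattern.observable.observable import Observable


class LoggedField:
    """Toy data descriptor used only for illustration."""

    def __set_name__(self, owner: type, name: str) -> None:
        self._name = name

    def __get__(self, instance, owner=None):
        if instance is None:
            return self
        return instance.__dict__.get(f"_{self._name}", 0.0)

    def __set__(self, instance, value) -> None:
        print(f"{self._name} set to {value}")
        instance.__dict__[f"_{self._name}"] = value


class MyObservable(Observable):
    # With the change above this remains a class attribute, so reads and writes
    # keep going through LoggedField.__get__/__set__.
    level = LoggedField()
```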
|
||||
|
||||
|
||||
@@ -24,8 +24,7 @@ class Observer(ABC):
|
||||
self.on_change_start(changing_attribute)
|
||||
|
||||
@abstractmethod
|
||||
def on_change(self, full_access_path: str, value: Any) -> None:
|
||||
...
|
||||
def on_change(self, full_access_path: str, value: Any) -> None: ...
|
||||
|
||||
def on_change_start(self, full_access_path: str) -> None:
|
||||
return
|
||||
|
||||
@@ -5,6 +5,7 @@ from typing import Any
|
||||
|
||||
from pydase.observer_pattern.observable.observable import Observable
|
||||
from pydase.observer_pattern.observer.observer import Observer
|
||||
from pydase.utils.helpers import is_descriptor
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
@@ -60,18 +61,28 @@ class PropertyObserver(Observer):
|
||||
def _process_nested_observables_properties(
|
||||
self, obj: Observable, deps: dict[str, Any], prefix: str
|
||||
) -> None:
|
||||
for k, value in vars(obj).items():
|
||||
for k, value in {**vars(type(obj)), **vars(obj)}.items():
|
||||
actual_value = value
|
||||
prefix = (
|
||||
f"{prefix}." if prefix != "" and not prefix.endswith(".") else prefix
|
||||
)
|
||||
parent_path = f"{prefix}{k}"
|
||||
if isinstance(value, Observable):
|
||||
|
||||
# Get value from descriptor
|
||||
if not isinstance(value, property) and is_descriptor(value):
|
||||
actual_value = getattr(obj, k)
|
||||
|
||||
if isinstance(actual_value, Observable):
|
||||
new_prefix = f"{parent_path}."
|
||||
deps.update(
|
||||
self._get_properties_and_their_dependencies(value, new_prefix)
|
||||
self._get_properties_and_their_dependencies(
|
||||
actual_value, new_prefix
|
||||
)
|
||||
)
|
||||
elif isinstance(value, list | dict):
|
||||
self._process_collection_item_properties(value, deps, parent_path)
|
||||
self._process_collection_item_properties(
|
||||
actual_value, deps, parent_path
|
||||
)
|
||||
|
||||
def _process_collection_item_properties(
|
||||
self,
|
||||
|
||||
@@ -13,6 +13,8 @@ from pydase.config import ServiceConfig
|
||||
from pydase.data_service.data_service_observer import DataServiceObserver
|
||||
from pydase.data_service.state_manager import StateManager
|
||||
from pydase.server.web_server import WebServer
|
||||
from pydase.task.autostart import autostart_service_tasks
|
||||
from pydase.utils.helpers import current_event_loop_exists
|
||||
|
||||
HANDLED_SIGNALS = (
|
||||
signal.SIGINT, # Unix signal 2. Sent by Ctrl+C.
|
||||
@@ -35,18 +37,18 @@ class AdditionalServerProtocol(Protocol):
|
||||
|
||||
Args:
|
||||
data_service_observer:
|
||||
Observer for the DataService, handling state updates and communication to
|
||||
connected clients through injected callbacks. Can be utilized to access the
|
||||
service and state manager, and to add custom state-update callbacks.
|
||||
Observer for the DataService, handling state updates and communication to
|
||||
connected clients through injected callbacks. Can be utilized to access the
|
||||
service and state manager, and to add custom state-update callbacks.
|
||||
host:
|
||||
Hostname or IP address where the server is accessible. Commonly '0.0.0.0' to
|
||||
bind to all network interfaces.
|
||||
Hostname or IP address where the server is accessible. Commonly '0.0.0.0' to
|
||||
bind to all network interfaces.
|
||||
port:
|
||||
Port number on which the server listens. Typically in the range 1024-65535
|
||||
(non-standard ports).
|
||||
Port number on which the server listens. Typically in the range 1024-65535
|
||||
(non-standard ports).
|
||||
**kwargs:
|
||||
Any additional parameters required for initializing the server. These
|
||||
parameters are specific to the server's implementation.
|
||||
Any additional parameters required for initializing the server. These
|
||||
parameters are specific to the server's implementation.
|
||||
"""
|
||||
|
||||
def __init__(
|
||||
@@ -64,18 +66,17 @@ class AdditionalServerProtocol(Protocol):
|
||||
|
||||
|
||||
class AdditionalServer(TypedDict):
|
||||
"""
|
||||
A TypedDict that represents the configuration for an additional server to be run
|
||||
"""A TypedDict that represents the configuration for an additional server to be run
|
||||
alongside the main server.
|
||||
|
||||
This class is used to specify the server type, the port on which the server should
|
||||
run, and any additional keyword arguments that should be passed to the server when
|
||||
it's instantiated.
|
||||
"""
|
||||
|
||||
server: type[AdditionalServerProtocol]
|
||||
"""Server adhering to the
|
||||
[`AdditionalServerProtocol`][pydase.server.server.AdditionalServerProtocol]."""
|
||||
port: int
|
||||
"""Port on which the server should run."""
|
||||
kwargs: dict[str, Any]
|
||||
"""Additional keyword arguments that will be passed to the server's constructor """
|
||||
|
||||
|
||||
class Server:
|
||||
@@ -83,29 +84,20 @@ class Server:
|
||||
The `Server` class provides a flexible server implementation for the `DataService`.
|
||||
|
||||
Args:
|
||||
service: DataService
|
||||
service:
|
||||
The DataService instance that this server will manage.
|
||||
host: str
|
||||
The host address for the server. Default is '0.0.0.0', which means all
|
||||
host:
|
||||
The host address for the server. Defaults to `'0.0.0.0'`, which means all
|
||||
available network interfaces.
|
||||
web_port: int
|
||||
The port number for the web server. Default is
|
||||
`pydase.config.ServiceConfig().web_port`.
|
||||
enable_web: bool
|
||||
Whether to enable the web server. Default is True.
|
||||
filename: str | Path | None
|
||||
web_port:
|
||||
The port number for the web server. Defaults to
|
||||
[`ServiceConfig().web_port`][pydase.config.ServiceConfig.web_port].
|
||||
enable_web:
|
||||
Whether to enable the web server.
|
||||
filename:
|
||||
Filename of the file managing the service state persistence.
|
||||
Defaults to None.
|
||||
additional_servers : list[AdditionalServer]
|
||||
A list of additional servers to run alongside the main server. Each entry in
|
||||
the list should be a dictionary with the following structure:
|
||||
- server: A class that adheres to the AdditionalServerProtocol. This
|
||||
class should have an `__init__` method that accepts the DataService
|
||||
instance, port, host, and optional keyword arguments, and a `serve`
|
||||
method that is a coroutine responsible for starting the server.
|
||||
- port: The port on which the additional server will be running.
|
||||
- kwargs: A dictionary containing additional keyword arguments that will
|
||||
be passed to the server's `__init__` method.
|
||||
additional_servers:
|
||||
A list of additional servers to run alongside the main server.
|
||||
|
||||
Here's an example of how you might define an additional server:
|
||||
|
||||
@@ -145,8 +137,8 @@ class Server:
|
||||
)
|
||||
server.run()
|
||||
```
|
||||
**kwargs: Any
|
||||
Additional keyword arguments.
|
||||
**kwargs:
|
||||
Additional keyword arguments.
|
||||
"""
|
||||
|
||||
def __init__( # noqa: PLR0913
|
||||
@@ -166,13 +158,18 @@ class Server:
|
||||
self._web_port = web_port
|
||||
self._enable_web = enable_web
|
||||
self._kwargs = kwargs
|
||||
self._loop: asyncio.AbstractEventLoop
|
||||
self._additional_servers = additional_servers
|
||||
self.should_exit = False
|
||||
self.servers: dict[str, asyncio.Future[Any]] = {}
|
||||
self._state_manager = StateManager(self._service, filename)
|
||||
self._observer = DataServiceObserver(self._state_manager)
|
||||
self._state_manager.load_state()
|
||||
autostart_service_tasks(self._service)
|
||||
if not current_event_loop_exists():
|
||||
self._loop = asyncio.new_event_loop()
|
||||
asyncio.set_event_loop(self._loop)
|
||||
else:
|
||||
self._loop = asyncio.get_event_loop()
|
||||
|
||||
def run(self) -> None:
|
||||
"""
|
||||
@@ -180,7 +177,7 @@ class Server:
|
||||
|
||||
This method should be called to start the server after it's been instantiated.
|
||||
"""
|
||||
asyncio.run(self.serve())
|
||||
self._loop.run_until_complete(self.serve())
|
||||
|
||||
async def serve(self) -> None:
|
||||
process_id = os.getpid()
|
||||
@@ -196,10 +193,8 @@ class Server:
|
||||
logger.info("Finished server process [%s]", process_id)
|
||||
|
||||
async def startup(self) -> None:
|
||||
self._loop = asyncio.get_running_loop()
|
||||
self._loop.set_exception_handler(self.custom_exception_handler)
|
||||
self.install_signal_handlers()
|
||||
self._service._task_manager.start_autostart_tasks()
|
||||
|
||||
for server in self._additional_servers:
|
||||
addin_server = server["server"](
|
||||
@@ -214,7 +209,7 @@ class Server:
|
||||
)
|
||||
|
||||
server_task = self._loop.create_task(addin_server.serve())
|
||||
server_task.add_done_callback(self.handle_server_shutdown)
|
||||
server_task.add_done_callback(self._handle_server_shutdown)
|
||||
self.servers[server_name] = server_task
|
||||
if self._enable_web:
|
||||
self._web_server = WebServer(
|
||||
@@ -225,10 +220,10 @@ class Server:
|
||||
)
|
||||
server_task = self._loop.create_task(self._web_server.serve())
|
||||
|
||||
server_task.add_done_callback(self.handle_server_shutdown)
|
||||
server_task.add_done_callback(self._handle_server_shutdown)
|
||||
self.servers["web"] = server_task
|
||||
|
||||
def handle_server_shutdown(self, task: asyncio.Task[Any]) -> None:
|
||||
def _handle_server_shutdown(self, task: asyncio.Task[Any]) -> None:
|
||||
"""Handle server shutdown. If the service should exit, do nothing. Else, make
|
||||
the service exit."""
|
||||
|
||||
|
||||
@@ -1,3 +1,4 @@
|
||||
import inspect
|
||||
import logging
|
||||
from typing import TYPE_CHECKING
|
||||
|
||||
@@ -7,9 +8,11 @@ import aiohttp_middlewares.error
|
||||
from pydase.data_service.state_manager import StateManager
|
||||
from pydase.server.web_server.api.v1.endpoints import (
|
||||
get_value,
|
||||
trigger_async_method,
|
||||
trigger_method,
|
||||
update_value,
|
||||
)
|
||||
from pydase.utils.helpers import get_object_attr_from_path
|
||||
from pydase.utils.serialization.serializer import dump
|
||||
|
||||
if TYPE_CHECKING:
|
||||
@@ -17,54 +20,79 @@ if TYPE_CHECKING:
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
API_VERSION = "v1"
|
||||
|
||||
STATUS_OK = 200
|
||||
STATUS_FAILED = 400
|
||||
|
||||
|
||||
async def _get_value(
|
||||
state_manager: StateManager, request: aiohttp.web.Request
|
||||
) -> aiohttp.web.Response:
|
||||
logger.info("Handle api request: %s", request)
|
||||
|
||||
access_path = request.rel_url.query["access_path"]
|
||||
|
||||
status = STATUS_OK
|
||||
try:
|
||||
result = get_value(state_manager, access_path)
|
||||
except Exception as e:
|
||||
logger.exception(e)
|
||||
result = dump(e)
|
||||
status = STATUS_FAILED
|
||||
return aiohttp.web.json_response(result, status=status)
|
||||
|
||||
|
||||
async def _update_value(
|
||||
state_manager: StateManager, request: aiohttp.web.Request
|
||||
) -> aiohttp.web.Response:
|
||||
data: UpdateDict = await request.json()
|
||||
|
||||
try:
|
||||
update_value(state_manager, data)
|
||||
|
||||
return aiohttp.web.json_response()
|
||||
except Exception as e:
|
||||
logger.exception(e)
|
||||
return aiohttp.web.json_response(dump(e), status=STATUS_FAILED)
|
||||
|
||||
|
||||
async def _trigger_method(
|
||||
state_manager: StateManager, request: aiohttp.web.Request
|
||||
) -> aiohttp.web.Response:
|
||||
data: TriggerMethodDict = await request.json()
|
||||
|
||||
method = get_object_attr_from_path(state_manager.service, data["access_path"])
|
||||
|
||||
try:
|
||||
if inspect.iscoroutinefunction(method):
|
||||
method_return = await trigger_async_method(
|
||||
state_manager=state_manager, data=data
|
||||
)
|
||||
else:
|
||||
method_return = trigger_method(state_manager=state_manager, data=data)
|
||||
|
||||
return aiohttp.web.json_response(method_return)
|
||||
|
||||
except Exception as e:
|
||||
logger.exception(e)
|
||||
return aiohttp.web.json_response(dump(e), status=STATUS_FAILED)
|
||||
|
||||
|
||||
def create_api_application(state_manager: StateManager) -> aiohttp.web.Application:
|
||||
api_application = aiohttp.web.Application(
|
||||
middlewares=(aiohttp_middlewares.error.error_middleware(),)
|
||||
)
|
||||
|
||||
async def _get_value(request: aiohttp.web.Request) -> aiohttp.web.Response:
|
||||
logger.info("Handle api request: %s", request)
|
||||
|
||||
access_path = request.rel_url.query["access_path"]
|
||||
|
||||
status = STATUS_OK
|
||||
try:
|
||||
result = get_value(state_manager, access_path)
|
||||
except Exception as e:
|
||||
logger.exception(e)
|
||||
result = dump(e)
|
||||
status = STATUS_FAILED
|
||||
return aiohttp.web.json_response(result, status=status)
|
||||
|
||||
async def _update_value(request: aiohttp.web.Request) -> aiohttp.web.Response:
|
||||
data: UpdateDict = await request.json()
|
||||
|
||||
try:
|
||||
update_value(state_manager, data)
|
||||
|
||||
return aiohttp.web.json_response()
|
||||
except Exception as e:
|
||||
logger.exception(e)
|
||||
return aiohttp.web.json_response(dump(e), status=STATUS_FAILED)
|
||||
|
||||
async def _trigger_method(request: aiohttp.web.Request) -> aiohttp.web.Response:
|
||||
data: TriggerMethodDict = await request.json()
|
||||
|
||||
try:
|
||||
return aiohttp.web.json_response(trigger_method(state_manager, data))
|
||||
|
||||
except Exception as e:
|
||||
logger.exception(e)
|
||||
return aiohttp.web.json_response(dump(e), status=STATUS_FAILED)
|
||||
|
||||
api_application.router.add_get("/get_value", _get_value)
|
||||
api_application.router.add_put("/update_value", _update_value)
|
||||
api_application.router.add_put("/trigger_method", _trigger_method)
|
||||
api_application.router.add_get(
|
||||
"/get_value",
|
||||
lambda request: _get_value(state_manager=state_manager, request=request),
|
||||
)
|
||||
api_application.router.add_put(
|
||||
"/update_value",
|
||||
lambda request: _update_value(state_manager=state_manager, request=request),
|
||||
)
|
||||
api_application.router.add_put(
|
||||
"/trigger_method",
|
||||
lambda request: _trigger_method(state_manager=state_manager, request=request),
|
||||
)
|
||||
|
||||
return api_application
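A sketch of how these handlers can be called over plain HTTP. The host, port and the `/api/v1` prefix (suggested by `API_VERSION = "v1"`) are assumptions for this example:

```python
import asyncio

import aiohttp


async def main() -> None:
    base = "http://localhost:8001/api/v1"  # assumed host, port and mount prefix
    async with aiohttp.ClientSession() as session:
        async with session.get(
            f"{base}/get_value", params={"access_path": "wait_time"}
        ) as resp:
            print(resp.status, await resp.json())
        # /update_value and /trigger_method are called analogously via PUT with a
        # JSON body shaped like UpdateDict / TriggerMethodDict.


asyncio.run(main())
```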
|
||||
|
||||
@@ -1,12 +1,18 @@
|
||||
from typing import Any
|
||||
from typing import TYPE_CHECKING, Any
|
||||
|
||||
import pydase.utils.serialization.deserializer
|
||||
import pydase.utils.serialization.serializer
|
||||
from pydase.data_service.state_manager import StateManager
|
||||
from pydase.server.web_server.sio_setup import TriggerMethodDict, UpdateDict
|
||||
from pydase.utils.helpers import get_object_attr_from_path
|
||||
from pydase.utils.serialization.deserializer import loads
|
||||
from pydase.utils.serialization.serializer import Serializer, dump
|
||||
from pydase.utils.serialization.types import SerializedObject
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from collections.abc import Awaitable, Callable
|
||||
|
||||
loads = pydase.utils.serialization.deserializer.loads
|
||||
Serializer = pydase.utils.serialization.serializer.Serializer
|
||||
|
||||
|
||||
def update_value(state_manager: StateManager, data: UpdateDict) -> None:
|
||||
path = data["access_path"]
|
||||
@@ -32,4 +38,20 @@ def trigger_method(state_manager: StateManager, data: TriggerMethodDict) -> Any:
|
||||
serialized_kwargs = data.get("kwargs", None)
|
||||
kwargs: dict[str, Any] = loads(serialized_kwargs) if serialized_kwargs else {}
|
||||
|
||||
return dump(method(*args, **kwargs))
|
||||
return Serializer.serialize_object(method(*args, **kwargs))
|
||||
|
||||
|
||||
async def trigger_async_method(
|
||||
state_manager: StateManager, data: TriggerMethodDict
|
||||
) -> Any:
|
||||
method: Callable[..., Awaitable[Any]] = get_object_attr_from_path(
|
||||
state_manager.service, data["access_path"]
|
||||
)
|
||||
|
||||
serialized_args = data.get("args", None)
|
||||
args = loads(serialized_args) if serialized_args else []
|
||||
|
||||
serialized_kwargs = data.get("kwargs", None)
|
||||
kwargs: dict[str, Any] = loads(serialized_kwargs) if serialized_kwargs else {}
|
||||
|
||||
return Serializer.serialize_object(await method(*args, **kwargs))
|
||||
|
||||
@@ -1,8 +1,11 @@
|
||||
import asyncio
|
||||
import inspect
|
||||
import logging
|
||||
import sys
|
||||
from typing import Any, TypedDict
|
||||
|
||||
from pydase.utils.helpers import get_object_attr_from_path
|
||||
|
||||
if sys.version_info < (3, 11):
|
||||
from typing_extensions import NotRequired
|
||||
else:
|
||||
@@ -11,19 +14,19 @@ else:
|
||||
import click
|
||||
import socketio # type: ignore[import-untyped]
|
||||
|
||||
import pydase.server.web_server.api.v1.endpoints
|
||||
import pydase.utils.serialization.deserializer
|
||||
import pydase.utils.serialization.serializer
|
||||
from pydase.data_service.data_service_observer import DataServiceObserver
|
||||
from pydase.data_service.state_manager import StateManager
|
||||
from pydase.server.web_server.api.v1 import endpoints
|
||||
from pydase.utils.logging import SocketIOHandler
|
||||
from pydase.utils.serialization.serializer import SerializedObject
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
# These module-level objects can be monkey-patched by other libraries at runtime
|
||||
loads = pydase.utils.serialization.deserializer.loads
|
||||
dump = pydase.utils.serialization.serializer.dump
|
||||
sio_client_manager = None
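The new `sio_client_manager` hook appears to let user code inject a shared Socket.IO client manager before the server is created. A hedged sketch using python-socketio's Redis-backed manager; the Redis URL and the patching pattern are assumptions:

```python
import socketio

import pydase.server.web_server.sio_setup as sio_setup

# Must happen before pydase.Server(...).run() so setup_sio_server picks it up.
sio_setup.sio_client_manager = socketio.AsyncRedisManager("redis://localhost:6379/0")
```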
|
||||
|
||||
|
||||
class UpdateDict(TypedDict):
|
||||
@@ -55,12 +58,15 @@ class RunMethodDict(TypedDict):
|
||||
exposed DataService.
|
||||
|
||||
Attributes:
|
||||
name (str): The name of the method to be run.
|
||||
parent_path (str): The access path for the parent object of the method to be
|
||||
run. This is used to construct the full access path for the method. For
|
||||
example, for an method with access path 'attr1.list_attr[0].method_name',
|
||||
'attr1.list_attr[0]' would be the parent_path.
|
||||
kwargs (dict[str, Any]): The arguments passed to the method.
|
||||
name:
|
||||
The name of the method to be run.
|
||||
parent_path:
|
||||
The access path for the parent object of the method to be run. This is used
|
||||
to construct the full access path for the method. For example, for a method
|
||||
with access path 'attr1.list_attr[0].method_name', 'attr1.list_attr[0]'
|
||||
would be the parent_path.
|
||||
kwargs:
|
||||
The arguments passed to the method.
|
||||
"""
|
||||
|
||||
name: str
|
||||
@@ -77,23 +83,30 @@ def setup_sio_server(
|
||||
Sets up and configures a Socket.IO asynchronous server.
|
||||
|
||||
Args:
|
||||
observer (DataServiceObserver):
|
||||
The observer managing state updates and communication.
|
||||
enable_cors (bool):
|
||||
Flag indicating whether CORS should be enabled for the server.
|
||||
loop (asyncio.AbstractEventLoop):
|
||||
The event loop in which the server will run.
|
||||
observer:
|
||||
The observer managing state updates and communication.
|
||||
enable_cors:
|
||||
Flag indicating whether CORS should be enabled for the server.
|
||||
loop:
|
||||
The event loop in which the server will run.
|
||||
|
||||
Returns:
|
||||
socketio.AsyncServer: The configured Socket.IO asynchronous server.
|
||||
The configured Socket.IO asynchronous server.
|
||||
"""
|
||||
|
||||
state_manager = observer.state_manager
|
||||
|
||||
if enable_cors:
|
||||
sio = socketio.AsyncServer(async_mode="aiohttp", cors_allowed_origins="*")
|
||||
sio = socketio.AsyncServer(
|
||||
async_mode="aiohttp",
|
||||
cors_allowed_origins="*",
|
||||
client_manager=sio_client_manager,
|
||||
)
|
||||
else:
|
||||
sio = socketio.AsyncServer(async_mode="aiohttp")
|
||||
sio = socketio.AsyncServer(
|
||||
async_mode="aiohttp",
|
||||
client_manager=sio_client_manager,
|
||||
)
|
||||
|
||||
setup_sio_events(sio, state_manager)
|
||||
setup_logging_handler(sio)
|
||||
@@ -128,15 +141,15 @@ def setup_sio_server(
|
||||
def setup_sio_events(sio: socketio.AsyncServer, state_manager: StateManager) -> None: # noqa: C901
|
||||
@sio.event # type: ignore
|
||||
async def connect(sid: str, environ: Any) -> None:
|
||||
logging.debug("Client [%s] connected", click.style(str(sid), fg="cyan"))
|
||||
logger.debug("Client [%s] connected", click.style(str(sid), fg="cyan"))
|
||||
|
||||
@sio.event # type: ignore
|
||||
async def disconnect(sid: str) -> None:
|
||||
logging.debug("Client [%s] disconnected", click.style(str(sid), fg="cyan"))
|
||||
logger.debug("Client [%s] disconnected", click.style(str(sid), fg="cyan"))
|
||||
|
||||
@sio.event # type: ignore
|
||||
async def service_serialization(sid: str) -> SerializedObject:
|
||||
logging.debug(
|
||||
logger.debug(
|
||||
"Client [%s] requested service serialization",
|
||||
click.style(str(sid), fg="cyan"),
|
||||
)
|
||||
@@ -145,9 +158,7 @@ def setup_sio_events(sio: socketio.AsyncServer, state_manager: StateManager) ->
|
||||
@sio.event
|
||||
async def update_value(sid: str, data: UpdateDict) -> SerializedObject | None:
|
||||
try:
|
||||
pydase.server.web_server.api.v1.endpoints.update_value(
|
||||
state_manager=state_manager, data=data
|
||||
)
|
||||
endpoints.update_value(state_manager=state_manager, data=data)
|
||||
except Exception as e:
|
||||
logger.exception(e)
|
||||
return dump(e)
|
||||
@@ -156,7 +167,7 @@ def setup_sio_events(sio: socketio.AsyncServer, state_manager: StateManager) ->
|
||||
@sio.event
|
||||
async def get_value(sid: str, access_path: str) -> SerializedObject:
|
||||
try:
|
||||
return pydase.server.web_server.api.v1.endpoints.get_value(
|
||||
return endpoints.get_value(
|
||||
state_manager=state_manager, access_path=access_path
|
||||
)
|
||||
except Exception as e:
|
||||
@@ -165,10 +176,14 @@ def setup_sio_events(sio: socketio.AsyncServer, state_manager: StateManager) ->
|
||||
|
||||
@sio.event
|
||||
async def trigger_method(sid: str, data: TriggerMethodDict) -> Any:
|
||||
method = get_object_attr_from_path(state_manager.service, data["access_path"])
|
||||
|
||||
try:
|
||||
return pydase.server.web_server.api.v1.endpoints.trigger_method(
|
||||
state_manager=state_manager, data=data
|
||||
)
|
||||
if inspect.iscoroutinefunction(method):
|
||||
return await endpoints.trigger_async_method(
|
||||
state_manager=state_manager, data=data
|
||||
)
|
||||
return endpoints.trigger_method(state_manager=state_manager, data=data)
|
||||
except Exception as e:
|
||||
logger.error(e)
|
||||
return dump(e)
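For reference, this is roughly what a raw Socket.IO client call to this event looks like; the URL is an assumption, while the `/ws/socket.io` path and the `TriggerMethodDict` payload shape come from elsewhere in this diff:

```python
import asyncio

import socketio


async def main() -> None:
    sio = socketio.AsyncClient()
    await sio.connect("http://localhost:8001", socketio_path="/ws/socket.io")
    result = await sio.call(
        "trigger_method",
        {"access_path": "my_method"},  # serialized args/kwargs omitted here
    )
    print(result)
    await sio.disconnect()


asyncio.run(main())
```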
|
||||
|
||||
@@ -1,4 +1,5 @@
|
||||
import asyncio
|
||||
import html
|
||||
import json
|
||||
import logging
|
||||
from pathlib import Path
|
||||
@@ -6,6 +7,7 @@ from typing import Any
|
||||
|
||||
import aiohttp.web
|
||||
import aiohttp_middlewares.cors
|
||||
import anyio
|
||||
|
||||
from pydase.config import ServiceConfig, WebServerConfig
|
||||
from pydase.data_service.data_service_observer import DataServiceObserver
|
||||
@@ -20,46 +22,57 @@ from pydase.utils.helpers import (
|
||||
from pydase.utils.serialization.serializer import generate_serialized_data_paths
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
API_VERSION = "v1"
|
||||
|
||||
|
||||
class WebServer:
|
||||
"""
|
||||
Represents a web server that adheres to the AdditionalServerProtocol, designed to
|
||||
work with a DataService instance. This server facilitates client-server
|
||||
communication and state management through web protocols and socket connections.
|
||||
Represents a web server that adheres to the
|
||||
[`AdditionalServerProtocol`][pydase.server.server.AdditionalServerProtocol],
|
||||
designed to work with a [`DataService`][pydase.DataService] instance. This server
|
||||
facilitates client-server communication and state management through web protocols
|
||||
and socket connections.
|
||||
|
||||
The WebServer class initializes and manages a web server environment using FastAPI
|
||||
and Socket.IO, allowing for HTTP and WebSocket communications. It incorporates CORS
|
||||
(Cross-Origin Resource Sharing) support, custom CSS, and serves a frontend static
|
||||
files directory. It also initializes web server settings based on configuration
|
||||
files or generates default settings if necessary.
|
||||
The WebServer class initializes and manages a web server environment using aiohttp and
|
||||
Socket.IO, allowing for HTTP and Socket.IO communications. It incorporates CORS
|
||||
(Cross-Origin Resource Sharing) support, custom CSS, and serves a static files
|
||||
directory. It also initializes web server settings based on configuration files or
|
||||
generates default settings if necessary.
|
||||
|
||||
Configuration for the web server (like service configuration directory and whether
|
||||
to generate new web settings) is determined in the following order of precedence:
|
||||
|
||||
1. Values provided directly to the constructor.
|
||||
2. Environment variable settings (via configuration classes like
|
||||
`pydase.config.ServiceConfig` and `pydase.config.WebServerConfig`).
|
||||
[`ServiceConfig`][pydase.config.ServiceConfig] and
|
||||
[`WebServerConfig`][pydase.config.WebServerConfig]).
|
||||
3. Default values defined in the configuration classes.
|
||||
|
||||
Args:
|
||||
data_service_observer (DataServiceObserver): Observer for the DataService,
|
||||
handling state updates and communication to connected clients.
|
||||
host (str): Hostname or IP address where the server is accessible. Commonly
|
||||
'0.0.0.0' to bind to all network interfaces.
|
||||
port (int): Port number on which the server listens. Typically in the range
|
||||
1024-65535 (non-standard ports).
|
||||
css (str | Path | None, optional): Path to a custom CSS file for styling the
|
||||
frontend. If None, no custom styles are applied. Defaults to None.
|
||||
enable_cors (bool, optional): Flag to enable or disable CORS policy. When True,
|
||||
CORS is enabled, allowing cross-origin requests. Defaults to True.
|
||||
config_dir (Path | None, optional): Path to the configuration
|
||||
directory where the web settings will be stored. Defaults to
|
||||
`pydase.config.ServiceConfig().config_dir`.
|
||||
generate_new_web_settings (bool | None, optional): Flag to enable or disable
|
||||
generation of new web settings if the configuration file is missing. Defaults
|
||||
to `pydase.config.WebServerConfig().generate_new_web_settings`.
|
||||
**kwargs (Any): Additional unused keyword arguments.
|
||||
data_service_observer:
|
||||
Observer for the [`DataService`][pydase.DataService], handling state updates
|
||||
and communication to connected clients.
|
||||
host:
|
||||
Hostname or IP address where the server is accessible. Commonly '0.0.0.0'
|
||||
to bind to all network interfaces.
|
||||
port:
|
||||
Port number on which the server listens. Typically in the range 1024-65535
|
||||
(non-standard ports).
|
||||
css:
|
||||
Path to a custom CSS file for styling the frontend. If None, no custom
|
||||
styles are applied. Defaults to None.
|
||||
favicon_path:
|
||||
Path to a custom favicon.ico file. Defaults to None.
|
||||
enable_cors:
|
||||
Flag to enable or disable CORS policy. When True, CORS is enabled, allowing
|
||||
cross-origin requests. Defaults to True.
|
||||
config_dir:
|
||||
Path to the configuration directory where the web settings will be stored.
|
||||
Defaults to
|
||||
[`ServiceConfig().config_dir`][pydase.config.ServiceConfig.config_dir].
|
||||
generate_web_settings:
|
||||
Flag to enable or disable generation of new web settings if the
|
||||
configuration file is missing. Defaults to
|
||||
[`WebServerConfig().generate_web_settings`][pydase.config.WebServerConfig.generate_web_settings].
|
||||
"""
|
||||
|
||||
def __init__( # noqa: PLR0913
|
||||
@@ -67,7 +80,9 @@ class WebServer:
|
||||
data_service_observer: DataServiceObserver,
|
||||
host: str,
|
||||
port: int,
|
||||
*,
|
||||
css: str | Path | None = None,
|
||||
favicon_path: str | Path | None = None,
|
||||
enable_cors: bool = True,
|
||||
config_dir: Path = ServiceConfig().config_dir,
|
||||
generate_web_settings: bool = WebServerConfig().generate_web_settings,
|
||||
@@ -81,6 +96,11 @@ class WebServer:
|
||||
self.css = css
|
||||
self.enable_cors = enable_cors
|
||||
self.frontend_src = frontend_src
|
||||
self.favicon_path: Path | str = favicon_path # type: ignore
|
||||
|
||||
if self.favicon_path is None:
|
||||
self.favicon_path = self.frontend_src / "favicon.ico"
|
||||
|
||||
self._service_config_dir = config_dir
|
||||
self._generate_web_settings = generate_web_settings
|
||||
self._loop: asyncio.AbstractEventLoop
|
||||
@@ -90,7 +110,49 @@ class WebServer:
|
||||
self._loop = asyncio.get_running_loop()
|
||||
self._sio = setup_sio_server(self.observer, self.enable_cors, self._loop)
|
||||
|
||||
async def index(request: aiohttp.web.Request) -> aiohttp.web.FileResponse:
|
||||
async def index(
|
||||
request: aiohttp.web.Request,
|
||||
) -> aiohttp.web.Response | aiohttp.web.FileResponse:
|
||||
forwarded_proto = request.headers.get("X-Forwarded-Proto", "http")
|
||||
escaped_proto = html.escape(forwarded_proto)
|
||||
|
||||
# Read the index.html file
|
||||
index_file_path = self.frontend_src / "index.html"
|
||||
|
||||
async with await anyio.open_file(index_file_path) as f:
|
||||
html_content = await f.read()
|
||||
|
||||
# Inject the escaped forwarded protocol into the HTML
|
||||
modified_html = html_content.replace(
|
||||
'window.__FORWARDED_PROTO__ = "";',
|
||||
f'window.__FORWARDED_PROTO__ = "{escaped_proto}";',
|
||||
)
|
||||
|
||||
# Read the X-Forwarded-Prefix header from the request
|
||||
forwarded_prefix = request.headers.get("X-Forwarded-Prefix", "")
|
||||
|
||||
if forwarded_prefix != "":
|
||||
# Escape the forwarded prefix to prevent XSS
|
||||
escaped_prefix = html.escape(forwarded_prefix)
|
||||
|
||||
# Inject the escaped forwarded prefix into the HTML
|
||||
modified_html = modified_html.replace(
|
||||
'window.__FORWARDED_PREFIX__ = "";',
|
||||
f'window.__FORWARDED_PREFIX__ = "{escaped_prefix}";',
|
||||
)
|
||||
modified_html = modified_html.replace(
|
||||
"/assets/",
|
||||
f"{escaped_prefix}/assets/",
|
||||
)
|
||||
|
||||
modified_html = modified_html.replace(
|
||||
"/favicon.ico",
|
||||
f"{escaped_prefix}/favicon.ico",
|
||||
)
|
||||
|
||||
return aiohttp.web.Response(
|
||||
text=modified_html, content_type="text/html"
|
||||
)
|
||||
return aiohttp.web.FileResponse(self.frontend_src / "index.html")
|
||||
|
||||
app = aiohttp.web.Application()
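The header handling above can be exercised by requesting the index page with the proxy headers set by hand; the host and the prefix value are made up for this sketch:

```python
import asyncio

import aiohttp


async def fetch_index() -> None:
    async with aiohttp.ClientSession() as session:
        async with session.get(
            "http://localhost:8001/",
            headers={
                "X-Forwarded-Proto": "https",
                "X-Forwarded-Prefix": "/instruments/my-service",
            },
        ) as resp:
            body = await resp.text()
            # The escaped prefix now shows up in window.__FORWARDED_PREFIX__ and in
            # the rewritten /assets/ and /favicon.ico URLs.
            print("/instruments/my-service/assets/" in body)


asyncio.run(fetch_index())
```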
|
||||
@@ -104,6 +166,7 @@ class WebServer:
|
||||
# Define routes
|
||||
self._sio.attach(app, socketio_path="/ws/socket.io")
|
||||
app.router.add_static("/assets", self.frontend_src / "assets")
|
||||
app.router.add_get("/favicon.ico", self._favicon_route)
|
||||
app.router.add_get("/service-properties", self._service_properties_route)
|
||||
app.router.add_get("/web-settings", self._web_settings_route)
|
||||
app.router.add_get("/custom.css", self._styles_route)
|
||||
@@ -121,6 +184,12 @@ class WebServer:
|
||||
shutdown_timeout=0.1,
|
||||
)
|
||||
|
||||
async def _favicon_route(
|
||||
self,
|
||||
request: aiohttp.web.Request,
|
||||
) -> aiohttp.web.FileResponse:
|
||||
return aiohttp.web.FileResponse(self.favicon_path)
|
||||
|
||||
async def _service_properties_route(
|
||||
self,
|
||||
request: aiohttp.web.Request,
|
||||
|
||||
0
src/pydase/task/__init__.py
Normal file
46
src/pydase/task/autostart.py
Normal file
@@ -0,0 +1,46 @@
|
||||
from typing import Any
|
||||
|
||||
import pydase.data_service.data_service
|
||||
import pydase.task.task
|
||||
from pydase.task.task_status import TaskStatus
|
||||
from pydase.utils.helpers import is_property_attribute
|
||||
|
||||
|
||||
def autostart_service_tasks(
|
||||
service: pydase.data_service.data_service.DataService,
|
||||
) -> None:
|
||||
"""Starts the service tasks defined with the `autostart` keyword argument.
|
||||
|
||||
This method goes through the attributes of the passed service and its nested
|
||||
[`DataService`][pydase.DataService] instances and calls the start method on
|
||||
autostart-tasks.
|
||||
"""
|
||||
|
||||
for attr in dir(service):
|
||||
if is_property_attribute(service, attr) or attr in {
|
||||
"_observers",
|
||||
"__dict__",
|
||||
}: # prevent eval of property attrs and recursion
|
||||
continue
|
||||
|
||||
val = getattr(service, attr)
|
||||
if isinstance(val, pydase.task.task.Task):
|
||||
if val.autostart and val.status == TaskStatus.NOT_RUNNING:
|
||||
val.start()
|
||||
else:
|
||||
continue
|
||||
else:
|
||||
autostart_nested_service_tasks(val)
|
||||
|
||||
|
||||
def autostart_nested_service_tasks(
|
||||
service: pydase.data_service.data_service.DataService | list[Any] | dict[Any, Any],
|
||||
) -> None:
|
||||
if isinstance(service, pydase.DataService):
|
||||
autostart_service_tasks(service)
|
||||
elif isinstance(service, list):
|
||||
for entry in service:
|
||||
autostart_nested_service_tasks(entry)
|
||||
elif isinstance(service, dict):
|
||||
for entry in service.values():
|
||||
autostart_nested_service_tasks(entry)
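A sketch of the service tree this walk covers: `@task(autostart=True)` tasks on nested services, including services held in lists or dicts, are started recursively. The class names are illustrative only:

```python
import asyncio

import pydase
from pydase.task.decorator import task


class Channel(pydase.DataService):
    @task(autostart=True)
    async def poll(self) -> None:
        while True:
            await asyncio.sleep(1)  # placeholder polling work


class Rack(pydase.DataService):
    def __init__(self) -> None:
        super().__init__()
        self.channels = [Channel(), Channel()]  # list entries are traversed too


if __name__ == "__main__":
    # autostart_service_tasks(Rack()) is invoked for us inside pydase.Server.
    pydase.Server(service=Rack()).run()
```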
|
||||
145
src/pydase/task/decorator.py
Normal file
@@ -0,0 +1,145 @@
|
||||
import logging
|
||||
from collections.abc import Callable, Coroutine
|
||||
from typing import Any, Generic, TypeVar, overload
|
||||
|
||||
from pydase.data_service.data_service import DataService
|
||||
from pydase.task.task import Task
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
R = TypeVar("R")
|
||||
|
||||
|
||||
class PerInstanceTaskDescriptor(Generic[R]):
|
||||
"""
|
||||
A descriptor class that provides a unique [`Task`][pydase.task.task.Task] object
|
||||
for each instance of a [`DataService`][pydase.data_service.data_service.DataService]
|
||||
class.
|
||||
|
||||
The `PerInstanceTaskDescriptor` is used to transform an asynchronous function into a
|
||||
task that is managed independently for each instance of a `DataService` subclass.
|
||||
This allows tasks to be initialized, started, and stopped on a per-instance basis,
|
||||
providing better control over task execution within the service.
|
||||
|
||||
The `PerInstanceTaskDescriptor` is not intended to be used directly. Instead, it is
|
||||
used internally by the `@task` decorator to manage task objects for each instance of
|
||||
the service class.
|
||||
"""
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
func: Callable[[Any], Coroutine[None, None, R]]
|
||||
| Callable[[], Coroutine[None, None, R]],
|
||||
autostart: bool = False,
|
||||
) -> None:
|
||||
self.__func = func
|
||||
self.__autostart = autostart
|
||||
self.__task_instances: dict[object, Task[R]] = {}
|
||||
|
||||
def __set_name__(self, owner: type[DataService], name: str) -> None:
|
||||
"""Stores the name of the task within the owning class. This method is called
|
||||
automatically when the descriptor is assigned to a class attribute.
|
||||
"""
|
||||
|
||||
self.__task_name = name
|
||||
|
||||
@overload
|
||||
def __get__(
|
||||
self, instance: None, owner: type[DataService]
|
||||
) -> "PerInstanceTaskDescriptor[R]":
|
||||
"""Returns the descriptor itself when accessed through the class."""
|
||||
|
||||
@overload
|
||||
def __get__(self, instance: DataService, owner: type[DataService]) -> Task[R]:
|
||||
"""Returns the `Task` object associated with the specific `DataService`
|
||||
instance.
|
||||
If no task exists for the instance, a new `Task` object is created and stored
|
||||
in the `__task_instances` dictionary.
|
||||
"""
|
||||
|
||||
def __get__(
|
||||
self, instance: DataService | None, owner: type[DataService]
|
||||
) -> "Task[R] | PerInstanceTaskDescriptor[R]":
|
||||
if instance is None:
|
||||
return self
|
||||
|
||||
# Create a new Task object for this instance, using the function's name.
|
||||
if instance not in self.__task_instances:
|
||||
self.__task_instances[instance] = instance._initialise_new_objects(
|
||||
self.__task_name,
|
||||
Task(self.__func.__get__(instance, owner), autostart=self.__autostart),
|
||||
)
|
||||
|
||||
return self.__task_instances[instance]
|
||||
|
||||
|
||||
def task(
|
||||
*, autostart: bool = False
|
||||
) -> Callable[
|
||||
[
|
||||
Callable[[Any], Coroutine[None, None, R]]
|
||||
| Callable[[], Coroutine[None, None, R]]
|
||||
],
|
||||
PerInstanceTaskDescriptor[R],
|
||||
]:
|
||||
"""
|
||||
A decorator to define an asynchronous function as a per-instance task within a
|
||||
[`DataService`][pydase.DataService] class.
|
||||
|
||||
This decorator transforms an asynchronous function into a
|
||||
[`Task`][pydase.task.task.Task] object that is unique to each instance of the
|
||||
`DataService` class. The resulting `Task` object provides methods like `start()`
|
||||
and `stop()` to control the execution of the task, and manages the task's lifecycle
|
||||
independently for each instance of the service.
|
||||
|
||||
The decorator is particularly useful for defining tasks that need to run
|
||||
periodically or perform asynchronous operations, such as polling data sources,
|
||||
updating databases, or any recurring job that should be managed within the context
|
||||
of a `DataService`.
|
||||
|
||||
|
||||
Args:
|
||||
autostart:
|
||||
If set to True, the task will automatically start when the service is
|
||||
initialized. Defaults to False.
|
||||
|
||||
Returns:
|
||||
A decorator that wraps an asynchronous function in a
|
||||
[`PerInstanceTaskDescriptor`][pydase.task.decorator.PerInstanceTaskDescriptor]
|
||||
object, which, when accessed, provides an instance-specific
|
||||
[`Task`][pydase.task.task.Task] object.
|
||||
|
||||
Example:
|
||||
```python
|
||||
import asyncio
|
||||
|
||||
import pydase
|
||||
from pydase.task.decorator import task
|
||||
|
||||
|
||||
class MyService(pydase.DataService):
|
||||
@task(autostart=True)
|
||||
async def my_task(self) -> None:
|
||||
while True:
|
||||
# Perform some periodic work
|
||||
await asyncio.sleep(1)
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
service = MyService()
|
||||
pydase.Server(service=service).run()
|
||||
```
|
||||
|
||||
In this example, `my_task` is defined as a task using the `@task` decorator, and
|
||||
it will start automatically when the service is initialized because
|
||||
`autostart=True` is set. You can manually start or stop the task using
|
||||
`service.my_task.start()` and `service.my_task.stop()`, respectively.
|
||||
"""
|
||||
|
||||
def decorator(
|
||||
func: Callable[[Any], Coroutine[None, None, R]]
|
||||
| Callable[[], Coroutine[None, None, R]],
|
||||
) -> PerInstanceTaskDescriptor[R]:
|
||||
return PerInstanceTaskDescriptor(func, autostart=autostart)
|
||||
|
||||
return decorator
|
||||
148
src/pydase/task/task.py
Normal file
@@ -0,0 +1,148 @@
|
||||
import asyncio
|
||||
import inspect
|
||||
import logging
|
||||
from collections.abc import Callable, Coroutine
|
||||
from typing import (
|
||||
Generic,
|
||||
TypeVar,
|
||||
)
|
||||
|
||||
import pydase.data_service.data_service
|
||||
from pydase.task.task_status import TaskStatus
|
||||
from pydase.utils.helpers import current_event_loop_exists
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
R = TypeVar("R")
|
||||
|
||||
|
||||
class Task(pydase.data_service.data_service.DataService, Generic[R]):
|
||||
"""A class representing a task within the `pydase` framework.
|
||||
|
||||
The `Task` class wraps an asynchronous function and provides methods to manage its
|
||||
lifecycle, such as `start()` and `stop()`. It is typically used to perform periodic
|
||||
or recurring jobs in a [`DataService`][pydase.DataService], like reading
|
||||
sensor data, updating databases, or executing other background tasks.
|
||||
|
||||
When a function is decorated with the [`@task`][pydase.task.decorator.task]
|
||||
decorator, it is replaced by a `Task` instance that controls the execution of the
|
||||
original function.
|
||||
|
||||
Args:
|
||||
func:
|
||||
The asynchronous function that this task wraps. It must be a coroutine
|
||||
without arguments.
|
||||
autostart:
|
||||
If set to True, the task will automatically start when the service is
|
||||
initialized. Defaults to False.
|
||||
|
||||
Example:
|
||||
```python
|
||||
import asyncio
|
||||
|
||||
import pydase
|
||||
from pydase.task.decorator import task
|
||||
|
||||
|
||||
class MyService(pydase.DataService):
|
||||
@task(autostart=True)
|
||||
async def my_task(self) -> None:
|
||||
while True:
|
||||
# Perform some periodic work
|
||||
await asyncio.sleep(1)
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
service = MyService()
|
||||
pydase.Server(service=service).run()
|
||||
```
|
||||
|
||||
In this example, `my_task` is defined as a task using the `@task` decorator, and
|
||||
it will start automatically when the service is initialized because
|
||||
`autostart=True` is set. You can manually start or stop the task using
|
||||
`service.my_task.start()` and `service.my_task.stop()`, respectively.
|
||||
"""
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
func: Callable[[], Coroutine[None, None, R | None]],
|
||||
*,
|
||||
autostart: bool = False,
|
||||
) -> None:
|
||||
super().__init__()
|
||||
self._autostart = autostart
|
||||
self._func_name = func.__name__
|
||||
self._func = func
|
||||
self._task: asyncio.Task[R | None] | None = None
|
||||
self._status = TaskStatus.NOT_RUNNING
|
||||
self._result: R | None = None
|
||||
|
||||
if not current_event_loop_exists():
|
||||
self._loop = asyncio.new_event_loop()
|
||||
asyncio.set_event_loop(self._loop)
|
||||
else:
|
||||
self._loop = asyncio.get_event_loop()
|
||||
|
||||
@property
|
||||
def autostart(self) -> bool:
|
||||
"""Defines if the task should be started automatically when the
|
||||
[`Server`][pydase.Server] starts."""
|
||||
return self._autostart
|
||||
|
||||
@property
|
||||
def status(self) -> TaskStatus:
|
||||
"""Returns the current status of the task."""
|
||||
return self._status
|
||||
|
||||
def start(self) -> None:
|
||||
"""Starts the asynchronous task if it is not already running."""
|
||||
if self._task:
|
||||
return
|
||||
|
||||
def task_done_callback(task: asyncio.Task[R | None]) -> None:
|
||||
"""Handles tasks that have finished.
|
||||
|
||||
Updates the task status, calls the defined callbacks, and logs and re-raises
|
||||
exceptions.
|
||||
"""
|
||||
|
||||
self._task = None
|
||||
self._status = TaskStatus.NOT_RUNNING
|
||||
|
||||
exception = task.exception()
|
||||
if exception is not None:
|
||||
# Handle the exception, or you can re-raise it.
|
||||
logger.error(
|
||||
"Task '%s' encountered an exception: %s: %s",
|
||||
self._func_name,
|
||||
type(exception).__name__,
|
||||
exception,
|
||||
)
|
||||
raise exception
|
||||
|
||||
self._result = task.result()
|
||||
|
||||
async def run_task() -> R | None:
|
||||
if inspect.iscoroutinefunction(self._func):
|
||||
logger.info("Starting task %r", self._func_name)
|
||||
self._status = TaskStatus.RUNNING
|
||||
res: Coroutine[None, None, R | None] = self._func()
|
||||
try:
|
||||
return await res
|
||||
except asyncio.CancelledError:
|
||||
logger.info("Task '%s' was cancelled", self._func_name)
|
||||
return None
|
||||
logger.warning(
|
||||
"Cannot start task %r. Function has not been bound yet", self._func_name
|
||||
)
|
||||
return None
|
||||
|
||||
logger.info("Creating task %r", self._func_name)
|
||||
self._task = self._loop.create_task(run_task())
|
||||
self._task.add_done_callback(task_done_callback)
|
||||
|
||||
def stop(self) -> None:
|
||||
"""Stops the running asynchronous task by cancelling it."""
|
||||
|
||||
if self._task:
|
||||
self._task.cancel()
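Putting the pieces together, the per-instance `Task` object exposes `status`, `start()` and `stop()`, so a task can also be controlled from other methods of the owning service. A small sketch; the method names are illustrative:

```python
import asyncio

import pydase
from pydase.task.decorator import task
from pydase.task.task_status import TaskStatus


class MyService(pydase.DataService):
    @task()  # autostart defaults to False
    async def acquisition(self) -> None:
        while True:
            await asyncio.sleep(1)  # placeholder work

    def ensure_acquisition_running(self) -> None:
        # `self.acquisition` is the Task object created by the @task descriptor.
        if self.acquisition.status == TaskStatus.NOT_RUNNING:
            self.acquisition.start()
```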
|
||||
8
src/pydase/task/task_status.py
Normal file
@@ -0,0 +1,8 @@
|
||||
import enum
|
||||
|
||||
|
||||
class TaskStatus(enum.Enum):
|
||||
"""Possible statuses of a [`Task`][pydase.task.task.Task]."""
|
||||
|
||||
RUNNING = "running"
|
||||
NOT_RUNNING = "not_running"
|
||||
@@ -21,18 +21,20 @@ def convert_to_quantity(
|
||||
Convert a given value into a pint.Quantity object with the specified unit.
|
||||
|
||||
Args:
|
||||
value (QuantityDict | float | int | Quantity):
|
||||
value:
|
||||
The value to be converted into a Quantity object.
|
||||
|
||||
- If value is a float or int, it will be directly converted to the specified
|
||||
unit.
|
||||
- If value is a dict, it must have keys 'magnitude' and 'unit' to represent
|
||||
the value and unit.
|
||||
- If value is a Quantity object, it will remain unchanged.\n
|
||||
unit (str, optional): The target unit for conversion. If empty and value is not
|
||||
a Quantity object, it will assume a unitless quantity.
|
||||
unit:
|
||||
The target unit for conversion. If empty and value is not a Quantity object,
|
||||
it will assume a unitless quantity.
|
||||
|
||||
Returns:
|
||||
Quantity: The converted value as a pint.Quantity object with the specified unit.
|
||||
The converted value as a pint.Quantity object with the specified unit.
|
||||
|
||||
Examples:
|
||||
>>> convert_to_quantity(5, 'm')
|
||||
@@ -42,9 +44,9 @@ def convert_to_quantity(
|
||||
>>> convert_to_quantity(10.0 * u.units.V)
|
||||
<Quantity(10.0, 'volt')>
|
||||
|
||||
Notes:
|
||||
- If unit is not provided and value is a float or int, the resulting Quantity
|
||||
will be unitless.
|
||||
Note:
|
||||
If unit is not provided and value is a float or int, the resulting Quantity will
|
||||
be unitless.
|
||||
"""
|
||||
|
||||
if isinstance(value, int | float):
|
||||
|
||||
@@ -10,9 +10,9 @@ class FunctionDefinitionError(Exception):
|
||||
|
||||
|
||||
def frontend(func: Callable[..., Any]) -> Callable[..., Any]:
|
||||
"""
|
||||
Decorator to mark a DataService method for frontend rendering. Ensures that the
|
||||
method does not contain arguments, as they are not supported for frontend rendering.
|
||||
"""Decorator to mark a [`DataService`][pydase.DataService] method for frontend
|
||||
rendering. Ensures that the method does not contain arguments, as they are not
|
||||
supported for frontend rendering.
|
||||
"""
|
||||
|
||||
if function_has_arguments(func):
|
||||
|
||||
@@ -114,8 +114,6 @@ def get_class_and_instance_attributes(obj: object) -> dict[str, Any]:
|
||||
|
||||
If an attribute exists at both the instance and class level, the value from the
|
||||
instance attribute takes precedence.
|
||||
The __root__ object is removed as this will lead to endless recursion in the for
|
||||
loops.
|
||||
"""
|
||||
|
||||
return dict(chain(type(obj).__dict__.items(), obj.__dict__.items()))
|
||||
@@ -162,6 +160,12 @@ def get_object_attr_from_path(target_obj: Any, path: str) -> Any:
|
||||
return get_object_by_path_parts(target_obj, path_parts)
|
||||
|
||||
|
||||
def get_task_class() -> type:
|
||||
from pydase.task.task import Task
|
||||
|
||||
return Task
|
||||
|
||||
|
||||
def get_component_classes() -> list[type]:
|
||||
"""
|
||||
Returns references to the component classes in a list.
|
||||
@@ -196,3 +200,48 @@ def function_has_arguments(func: Callable[..., Any]) -> bool:
|
||||
|
||||
# Check if there are any parameters left which would indicate additional arguments.
|
||||
return len(parameters) > 0
|
||||
|
||||
|
||||
def is_descriptor(obj: object) -> bool:
|
||||
"""Check if an object is a descriptor."""
|
||||
|
||||
# Exclude functions, methods, builtins and properties
|
||||
if (
|
||||
inspect.isfunction(obj)
|
||||
or inspect.ismethod(obj)
|
||||
or inspect.isbuiltin(obj)
|
||||
or isinstance(obj, property)
|
||||
):
|
||||
return False
|
||||
|
||||
# Check if it has any descriptor methods
|
||||
return any(hasattr(obj, method) for method in ("__get__", "__set__", "__delete__"))
|
||||
|
||||
|
||||
def current_event_loop_exists() -> bool:
|
||||
"""Check if an event loop has been set."""
|
||||
import asyncio
|
||||
|
||||
return asyncio.get_event_loop_policy()._local._loop is not None # type: ignore
|
||||
|
||||
|
||||
def normalize_full_access_path_string(s: str) -> str:
|
||||
"""Normalizes a string representing a full access path by converting double quotes
|
||||
to single quotes.
|
||||
|
||||
This function is useful for ensuring consistency in strings that represent access
|
||||
paths containing dictionary keys, by replacing all double quotes (`"`) with single
|
||||
quotes (`'`).
|
||||
|
||||
Args:
|
||||
s (str): The input string to be normalized.
|
||||
|
||||
Returns:
|
||||
A new string with all double quotes replaced by single quotes.
|
||||
|
||||
Example:
|
||||
>>> normalize_full_access_path_string('dictionary["first"].my_task')
|
||||
"dictionary['first'].my_task"
|
||||
"""
|
||||
|
||||
return s.replace('"', "'")
|
||||
|
||||
@@ -33,7 +33,7 @@ LOGGING_CONFIG = {
|
||||
"default": {
|
||||
"formatter": "default",
|
||||
"class": "logging.StreamHandler",
|
||||
"stream": "ext://sys.stderr",
|
||||
"stream": "ext://sys.stdout",
|
||||
},
|
||||
},
|
||||
"loggers": {
|
||||
|
||||
@@ -6,7 +6,9 @@ from typing import TYPE_CHECKING, Any, NoReturn, cast
|
||||
import pydase
|
||||
import pydase.components
|
||||
import pydase.units as u
|
||||
from pydase.utils.helpers import get_component_classes
|
||||
from pydase.utils.helpers import (
|
||||
get_component_classes,
|
||||
)
|
||||
from pydase.utils.serialization.types import (
|
||||
SerializedDatetime,
|
||||
SerializedException,
|
||||
@@ -23,6 +25,7 @@ logger = logging.getLogger(__name__)

class Deserializer:
    @classmethod
    def deserialize(cls, serialized_object: SerializedObject) -> Any:
        """Deserialize `serialized_object` (a `dict`) to a Python object."""
        type_handler: dict[str | None, None | Callable[..., Any]] = {
            None: None,
            "int": cls.deserialize_primitive,

@@ -48,9 +51,9 @@ class Deserializer:
            return handler(serialized_object)

        # Custom types like Components or DataService classes
        component_class = cls.get_component_class(serialized_object["type"])
        if component_class:
            return cls.deserialize_component_type(serialized_object, component_class)
        service_base_class = cls.get_service_base_class(serialized_object["type"])
        if service_base_class:
            return cls.deserialize_data_service(serialized_object, service_base_class)

        return None

@@ -109,11 +112,11 @@ class Deserializer:
        raise exception(serialized_object["value"])

    @staticmethod
    def get_component_class(type_name: str | None) -> type | None:
    def get_service_base_class(type_name: str | None) -> type | None:
        for component_class in get_component_classes():
            if type_name == component_class.__name__:
                return component_class
        if type_name == "DataService":
        if type_name in ("DataService", "Task"):
            import pydase

            return pydase.DataService

@@ -136,7 +139,7 @@ class Deserializer:
        return property(get, set)

    @classmethod
    def deserialize_component_type(
    def deserialize_data_service(
        cls, serialized_object: SerializedObject, base_class: type
    ) -> Any:
        def create_proxy_class(serialized_object: SerializedObject) -> type:

@@ -159,4 +162,5 @@ class Deserializer:


def loads(serialized_object: SerializedObject) -> Any:
    """Deserialize `serialized_object` (a `dict`) to a Python object."""
    return Deserializer.deserialize(serialized_object)

@@ -9,12 +9,14 @@ from typing import TYPE_CHECKING, Any, Literal, cast

import pydase.units as u
from pydase.data_service.abstract_data_service import AbstractDataService
from pydase.data_service.task_manager import TaskStatus
from pydase.task.task_status import TaskStatus
from pydase.utils.decorators import render_in_frontend
from pydase.utils.helpers import (
    get_attribute_doc,
    get_component_classes,
    get_data_service_class_reference,
    get_task_class,
    is_property_attribute,
    parse_full_access_path,
    parse_serialized_key,
)

@@ -40,6 +42,8 @@ from pydase.utils.serialization.types import (

if TYPE_CHECKING:
    from collections.abc import Callable

    from pydase.client.proxy_class import ProxyClass


logger = logging.getLogger(__name__)

@@ -52,8 +56,28 @@ class SerializationPathError(Exception):


class Serializer:
    """Serializes objects into
    [`SerializedObject`][pydase.utils.serialization.types.SerializedObject]
    representations.
    """

    @classmethod
    def serialize_object(cls, obj: Any, access_path: str = "") -> SerializedObject:  # noqa: C901
        """Serialize `obj` to a
        [`SerializedObject`][pydase.utils.serialization.types.SerializedObject].

        Args:
            obj:
                Object to be serialized.
            access_path:
                String corresponding to the full access path of the object. This will be
                prepended to the full_access_path in the SerializedObject entries.

        Returns:
            Dictionary representation of `obj`.
        """
        from pydase.client.client import ProxyClass

        result: SerializedObject

        if isinstance(obj, Exception):

@@ -62,6 +86,9 @@ class Serializer:
        elif isinstance(obj, datetime):
            result = cls._serialize_datetime(obj, access_path=access_path)

        elif isinstance(obj, ProxyClass):
            result = cls._serialize_proxy_class(obj, access_path=access_path)

        elif isinstance(obj, AbstractDataService):
            result = cls._serialize_data_service(obj, access_path=access_path)

@@ -261,6 +288,10 @@ class Serializer:
        if component_base_cls:
            obj_type = component_base_cls.__name__  # type: ignore

        elif isinstance(obj, get_task_class()):
            # Check if obj is a pydase task
            obj_type = "Task"

        # Get the set of DataService class attributes
        data_service_attr_set = set(dir(get_data_service_class_reference()))
        # Get the set of the object attributes

@@ -275,29 +306,15 @@ class Serializer:
            if key.startswith("_"):
                continue  # Skip attributes that start with underscore

            # Skip keys that start with "start_" or "stop_" and end with an async
            # method name
            if key.startswith(("start_", "stop_")) and key.split("_", 1)[1] in {
                name
                for name, _ in inspect.getmembers(
                    obj, predicate=inspect.iscoroutinefunction
                )
            }:
                continue

            val = getattr(obj, key)

            path = f"{access_path}.{key}" if access_path else key
            serialized_object = cls.serialize_object(val, access_path=path)

            # If there's a running task for this method
            if serialized_object["type"] == "method" and key in obj._task_manager.tasks:
                serialized_object["value"] = TaskStatus.RUNNING.name

            value[key] = serialized_object

            # If the DataService attribute is a property
            if isinstance(getattr(obj.__class__, key, None), property):
            if is_property_attribute(obj, key):
                prop: property = getattr(obj.__class__, key)
                value[key]["readonly"] = prop.fset is None
                value[key]["doc"] = get_attribute_doc(prop)  # overwrite the doc

@@ -311,8 +328,28 @@ class Serializer:
            "doc": doc,
        }

    @classmethod
    def _serialize_proxy_class(
        cls, obj: ProxyClass, access_path: str = ""
    ) -> SerializedDataService:
        # Get serialization value from the remote service and adapt the full_access_path
        return add_prefix_to_full_access_path(obj.serialize(), access_path)


def dump(obj: Any) -> SerializedObject:
    """Serialize `obj` to a
    [`SerializedObject`][pydase.utils.serialization.types.SerializedObject].

    The [`Serializer`][pydase.utils.serialization.serializer.Serializer] is used for
    encoding.

    Args:
        obj:
            Object to be serialized.

    Returns:
        Dictionary representation of `obj`.
    """
    return Serializer.serialize_object(obj)

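As a hedged illustration of how `dump` and the deserializer's `loads` fit together, a minimal round trip for a primitive value; the exact key set in the comment is an assumption based on the serialized structures used in the tests further down:

```python
from pydase.utils.serialization.deserializer import loads
from pydase.utils.serialization.serializer import dump

serialized = dump(3.14)
# Expected shape (assumption): a SerializedObject dict roughly like
# {"full_access_path": "", "type": "float", "value": 3.14, "readonly": False, "doc": None}
assert serialized["value"] == 3.14

# loads() reverses the encoding and returns the plain Python object.
assert loads(serialized) == 3.14
```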
@@ -321,12 +358,13 @@ def set_nested_value_by_path(
) -> None:
    """
    Set a value in a nested dictionary structure, which conforms to the serialization
    format used by `pydase.utils.serializer.Serializer`, using a dot-notation path.
    format used by [`Serializer`][pydase.utils.serialization.serializer.Serializer],
    using a dot-notation path.

    Args:
        serialization_dict:
            The base dictionary representing data serialized with
            `pydase.utils.serializer.Serializer`.
            [`Serializer`][pydase.utils.serialization.serializer.Serializer].
        path:
            The dot-notation path (e.g., 'attr1.attr2[0].attr3') indicating where to
            set the value.

@@ -334,8 +372,8 @@ def set_nested_value_by_path(
            The new value to set at the specified path.

    Note:
        - If the index equals the length of the list, the function will append the
          serialized representation of the 'value' to the list.
        If the index equals the length of the list, the function will append the
        serialized representation of the 'value' to the list.
    """

    path_parts = parse_full_access_path(path)

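A short usage sketch based on the docstring above. The `MyService` layout is a hypothetical example, and the assertions reflect the behaviour the docstring describes (including the append-by-next-index rule) rather than verified output:

```python
import pydase
from pydase.utils.serialization.serializer import dump, set_nested_value_by_path


class MyService(pydase.DataService):  # hypothetical example service
    temperature = 20.5
    readings = [1.0, 2.0]


# The cache format is the "value" dict of a serialized service instance.
serialization_dict = dump(MyService())["value"]

# Overwrite an existing attribute ...
set_nested_value_by_path(serialization_dict, "temperature", 21.0)
assert serialization_dict["temperature"]["value"] == 21.0

# ... and append to a list by addressing the next sequential index.
set_nested_value_by_path(serialization_dict, "readings[2]", 3.0)
assert serialization_dict["readings"]["value"][2]["value"] == 3.0
```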
@@ -438,26 +476,24 @@ def get_container_item_by_key(
) -> SerializedObject:
    """
    Retrieve an item from a container specified by the passed key. Add an item to the
    container if allow_append is set to True.
    container if `allow_append` is set to `True`.

    If specified keys or indexes do not exist, the function can append new elements to
    dictionaries and to lists if `allow_append` is True and the missing element is
    exactly the next sequential index (for lists).

    Args:
        container: dict[str, SerializedObject] | list[SerializedObject]
        container:
            The container representing serialized data.
        key: str
        key:
            The key name representing the attribute in the dictionary, which may include
            direct keys or indexes (e.g., 'attr_name', '["key"]' or '[0]').
        allow_append: bool
        allow_append:
            Flag to allow appending a new entry if the specified index is out of range
            by exactly one position.

    Returns:
        SerializedObject
            The dictionary or list item corresponding to the specified attribute and
            index.
        The dictionary or list item corresponding to the specified attribute and index.

    Raises:
        SerializationPathError:

@@ -485,13 +521,12 @@ def get_data_paths_from_serialized_object(  # noqa: C901
    Recursively extracts full access paths from a serialized object.

    Args:
        serialized_obj (SerializedObject):
        serialized_obj:
            The dictionary representing the serialization of an object. Produced by
            `pydase.utils.serializer.Serializer`.

    Returns:
        list[str]:
            A list of strings, each representing a full access path in the serialized
        A list of strings, each representing a full access path in the serialized
        object.
    """

@@ -532,12 +567,11 @@ def generate_serialized_data_paths(
    Recursively extracts full access paths from a serialized DataService class instance.

    Args:
        data (dict[str, SerializedObject]):
        data:
            The value of the "value" key of a serialized DataService class instance.

    Returns:
        list[str]:
            A list of strings, each representing a full access path in the serialized
        A list of strings, each representing a full access path in the serialized
        object.
    """

@@ -551,8 +585,67 @@ def generate_serialized_data_paths(
    return paths

|
||||
|
||||
def add_prefix_to_full_access_path(
|
||||
serialized_obj: SerializedObject, prefix: str
|
||||
) -> Any:
|
||||
"""Recursively adds a specified prefix to all full access paths of the serialized
|
||||
object.
|
||||
|
||||
Args:
|
||||
serialized_obj:
|
||||
The serialized object to process.
|
||||
prefix:
|
||||
The prefix string to prepend to each full access path.
|
||||
|
||||
Returns:
|
||||
The modified serialized object with the prefix added to all full access paths.
|
||||
|
||||
Example:
|
||||
```python
|
||||
>>> serialized_obj = {
|
||||
... "full_access_path": "",
|
||||
... "value": {
|
||||
... "item": {
|
||||
... "full_access_path": "some_item_path",
|
||||
... "value": 1.0
|
||||
... }
|
||||
... }
|
||||
... }
|
||||
...
|
||||
... modified_data = add_prefix_to_full_access_path(serialized_obj, 'prefix')
|
||||
{"full_access_path": "prefix", "value": {"item": {"full_access_path":
|
||||
"prefix.some_item_path", "value": 1.0}}}
|
||||
```
|
||||
"""
|
||||
|
||||
try:
|
||||
if serialized_obj.get("full_access_path", None) is not None:
|
||||
serialized_obj["full_access_path"] = (
|
||||
prefix + "." + serialized_obj["full_access_path"]
|
||||
if serialized_obj["full_access_path"] != ""
|
||||
else prefix
|
||||
)
|
||||
|
||||
if isinstance(serialized_obj["value"], list):
|
||||
for value in serialized_obj["value"]:
|
||||
add_prefix_to_full_access_path(cast(SerializedObject, value), prefix)
|
||||
|
||||
elif isinstance(serialized_obj["value"], dict):
|
||||
for value in cast(
|
||||
dict[str, SerializedObject], serialized_obj["value"]
|
||||
).values():
|
||||
add_prefix_to_full_access_path(cast(SerializedObject, value), prefix)
|
||||
except (TypeError, KeyError, AttributeError):
|
||||
# passed dictionary is not a serialized object
|
||||
pass
|
||||
return serialized_obj
|
||||
|
||||
|
||||
def serialized_dict_is_nested_object(serialized_dict: SerializedObject) -> bool:
|
||||
value = serialized_dict["value"]
|
||||
# We are excluding Quantity here as the value corresponding to the "value" key is
|
||||
# a dictionary of the form {"magnitude": ..., "unit": ...}
|
||||
return serialized_dict["type"] != "Quantity" and (isinstance(value, dict | list))
|
||||
|
||||
|
||||
__all__ = ["Serializer", "dump"]
|
||||
|
||||
@@ -98,7 +98,9 @@ class SerializedException(SerializedObjectBase):
|
||||
type: Literal["Exception"]
|
||||
|
||||
|
||||
DataServiceTypes = Literal["DataService", "Image", "NumberSlider", "DeviceConnection"]
|
||||
DataServiceTypes = Literal[
|
||||
"DataService", "Image", "NumberSlider", "DeviceConnection", "Task"
|
||||
]
|
||||
|
||||
|
||||
class SerializedDataService(SerializedObjectBase):
|
||||
@@ -123,3 +125,21 @@ SerializedObject = (
|
||||
| SerializedQuantity
|
||||
| SerializedNoValue
|
||||
)
|
||||
"""
|
||||
This type can be any of the following:
|
||||
|
||||
- SerializedBool
|
||||
- SerializedFloat
|
||||
- SerializedInteger
|
||||
- SerializedString
|
||||
- SerializedDatetime
|
||||
- SerializedList
|
||||
- SerializedDict
|
||||
- SerializedNoneType
|
||||
- SerializedMethod
|
||||
- SerializedException
|
||||
- SerializedDataService
|
||||
- SerializedEnum
|
||||
- SerializedQuantity
|
||||
- SerializedNoValue
|
||||
"""
|
||||
|
||||
@@ -41,11 +41,14 @@ def pydase_client() -> Generator[pydase.Client, None, Any]:
        def my_method(self, input_str: str) -> str:
            return input_str

        async def my_async_method(self, input_str: str) -> str:
            return input_str

    server = pydase.Server(MyService(), web_port=9999)
    thread = threading.Thread(target=server.run, daemon=True)
    thread.start()

    client = pydase.Client(hostname="localhost", port=9999)
    client = pydase.Client(url="ws://localhost:9999")

    yield client

@@ -79,6 +82,14 @@ def test_method_execution(pydase_client: pydase.Client) -> None:
|
||||
pydase_client.proxy.my_method(kwarg="hello")
|
||||
|
||||
|
||||
def test_async_method_execution(pydase_client: pydase.Client) -> None:
|
||||
assert pydase_client.proxy.my_async_method("My return string") == "My return string"
|
||||
assert (
|
||||
pydase_client.proxy.my_async_method(input_str="My return string")
|
||||
== "My return string"
|
||||
)
|
||||
|
||||
|
||||
def test_nested_service(pydase_client: pydase.Client) -> None:
|
||||
assert pydase_client.proxy.sub_service.name == "SubService"
|
||||
pydase_client.proxy.sub_service.name = "New name"
|
||||
@@ -138,3 +149,15 @@ def test_tab_completion(pydase_client: pydase.Client) -> None:
            "sub_service",
        ]
    )


def test_context_manager(pydase_client: pydase.Client) -> None:
    client = pydase.Client(url="ws://localhost:9999")

    assert client.proxy.connected

    with client:
        client.proxy.my_property = 1337.01
        assert client.proxy.my_property == 1337.01

    assert not client.proxy.connected

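The fixture and test above switch the client from separate `hostname`/`port` arguments to a single `url`; together with the reconnection tests in the new file below, a hedged usage sketch of the updated client API looks like this (service attributes and method names are hypothetical):

```python
import pydase

# Connect using the new single-URL form instead of hostname/port.
client = pydase.Client(
    url="ws://localhost:9999",
    # Passed through to the underlying socket.io client (see test_reconnection.py).
    sio_client_kwargs={"reconnection_attempts": 1},
)

# Attributes and methods of the remote service are exposed on the proxy.
print(client.proxy.name)
client.proxy.my_method("hello")

# If the server was restarted, the proxy can re-establish the connection.
client.proxy.reconnect()

# Used as a context manager, the client disconnects when the block exits.
with client:
    client.proxy.my_property = 1337.01
```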
tests/client/test_reconnection.py (new file, 107 lines)
@@ -0,0 +1,107 @@
|
||||
import threading
|
||||
from collections.abc import Callable, Generator
|
||||
from typing import Any
|
||||
|
||||
import pydase
|
||||
import pytest
|
||||
import socketio.exceptions
|
||||
|
||||
|
||||
@pytest.fixture(scope="function")
|
||||
def pydase_restartable_server() -> (
|
||||
Generator[
|
||||
tuple[
|
||||
pydase.Server,
|
||||
threading.Thread,
|
||||
pydase.DataService,
|
||||
Callable[
|
||||
[pydase.Server, threading.Thread, pydase.DataService],
|
||||
tuple[pydase.Server, threading.Thread],
|
||||
],
|
||||
],
|
||||
None,
|
||||
Any,
|
||||
]
|
||||
):
|
||||
class MyService(pydase.DataService):
|
||||
def __init__(self) -> None:
|
||||
super().__init__()
|
||||
self._name = "MyService"
|
||||
self._my_property = 12.1
|
||||
|
||||
@property
|
||||
def my_property(self) -> float:
|
||||
return self._my_property
|
||||
|
||||
@my_property.setter
|
||||
def my_property(self, value: float) -> None:
|
||||
self._my_property = value
|
||||
|
||||
@property
|
||||
def name(self) -> str:
|
||||
return self._name
|
||||
|
||||
service_instance = MyService()
|
||||
server = pydase.Server(service_instance, web_port=9999)
|
||||
thread = threading.Thread(target=server.run, daemon=True)
|
||||
thread.start()
|
||||
|
||||
def restart(
|
||||
server: pydase.Server,
|
||||
thread: threading.Thread,
|
||||
service_instance: pydase.DataService,
|
||||
) -> tuple[pydase.Server, threading.Thread]:
|
||||
server.handle_exit()
|
||||
thread.join()
|
||||
|
||||
server = pydase.Server(service_instance, web_port=9999)
|
||||
new_thread = threading.Thread(target=server.run, daemon=True)
|
||||
new_thread.start()
|
||||
|
||||
return server, new_thread
|
||||
|
||||
yield server, thread, service_instance, restart
|
||||
|
||||
server.handle_exit()
|
||||
thread.join()
|
||||
|
||||
|
||||
def test_reconnection(
|
||||
pydase_restartable_server: tuple[
|
||||
pydase.Server,
|
||||
threading.Thread,
|
||||
pydase.DataService,
|
||||
Callable[
|
||||
[pydase.Server, threading.Thread, pydase.DataService],
|
||||
tuple[pydase.Server, threading.Thread],
|
||||
],
|
||||
],
|
||||
) -> None:
|
||||
client = pydase.Client(
|
||||
url="ws://localhost:9999",
|
||||
sio_client_kwargs={
|
||||
"reconnection": False,
|
||||
},
|
||||
)
|
||||
client_2 = pydase.Client(
|
||||
url="ws://localhost:9999",
|
||||
sio_client_kwargs={
|
||||
"reconnection_attempts": 1,
|
||||
},
|
||||
)
|
||||
|
||||
server, thread, service_instance, restart = pydase_restartable_server
|
||||
service_instance._name = "New service name"
|
||||
|
||||
server, thread = restart(server, thread, service_instance)
|
||||
|
||||
with pytest.raises(socketio.exceptions.BadNamespaceError):
|
||||
client.proxy.name
|
||||
client_2.proxy.name
|
||||
|
||||
client.proxy.reconnect()
|
||||
client_2.proxy.reconnect()
|
||||
|
||||
# the service proxies successfully reconnect and get the new service name
|
||||
assert client.proxy.name == "New service name"
|
||||
assert client_2.proxy.name == "New service name"
|
||||
@@ -3,6 +3,7 @@ import asyncio
|
||||
import pydase
|
||||
import pydase.components.device_connection
|
||||
import pytest
|
||||
from pydase.task.autostart import autostart_service_tasks
|
||||
from pytest import LogCaptureFixture
|
||||
|
||||
|
||||
@@ -19,10 +20,9 @@ async def test_reconnection(caplog: LogCaptureFixture) -> None:
|
||||
self._connected = True
|
||||
|
||||
service_instance = MyService()
|
||||
autostart_service_tasks(service_instance)
|
||||
|
||||
assert service_instance._connected is False
|
||||
|
||||
service_instance._task_manager.start_autostart_tasks()
|
||||
|
||||
await asyncio.sleep(0.01)
|
||||
assert service_instance._connected is True
|
||||
|
||||
@@ -36,8 +36,7 @@ def test_unexpected_type_change_warning(caplog: LogCaptureFixture) -> None:
|
||||
|
||||
|
||||
def test_basic_inheritance_warning(caplog: LogCaptureFixture) -> None:
|
||||
class SubService(DataService):
|
||||
...
|
||||
class SubService(DataService): ...
|
||||
|
||||
class SomeEnum(Enum):
|
||||
HI = 0
|
||||
@@ -57,11 +56,9 @@ def test_basic_inheritance_warning(caplog: LogCaptureFixture) -> None:
|
||||
def name(self) -> str:
|
||||
return self._name
|
||||
|
||||
def some_method(self) -> None:
|
||||
...
|
||||
def some_method(self) -> None: ...
|
||||
|
||||
async def some_task(self) -> None:
|
||||
...
|
||||
async def some_task(self) -> None: ...
|
||||
|
||||
ServiceClass()
|
||||
|
||||
@@ -129,17 +126,12 @@ def test_exposing_methods(caplog: LogCaptureFixture) -> None:
|
||||
return "some method"
|
||||
|
||||
class ClassWithTask(pydase.DataService):
|
||||
async def some_task(self, sleep_time: int) -> None:
|
||||
pass
|
||||
@frontend
|
||||
def some_method(self) -> str:
|
||||
return "some method"
|
||||
|
||||
ClassWithTask()
|
||||
|
||||
assert (
|
||||
"Async function 'some_task' is defined with at least one argument. If you want "
|
||||
"to use it as a task, remove the argument(s) from the function definition."
|
||||
in caplog.text
|
||||
)
|
||||
|
||||
|
||||
def test_dynamically_added_attribute(caplog: LogCaptureFixture) -> None:
|
||||
class MyService(DataService):
|
||||
|
||||
@@ -1,7 +1,6 @@
|
||||
import logging
|
||||
|
||||
import pydase
|
||||
import pytest
|
||||
from pydase.data_service.data_service_observer import DataServiceObserver
|
||||
from pydase.data_service.state_manager import StateManager
|
||||
|
||||
@@ -33,35 +32,3 @@ def test_nested_attributes_cache_callback() -> None:
|
||||
]
|
||||
== "Ciao"
|
||||
)
|
||||
|
||||
|
||||
@pytest.mark.asyncio(scope="function")
|
||||
async def test_task_status_update() -> None:
|
||||
class ServiceClass(pydase.DataService):
|
||||
name = "World"
|
||||
|
||||
async def my_method(self) -> None:
|
||||
pass
|
||||
|
||||
service_instance = ServiceClass()
|
||||
state_manager = StateManager(service_instance)
|
||||
DataServiceObserver(state_manager)
|
||||
|
||||
assert (
|
||||
state_manager.cache_manager.get_value_dict_from_cache("my_method")["type"]
|
||||
== "method"
|
||||
)
|
||||
assert (
|
||||
state_manager.cache_manager.get_value_dict_from_cache("my_method")["value"]
|
||||
is None
|
||||
)
|
||||
|
||||
service_instance.start_my_method() # type: ignore
|
||||
assert (
|
||||
state_manager.cache_manager.get_value_dict_from_cache("my_method")["type"]
|
||||
== "method"
|
||||
)
|
||||
assert (
|
||||
state_manager.cache_manager.get_value_dict_from_cache("my_method")["value"]
|
||||
== "RUNNING"
|
||||
)
|
||||
|
||||
@@ -5,7 +5,7 @@ import pydase
|
||||
import pytest
|
||||
from pydase.data_service.data_service_observer import DataServiceObserver
|
||||
from pydase.data_service.state_manager import StateManager
|
||||
from pydase.utils.serialization.serializer import SerializationError
|
||||
from pydase.utils.serialization.serializer import SerializationError, dump
|
||||
|
||||
logger = logging.getLogger()
|
||||
|
||||
@@ -146,3 +146,41 @@ def test_private_attribute_does_not_have_to_be_serializable() -> None:
|
||||
service_instance.change_publ_attr()
|
||||
|
||||
service_instance.change_priv_attr()
|
||||
|
||||
|
||||
def test_normalized_attr_path_in_dependent_property_changes(
|
||||
caplog: pytest.LogCaptureFixture,
|
||||
) -> None:
|
||||
class SubService(pydase.DataService):
|
||||
_prop = 10.0
|
||||
|
||||
@property
|
||||
def prop(self) -> float:
|
||||
return self._prop
|
||||
|
||||
class MyService(pydase.DataService):
|
||||
def __init__(self) -> None:
|
||||
super().__init__()
|
||||
self.service_dict = {"one": SubService()}
|
||||
|
||||
service_instance = MyService()
|
||||
state_manager = StateManager(service=service_instance)
|
||||
observer = DataServiceObserver(state_manager=state_manager)
|
||||
|
||||
assert observer.property_deps_dict["service_dict['one']._prop"] == [
|
||||
"service_dict['one'].prop"
|
||||
]
|
||||
|
||||
# We can use dict key path encoded with double quotes
|
||||
state_manager.set_service_attribute_value_by_path(
|
||||
'service_dict["one"]._prop', dump(11.0)
|
||||
)
|
||||
assert service_instance.service_dict["one"].prop == 11.0
|
||||
assert "'service_dict[\"one\"].prop' changed to '11.0'" in caplog.text
|
||||
|
||||
# We can use dict key path encoded with single quotes
|
||||
state_manager.set_service_attribute_value_by_path(
|
||||
"service_dict['one']._prop", dump(12.0)
|
||||
)
|
||||
assert service_instance.service_dict["one"].prop == 12.0
|
||||
assert "'service_dict[\"one\"].prop' changed to '12.0'" in caplog.text
|
||||
|
||||
@@ -1,135 +0,0 @@
|
||||
import asyncio
|
||||
import logging
|
||||
|
||||
import pydase
|
||||
import pytest
|
||||
from pydase.data_service.data_service_observer import DataServiceObserver
|
||||
from pydase.data_service.state_manager import StateManager
|
||||
from pytest import LogCaptureFixture
|
||||
|
||||
logger = logging.getLogger("pydase")
|
||||
|
||||
|
||||
@pytest.mark.asyncio(scope="function")
|
||||
async def test_autostart_task_callback(caplog: LogCaptureFixture) -> None:
|
||||
class MyService(pydase.DataService):
|
||||
def __init__(self) -> None:
|
||||
super().__init__()
|
||||
self._autostart_tasks = { # type: ignore
|
||||
"my_task": (), # type: ignore
|
||||
"my_other_task": (), # type: ignore
|
||||
}
|
||||
|
||||
async def my_task(self) -> None:
|
||||
logger.info("Triggered task.")
|
||||
|
||||
async def my_other_task(self) -> None:
|
||||
logger.info("Triggered other task.")
|
||||
|
||||
# Your test code here
|
||||
service_instance = MyService()
|
||||
state_manager = StateManager(service_instance)
|
||||
DataServiceObserver(state_manager)
|
||||
service_instance._task_manager.start_autostart_tasks()
|
||||
|
||||
assert "'my_task' changed to 'TaskStatus.RUNNING'" in caplog.text
|
||||
assert "'my_other_task' changed to 'TaskStatus.RUNNING'" in caplog.text
|
||||
|
||||
|
||||
@pytest.mark.asyncio(scope="function")
|
||||
async def test_DataService_subclass_autostart_task_callback(
|
||||
caplog: LogCaptureFixture,
|
||||
) -> None:
|
||||
class MySubService(pydase.DataService):
|
||||
def __init__(self) -> None:
|
||||
super().__init__()
|
||||
self._autostart_tasks = { # type: ignore
|
||||
"my_task": (),
|
||||
"my_other_task": (),
|
||||
}
|
||||
|
||||
async def my_task(self) -> None:
|
||||
logger.info("Triggered task.")
|
||||
|
||||
async def my_other_task(self) -> None:
|
||||
logger.info("Triggered other task.")
|
||||
|
||||
class MyService(pydase.DataService):
|
||||
sub_service = MySubService()
|
||||
|
||||
service_instance = MyService()
|
||||
state_manager = StateManager(service_instance)
|
||||
DataServiceObserver(state_manager)
|
||||
service_instance._task_manager.start_autostart_tasks()
|
||||
|
||||
assert "'sub_service.my_task' changed to 'TaskStatus.RUNNING'" in caplog.text
|
||||
assert "'sub_service.my_other_task' changed to 'TaskStatus.RUNNING'" in caplog.text
|
||||
|
||||
|
||||
@pytest.mark.asyncio(scope="function")
|
||||
async def test_DataService_subclass_list_autostart_task_callback(
|
||||
caplog: LogCaptureFixture,
|
||||
) -> None:
|
||||
class MySubService(pydase.DataService):
|
||||
def __init__(self) -> None:
|
||||
super().__init__()
|
||||
self._autostart_tasks = { # type: ignore
|
||||
"my_task": (),
|
||||
"my_other_task": (),
|
||||
}
|
||||
|
||||
async def my_task(self) -> None:
|
||||
logger.info("Triggered task.")
|
||||
|
||||
async def my_other_task(self) -> None:
|
||||
logger.info("Triggered other task.")
|
||||
|
||||
class MyService(pydase.DataService):
|
||||
sub_services_list = [MySubService() for i in range(2)]
|
||||
|
||||
service_instance = MyService()
|
||||
state_manager = StateManager(service_instance)
|
||||
DataServiceObserver(state_manager)
|
||||
service_instance._task_manager.start_autostart_tasks()
|
||||
|
||||
assert (
|
||||
"'sub_services_list[0].my_task' changed to 'TaskStatus.RUNNING'" in caplog.text
|
||||
)
|
||||
assert (
|
||||
"'sub_services_list[0].my_other_task' changed to 'TaskStatus.RUNNING'"
|
||||
in caplog.text
|
||||
)
|
||||
assert (
|
||||
"'sub_services_list[1].my_task' changed to 'TaskStatus.RUNNING'" in caplog.text
|
||||
)
|
||||
assert (
|
||||
"'sub_services_list[1].my_other_task' changed to 'TaskStatus.RUNNING'"
|
||||
in caplog.text
|
||||
)
|
||||
|
||||
|
||||
@pytest.mark.asyncio(scope="function")
|
||||
async def test_start_and_stop_task_methods(caplog: LogCaptureFixture) -> None:
|
||||
class MyService(pydase.DataService):
|
||||
def __init__(self) -> None:
|
||||
super().__init__()
|
||||
|
||||
async def my_task(self) -> None:
|
||||
while True:
|
||||
logger.debug("Logging message")
|
||||
await asyncio.sleep(0.1)
|
||||
|
||||
# Your test code here
|
||||
service_instance = MyService()
|
||||
state_manager = StateManager(service_instance)
|
||||
DataServiceObserver(state_manager)
|
||||
service_instance.start_my_task()
|
||||
await asyncio.sleep(0.01)
|
||||
|
||||
assert "'my_task' changed to 'TaskStatus.RUNNING'" in caplog.text
|
||||
assert "Logging message" in caplog.text
|
||||
caplog.clear()
|
||||
|
||||
service_instance.stop_my_task()
|
||||
await asyncio.sleep(0.01)
|
||||
assert "Task 'my_task' was cancelled" in caplog.text
|
||||
@@ -6,6 +6,7 @@ from typing import Any
|
||||
import aiohttp
|
||||
import pydase
|
||||
import pytest
|
||||
from pydase.utils.serialization.deserializer import Deserializer
|
||||
|
||||
|
||||
@pytest.fixture()
|
||||
@@ -40,7 +41,10 @@ def pydase_server() -> Generator[None, None, None]:
|
||||
return self._readonly_attr
|
||||
|
||||
def my_method(self, input_str: str) -> str:
|
||||
return input_str
|
||||
return f"{input_str}: my_method"
|
||||
|
||||
async def my_async_method(self, input_str: str) -> str:
|
||||
return f"{input_str}: my_async_method"
|
||||
|
||||
server = pydase.Server(MyService(), web_port=9998)
|
||||
thread = threading.Thread(target=server.run, daemon=True)
|
||||
@@ -192,3 +196,57 @@ async def test_update_value(
|
||||
resp = await session.get(f"/api/v1/get_value?access_path={access_path}")
|
||||
content = json.loads(await resp.text())
|
||||
assert content == new_value
|
||||
|
||||
|
||||
@pytest.mark.parametrize(
|
||||
"access_path, expected, ok",
|
||||
[
|
||||
(
|
||||
"my_method",
|
||||
"Hello from function: my_method",
|
||||
True,
|
||||
),
|
||||
(
|
||||
"my_async_method",
|
||||
"Hello from function: my_async_method",
|
||||
True,
|
||||
),
|
||||
(
|
||||
"invalid_method",
|
||||
None,
|
||||
False,
|
||||
),
|
||||
],
|
||||
)
|
||||
@pytest.mark.asyncio()
|
||||
async def test_trigger_method(
|
||||
access_path: str,
|
||||
expected: Any,
|
||||
ok: bool,
|
||||
pydase_server: pydase.DataService,
|
||||
) -> None:
|
||||
async with aiohttp.ClientSession("http://localhost:9998") as session:
|
||||
resp = await session.put(
|
||||
"/api/v1/trigger_method",
|
||||
json={
|
||||
"access_path": access_path,
|
||||
"kwargs": {
|
||||
"full_access_path": "",
|
||||
"type": "dict",
|
||||
"value": {
|
||||
"input_str": {
|
||||
"docs": None,
|
||||
"full_access_path": "",
|
||||
"readonly": False,
|
||||
"type": "str",
|
||||
"value": "Hello from function",
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
)
|
||||
assert resp.ok == ok
|
||||
|
||||
if resp.ok:
|
||||
content = Deserializer.deserialize(json.loads(await resp.text()))
|
||||
assert content == expected
|
||||
|
||||
tests/task/test_task.py (new file, 291 lines)
@@ -0,0 +1,291 @@
|
||||
import asyncio
|
||||
import logging
|
||||
|
||||
import pydase
|
||||
import pytest
|
||||
from pydase.data_service.data_service_observer import DataServiceObserver
|
||||
from pydase.data_service.state_manager import StateManager
|
||||
from pydase.task.autostart import autostart_service_tasks
|
||||
from pydase.task.decorator import task
|
||||
from pydase.task.task_status import TaskStatus
|
||||
from pytest import LogCaptureFixture
|
||||
|
||||
logger = logging.getLogger("pydase")
|
||||
|
||||
|
||||
@pytest.mark.asyncio(scope="function")
|
||||
async def test_start_and_stop_task(caplog: LogCaptureFixture) -> None:
|
||||
class MyService(pydase.DataService):
|
||||
@task()
|
||||
async def my_task(self) -> None:
|
||||
logger.info("Triggered task.")
|
||||
while True:
|
||||
await asyncio.sleep(1)
|
||||
|
||||
# Your test code here
|
||||
service_instance = MyService()
|
||||
state_manager = StateManager(service_instance)
|
||||
DataServiceObserver(state_manager)
|
||||
|
||||
autostart_service_tasks(service_instance)
|
||||
await asyncio.sleep(0.1)
|
||||
assert service_instance.my_task.status == TaskStatus.NOT_RUNNING
|
||||
|
||||
service_instance.my_task.start()
|
||||
await asyncio.sleep(0.1)
|
||||
assert service_instance.my_task.status == TaskStatus.RUNNING
|
||||
|
||||
assert "'my_task.status' changed to 'TaskStatus.RUNNING'" in caplog.text
|
||||
assert "Triggered task." in caplog.text
|
||||
caplog.clear()
|
||||
|
||||
service_instance.my_task.stop()
|
||||
await asyncio.sleep(0.1)
|
||||
assert service_instance.my_task.status == TaskStatus.NOT_RUNNING
|
||||
assert "Task 'my_task' was cancelled" in caplog.text
|
||||
|
||||
|
||||
@pytest.mark.asyncio(scope="function")
|
||||
async def test_autostart_task(caplog: LogCaptureFixture) -> None:
|
||||
class MyService(pydase.DataService):
|
||||
@task(autostart=True)
|
||||
async def my_task(self) -> None:
|
||||
logger.info("Triggered task.")
|
||||
while True:
|
||||
await asyncio.sleep(1)
|
||||
|
||||
# Your test code here
|
||||
service_instance = MyService()
|
||||
state_manager = StateManager(service_instance)
|
||||
DataServiceObserver(state_manager)
|
||||
|
||||
autostart_service_tasks(service_instance)
|
||||
|
||||
await asyncio.sleep(0.1)
|
||||
assert service_instance.my_task.status == TaskStatus.RUNNING
|
||||
|
||||
assert "'my_task.status' changed to 'TaskStatus.RUNNING'" in caplog.text
|
||||
|
||||
|
||||
@pytest.mark.asyncio(scope="function")
|
||||
async def test_nested_list_autostart_task(
|
||||
caplog: LogCaptureFixture,
|
||||
) -> None:
|
||||
class MySubService(pydase.DataService):
|
||||
@task(autostart=True)
|
||||
async def my_task(self) -> None:
|
||||
logger.info("Triggered task.")
|
||||
while True:
|
||||
await asyncio.sleep(1)
|
||||
|
||||
class MyService(pydase.DataService):
|
||||
sub_services_list = [MySubService() for i in range(2)]
|
||||
|
||||
service_instance = MyService()
|
||||
state_manager = StateManager(service_instance)
|
||||
DataServiceObserver(state_manager)
|
||||
autostart_service_tasks(service_instance)
|
||||
|
||||
await asyncio.sleep(0.1)
|
||||
assert service_instance.sub_services_list[0].my_task.status == TaskStatus.RUNNING
|
||||
assert service_instance.sub_services_list[1].my_task.status == TaskStatus.RUNNING
|
||||
|
||||
assert (
|
||||
"'sub_services_list[0].my_task.status' changed to 'TaskStatus.RUNNING'"
|
||||
in caplog.text
|
||||
)
|
||||
assert (
|
||||
"'sub_services_list[1].my_task.status' changed to 'TaskStatus.RUNNING'"
|
||||
in caplog.text
|
||||
)
|
||||
|
||||
|
||||
@pytest.mark.asyncio(scope="function")
|
||||
async def test_nested_dict_autostart_task(
|
||||
caplog: LogCaptureFixture,
|
||||
) -> None:
|
||||
class MySubService(pydase.DataService):
|
||||
@task(autostart=True)
|
||||
async def my_task(self) -> None:
|
||||
logger.info("Triggered task.")
|
||||
while True:
|
||||
await asyncio.sleep(1)
|
||||
|
||||
class MyService(pydase.DataService):
|
||||
sub_services_dict = {"first": MySubService(), "second": MySubService()}
|
||||
|
||||
service_instance = MyService()
|
||||
state_manager = StateManager(service_instance)
|
||||
DataServiceObserver(state_manager)
|
||||
|
||||
autostart_service_tasks(service_instance)
|
||||
|
||||
await asyncio.sleep(0.1)
|
||||
|
||||
assert (
|
||||
service_instance.sub_services_dict["first"].my_task.status == TaskStatus.RUNNING
|
||||
)
|
||||
assert (
|
||||
service_instance.sub_services_dict["second"].my_task.status
|
||||
== TaskStatus.RUNNING
|
||||
)
|
||||
|
||||
assert (
|
||||
"'sub_services_dict[\"first\"].my_task.status' changed to 'TaskStatus.RUNNING'"
|
||||
in caplog.text
|
||||
)
|
||||
assert (
|
||||
"'sub_services_dict[\"second\"].my_task.status' changed to 'TaskStatus.RUNNING'"
|
||||
in caplog.text
|
||||
)
|
||||
|
||||
|
||||
@pytest.mark.asyncio(scope="function")
|
||||
async def test_manual_start_with_multiple_service_instances(
|
||||
caplog: LogCaptureFixture,
|
||||
) -> None:
|
||||
class MySubService(pydase.DataService):
|
||||
@task()
|
||||
async def my_task(self) -> None:
|
||||
logger.info("Triggered task.")
|
||||
while True:
|
||||
await asyncio.sleep(1)
|
||||
|
||||
class MyService(pydase.DataService):
|
||||
sub_services_list = [MySubService() for i in range(2)]
|
||||
sub_services_dict = {"first": MySubService(), "second": MySubService()}
|
||||
|
||||
service_instance = MyService()
|
||||
state_manager = StateManager(service_instance)
|
||||
DataServiceObserver(state_manager)
|
||||
|
||||
autostart_service_tasks(service_instance)
|
||||
|
||||
await asyncio.sleep(0.1)
|
||||
|
||||
assert (
|
||||
service_instance.sub_services_list[0].my_task.status == TaskStatus.NOT_RUNNING
|
||||
)
|
||||
assert (
|
||||
service_instance.sub_services_list[1].my_task.status == TaskStatus.NOT_RUNNING
|
||||
)
|
||||
assert (
|
||||
service_instance.sub_services_dict["first"].my_task.status
|
||||
== TaskStatus.NOT_RUNNING
|
||||
)
|
||||
assert (
|
||||
service_instance.sub_services_dict["second"].my_task.status
|
||||
== TaskStatus.NOT_RUNNING
|
||||
)
|
||||
|
||||
service_instance.sub_services_list[0].my_task.start()
|
||||
await asyncio.sleep(0.01)
|
||||
|
||||
assert service_instance.sub_services_list[0].my_task.status == TaskStatus.RUNNING
|
||||
assert (
|
||||
"'sub_services_list[0].my_task.status' changed to 'TaskStatus.RUNNING'"
|
||||
in caplog.text
|
||||
)
|
||||
assert (
|
||||
"'sub_services_list[1].my_task.status' changed to 'TaskStatus.RUNNING'"
|
||||
not in caplog.text
|
||||
)
|
||||
assert (
|
||||
"'sub_services_dict[\"first\"].my_task.status' changed to 'TaskStatus.RUNNING'"
|
||||
not in caplog.text
|
||||
)
|
||||
assert (
|
||||
"'sub_services_dict[\"second\"].my_task.status' changed to 'TaskStatus.RUNNING'"
|
||||
not in caplog.text
|
||||
)
|
||||
|
||||
service_instance.sub_services_list[0].my_task.stop()
|
||||
await asyncio.sleep(0.01)
|
||||
|
||||
assert "Task 'my_task' was cancelled" in caplog.text
|
||||
caplog.clear()
|
||||
|
||||
service_instance.sub_services_list[1].my_task.start()
|
||||
await asyncio.sleep(0.01)
|
||||
|
||||
assert service_instance.sub_services_list[1].my_task.status == TaskStatus.RUNNING
|
||||
assert (
|
||||
"'sub_services_list[0].my_task.status' changed to 'TaskStatus.RUNNING'"
|
||||
not in caplog.text
|
||||
)
|
||||
assert (
|
||||
"'sub_services_list[1].my_task.status' changed to 'TaskStatus.RUNNING'"
|
||||
in caplog.text
|
||||
)
|
||||
assert (
|
||||
"'sub_services_dict[\"first\"].my_task.status' changed to 'TaskStatus.RUNNING'"
|
||||
not in caplog.text
|
||||
)
|
||||
assert (
|
||||
"'sub_services_dict[\"second\"].my_task.status' changed to 'TaskStatus.RUNNING'"
|
||||
not in caplog.text
|
||||
)
|
||||
|
||||
service_instance.sub_services_list[1].my_task.stop()
|
||||
await asyncio.sleep(0.01)
|
||||
|
||||
assert "Task 'my_task' was cancelled" in caplog.text
|
||||
caplog.clear()
|
||||
|
||||
service_instance.sub_services_dict["first"].my_task.start()
|
||||
await asyncio.sleep(0.01)
|
||||
|
||||
assert (
|
||||
service_instance.sub_services_dict["first"].my_task.status == TaskStatus.RUNNING
|
||||
)
|
||||
assert (
|
||||
"'sub_services_list[0].my_task.status' changed to 'TaskStatus.RUNNING'"
|
||||
not in caplog.text
|
||||
)
|
||||
assert (
|
||||
"'sub_services_list[1].my_task.status' changed to 'TaskStatus.RUNNING'"
|
||||
not in caplog.text
|
||||
)
|
||||
assert (
|
||||
"'sub_services_dict[\"first\"].my_task.status' changed to 'TaskStatus.RUNNING'"
|
||||
in caplog.text
|
||||
)
|
||||
assert (
|
||||
"'sub_services_dict[\"second\"].my_task.status' changed to 'TaskStatus.RUNNING'"
|
||||
not in caplog.text
|
||||
)
|
||||
|
||||
service_instance.sub_services_dict["first"].my_task.stop()
|
||||
await asyncio.sleep(0.01)
|
||||
|
||||
assert "Task 'my_task' was cancelled" in caplog.text
|
||||
caplog.clear()
|
||||
|
||||
service_instance.sub_services_dict["second"].my_task.start()
|
||||
await asyncio.sleep(0.01)
|
||||
|
||||
assert (
|
||||
service_instance.sub_services_dict["second"].my_task.status
|
||||
== TaskStatus.RUNNING
|
||||
)
|
||||
assert (
|
||||
"'sub_services_list[0].my_task.status' changed to 'TaskStatus.RUNNING'"
|
||||
not in caplog.text
|
||||
)
|
||||
assert (
|
||||
"'sub_services_list[1].my_task.status' changed to 'TaskStatus.RUNNING'"
|
||||
not in caplog.text
|
||||
)
|
||||
assert (
|
||||
"'sub_services_dict[\"first\"].my_task.status' changed to 'TaskStatus.RUNNING'"
|
||||
not in caplog.text
|
||||
)
|
||||
assert (
|
||||
"'sub_services_dict[\"second\"].my_task.status' changed to 'TaskStatus.RUNNING'"
|
||||
in caplog.text
|
||||
)
|
||||
|
||||
service_instance.sub_services_dict["second"].my_task.stop()
|
||||
await asyncio.sleep(0.01)
|
||||
|
||||
assert "Task 'my_task' was cancelled" in caplog.text
|
||||
@@ -1,4 +1,3 @@
|
||||
import asyncio
|
||||
import enum
|
||||
from datetime import datetime
|
||||
from enum import Enum
|
||||
@@ -8,11 +7,12 @@ import pydase
|
||||
import pydase.units as u
|
||||
import pytest
|
||||
from pydase.components.coloured_enum import ColouredEnum
|
||||
from pydase.data_service.task_manager import TaskStatus
|
||||
from pydase.task.task_status import TaskStatus
|
||||
from pydase.utils.decorators import frontend
|
||||
from pydase.utils.serialization.serializer import (
|
||||
SerializationPathError,
|
||||
SerializedObject,
|
||||
add_prefix_to_full_access_path,
|
||||
dump,
|
||||
generate_serialized_data_paths,
|
||||
get_container_item_by_key,
|
||||
@@ -214,11 +214,9 @@ async def test_method_serialization() -> None:
|
||||
return "some method"
|
||||
|
||||
async def some_task(self) -> None:
|
||||
while True:
|
||||
await asyncio.sleep(10)
|
||||
pass
|
||||
|
||||
instance = ClassWithMethod()
|
||||
instance.start_some_task() # type: ignore
|
||||
|
||||
assert dump(instance)["value"] == {
|
||||
"some_method": {
|
||||
@@ -234,7 +232,7 @@ async def test_method_serialization() -> None:
|
||||
"some_task": {
|
||||
"full_access_path": "some_task",
|
||||
"type": "method",
|
||||
"value": TaskStatus.RUNNING.name,
|
||||
"value": None,
|
||||
"readonly": True,
|
||||
"doc": None,
|
||||
"async": True,
|
||||
@@ -1073,3 +1071,156 @@ def test_get_data_paths_from_serialized_object(obj: Any, expected: list[str]) ->
|
||||
)
|
||||
def test_generate_serialized_data_paths(obj: Any, expected: list[str]) -> None:
|
||||
assert generate_serialized_data_paths(dump(obj=obj)["value"]) == expected
|
||||
|
||||
|
||||
@pytest.mark.parametrize(
|
||||
"serialized_obj, prefix, expected",
|
||||
[
|
||||
(
|
||||
{
|
||||
"full_access_path": "new_attr",
|
||||
"value": {
|
||||
"name": {
|
||||
"full_access_path": "new_attr.name",
|
||||
"value": "MyService",
|
||||
}
|
||||
},
|
||||
},
|
||||
"prefix",
|
||||
{
|
||||
"full_access_path": "prefix.new_attr",
|
||||
"value": {
|
||||
"name": {
|
||||
"full_access_path": "prefix.new_attr.name",
|
||||
"value": "MyService",
|
||||
}
|
||||
},
|
||||
},
|
||||
),
|
||||
(
|
||||
{
|
||||
"full_access_path": "new_attr",
|
||||
"value": [
|
||||
{
|
||||
"full_access_path": "new_attr[0]",
|
||||
"value": 1.0,
|
||||
}
|
||||
],
|
||||
},
|
||||
"prefix",
|
||||
{
|
||||
"full_access_path": "prefix.new_attr",
|
||||
"value": [
|
||||
{
|
||||
"full_access_path": "prefix.new_attr[0]",
|
||||
"value": 1.0,
|
||||
}
|
||||
],
|
||||
},
|
||||
),
|
||||
(
|
||||
{
|
||||
"full_access_path": "new_attr",
|
||||
"value": {
|
||||
"key": {
|
||||
"full_access_path": 'new_attr["key"]',
|
||||
"value": 1.0,
|
||||
}
|
||||
},
|
||||
},
|
||||
"prefix",
|
||||
{
|
||||
"full_access_path": "prefix.new_attr",
|
||||
"value": {
|
||||
"key": {
|
||||
"full_access_path": 'prefix.new_attr["key"]',
|
||||
"value": 1.0,
|
||||
}
|
||||
},
|
||||
},
|
||||
),
|
||||
(
|
||||
{
|
||||
"full_access_path": "new_attr",
|
||||
"value": {"magnitude": 10, "unit": "meter"},
|
||||
},
|
||||
"prefix",
|
||||
{
|
||||
"full_access_path": "prefix.new_attr",
|
||||
"value": {"magnitude": 10, "unit": "meter"},
|
||||
},
|
||||
),
|
||||
(
|
||||
{
|
||||
"full_access_path": "quantity_list",
|
||||
"value": [
|
||||
{
|
||||
"full_access_path": "quantity_list[0]",
|
||||
"value": {"magnitude": 1.0, "unit": "A"},
|
||||
}
|
||||
],
|
||||
},
|
||||
"prefix",
|
||||
{
|
||||
"full_access_path": "prefix.quantity_list",
|
||||
"value": [
|
||||
{
|
||||
"full_access_path": "prefix.quantity_list[0]",
|
||||
"value": {"magnitude": 1.0, "unit": "A"},
|
||||
}
|
||||
],
|
||||
},
|
||||
),
|
||||
(
|
||||
{
|
||||
"full_access_path": "",
|
||||
"value": {
|
||||
"dict_attr": {
|
||||
"type": "dict",
|
||||
"full_access_path": "dict_attr",
|
||||
"value": {
|
||||
"foo": {
|
||||
"full_access_path": 'dict_attr["foo"]',
|
||||
"type": "dict",
|
||||
"value": {
|
||||
"some_int": {
|
||||
"full_access_path": 'dict_attr["foo"].some_int',
|
||||
"type": "int",
|
||||
"value": 1,
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
}
|
||||
},
|
||||
},
|
||||
"prefix",
|
||||
{
|
||||
"full_access_path": "prefix",
|
||||
"value": {
|
||||
"dict_attr": {
|
||||
"type": "dict",
|
||||
"full_access_path": "prefix.dict_attr",
|
||||
"value": {
|
||||
"foo": {
|
||||
"full_access_path": 'prefix.dict_attr["foo"]',
|
||||
"type": "dict",
|
||||
"value": {
|
||||
"some_int": {
|
||||
"full_access_path": 'prefix.dict_attr["foo"].some_int',
|
||||
"type": "int",
|
||||
"value": 1,
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
}
|
||||
},
|
||||
},
|
||||
),
|
||||
],
|
||||
)
|
||||
def test_add_prefix_to_full_access_path(
|
||||
serialized_obj: SerializedObject, prefix: str, expected: SerializedObject
|
||||
) -> None:
|
||||
assert add_prefix_to_full_access_path(serialized_obj, prefix) == expected
|
||||
|
||||