mirror of https://github.com/bec-project/bec_widgets.git (synced 2025-12-31 03:01:18 +01:00)
test(history): add history message helper methods to conftest
@@ -1,6 +1,10 @@
from unittest import mock
import json
import time

import h5py
import numpy as np
import pytest
from bec_lib import messages
from pytestqt.exceptions import TimeoutError as QtBotTimeoutError
from qtpy.QtWidgets import QApplication

@@ -83,3 +87,110 @@ def create_widget(qtbot, widget, *args, **kwargs):
    qtbot.addWidget(widget)
    qtbot.waitExposed(widget)
    return widget


def create_history_file(file_path, data: dict, metadata: dict) -> messages.ScanHistoryMessage:
    """
    Helper to create a history file with the given data.

    The data should contain readout groups, e.g.
    {
        "baseline": {"samx": {"samx": {"value": [1, 2, 3], "timestamp": [100, 200, 300]}}},
        "monitored": {"bpm4i": {"bpm4i": {"value": [5, 6, 7], "timestamp": [101, 201, 301]}}},
        "async": {"async_device": {"async_device": {"value": [1, 2, 3], "timestamp": [11, 21, 31]}}},
    }
    """

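    # File layout produced below:
    #   entry/collection/metadata              -> "sample_name" plus a "bec" subgroup with the scan metadata
    #   entry/collection/devices/<dev>/<sig>   -> "value" and "timestamp" datasets per signal
    #   entry/collection/readout_groups/<grp>  -> soft links to the corresponding device entries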
    with h5py.File(file_path, "w") as f:
        _metadata = f.create_group("entry/collection/metadata")
        _metadata.create_dataset("sample_name", data="test_sample")
        metadata_bec = f.create_group("entry/collection/metadata/bec")
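        # Scalar metadata entries are written directly; nested dicts become sub-groups
        # and lists are stored as JSON-encoded strings.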
        for key, value in metadata.items():
            if isinstance(value, dict):
                metadata_bec.create_group(key)
                for sub_key, sub_value in value.items():
                    if isinstance(sub_value, list):
                        sub_value = json.dumps(sub_value)
                        metadata_bec[key].create_dataset(sub_key, data=sub_value)
                    elif isinstance(sub_value, dict):
                        # Create the sub-group once, then fill it with the nested entries.
                        sub_sub_group = metadata_bec[key].create_group(sub_key)
                        for sub_sub_key, sub_sub_value in sub_value.items():
                            if isinstance(sub_sub_value, list):
                                sub_sub_value = json.dumps(sub_sub_value)
                            sub_sub_group.create_dataset(sub_sub_key, data=sub_sub_value)
                    else:
                        metadata_bec[key].create_dataset(sub_key, data=sub_value)
            else:
                metadata_bec.create_dataset(key, data=value)
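        # Write each device's signals under entry/collection/devices and expose them
        # in the matching readout group via an HDF5 soft link.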
        for group, devices in data.items():
            readout_group = f.create_group(f"entry/collection/readout_groups/{group}")

            for device, device_data in devices.items():
                dev_group = f.create_group(f"entry/collection/devices/{device}")
                for signal, signal_data in device_data.items():
                    signal_group = dev_group.create_group(signal)
                    for signal_key, signal_values in signal_data.items():
                        signal_group.create_dataset(signal_key, data=signal_values)

                readout_group[device] = h5py.SoftLink(f"/entry/collection/devices/{device}")
    msg = messages.ScanHistoryMessage(
        scan_id=metadata["scan_id"],
        scan_name=metadata["scan_name"],
        exit_status=metadata["exit_status"],
        file_path=file_path,
        scan_number=metadata["scan_number"],
        dataset_number=metadata["dataset_number"],
        start_time=time.time(),
        end_time=time.time(),
        num_points=metadata["num_points"],
        request_inputs=metadata["request_inputs"],
    )
    return msg


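# Fixture providing a ScanHistoryMessage for a 10x10 grid scan with baseline,
# monitored and async device data written to a temporary HDF5 file.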
@pytest.fixture
def grid_scan_history_msg(tmpdir):
    x_grid, y_grid = np.meshgrid(np.linspace(-5, 5, 10), np.linspace(-5, 5, 10))

    x_flat = x_grid.T.ravel()
    y_flat = y_grid.T.ravel()
    positions = np.vstack((x_flat, y_flat)).T
    num_points = len(positions)
    data = {
        "baseline": {"bpm1a": {"bpm1a": {"value": [1], "timestamp": [100]}}},
        "monitored": {
            "bpm4i": {
                "bpm4i": {
                    "value": np.random.rand(num_points),
                    "timestamp": np.random.rand(num_points),
                }
            },
            "samx": {"samx": {"value": x_flat, "timestamp": np.random.rand(num_points)}},
            "samy": {"samy": {"value": y_flat, "timestamp": np.random.rand(num_points)}},
        },
        "async": {
            "async_device": {
                "async_device": {
                    "value": np.random.rand(num_points * 10),
                    "timestamp": np.random.rand(num_points * 10),
                }
            }
        },
    }
    metadata = {
        "scan_id": "test_scan",
        "scan_name": "grid_scan",
        "scan_type": "step",
        "exit_status": "closed",
        "scan_number": 1,
        "dataset_number": 1,
        "request_inputs": {
            "arg_bundle": ["samx", -5, 5, 10, "samy", -5, 5, 10],
            "kwargs": {"relative": True},
        },
        "positions": positions.tolist(),
        "num_points": num_points,
    }

    file_path = str(tmpdir.join("scan_1.h5"))
    return create_history_file(file_path, data, metadata)
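
A minimal usage sketch (not part of this commit) of how a test could consume the grid_scan_history_msg fixture and read back the file written by create_history_file, assuming the pydantic-based ScanHistoryMessage exposes its fields as attributes; the test name and the assertions are illustrative only:

import h5py


def test_grid_scan_history_msg(grid_scan_history_msg):
    # The fixture returns the ScanHistoryMessage assembled by create_history_file.
    msg = grid_scan_history_msg
    assert msg.scan_name == "grid_scan"
    assert msg.num_points == 100  # 10 x 10 grid

    # The readout groups resolve to the device data through HDF5 soft links.
    with h5py.File(msg.file_path, "r") as f:
        samx = f["entry/collection/readout_groups/monitored/samx/samx/value"][()]
        assert samx.shape == (100,)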