mirror of
https://github.com/bec-project/bec_widgets.git
synced 2025-07-12 18:51:50 +02:00
feat(waveform): large async dataset warning popup
This commit is contained in:
@ -3970,6 +3970,48 @@ class Waveform(RPCBase):
|
||||
The color palette of the figure widget.
|
||||
"""
|
||||
|
||||
@property
@rpc_call
def skip_large_dataset_warning(self) -> "bool":
    """
    Whether to skip the large dataset warning when fetching async data.
    """

# NOTE(review): RPC stub — the body is intercepted by @rpc_call; presumably
# produced by the client generator, so regenerate rather than hand-edit.
@skip_large_dataset_warning.setter
@rpc_call
def skip_large_dataset_warning(self) -> "bool":
    """
    Set whether to skip the large dataset warning when fetching async data.
    """
|
||||
|
||||
@property
@rpc_call
def skip_large_dataset_check(self) -> "bool":
    """
    Whether to skip the large dataset warning when fetching async data.
    """

# NOTE(review): RPC stub — the body is intercepted by @rpc_call; presumably
# produced by the client generator, so regenerate rather than hand-edit.
@skip_large_dataset_check.setter
@rpc_call
def skip_large_dataset_check(self) -> "bool":
    """
    Set whether to skip the large dataset warning when fetching async data.
    """
|
||||
|
||||
@property
@rpc_call
def max_dataset_size_mb(self) -> "float":
    """
    The maximum dataset size (in MB) permitted when fetching async data from history before prompting the user.
    """

# NOTE(review): RPC stub — the body is intercepted by @rpc_call; presumably
# produced by the client generator, so regenerate rather than hand-edit.
@max_dataset_size_mb.setter
@rpc_call
def max_dataset_size_mb(self) -> "float":
    """
    Set the maximum dataset size (in MB) permitted when fetching async data from history before prompting the user.
    """
|
||||
|
||||
@rpc_call
|
||||
def plot(
|
||||
self,
|
||||
|
@ -9,8 +9,19 @@ import pyqtgraph as pg
|
||||
from bec_lib import bec_logger, messages
|
||||
from bec_lib.endpoints import MessageEndpoints
|
||||
from pydantic import Field, ValidationError, field_validator
|
||||
from qtpy.QtCore import QTimer, Signal
|
||||
from qtpy.QtWidgets import QApplication, QDialog, QHBoxLayout, QMainWindow, QVBoxLayout, QWidget
|
||||
from qtpy.QtCore import Qt, QTimer, Signal
|
||||
from qtpy.QtWidgets import (
|
||||
QApplication,
|
||||
QCheckBox,
|
||||
QDialog,
|
||||
QDialogButtonBox,
|
||||
QDoubleSpinBox,
|
||||
QHBoxLayout,
|
||||
QLabel,
|
||||
QMainWindow,
|
||||
QVBoxLayout,
|
||||
QWidget,
|
||||
)
|
||||
|
||||
from bec_widgets.utils import ConnectionConfig
|
||||
from bec_widgets.utils.bec_signal_proxy import BECSignalProxy
|
||||
@ -33,6 +44,11 @@ class WaveformConfig(ConnectionConfig):
|
||||
color_palette: str | None = Field(
|
||||
"plasma", description="The color palette of the figure widget.", validate_default=True
|
||||
)
|
||||
# Size guard for async history fetches; the widget's max_dataset_size_mb
# setter rejects non-positive values before they reach this field.
max_dataset_size_mb: float = Field(
    10,
    description="Maximum dataset size (in MB) permitted when fetching async data from history before prompting the user.",
    validate_default=True,
)
|
||||
|
||||
model_config: dict = {"validate_assignment": True}
|
||||
_validate_color_palette = field_validator("color_palette")(Colors.validate_color_map)
|
||||
@ -96,6 +112,12 @@ class Waveform(PlotBase):
|
||||
"x_entry.setter",
|
||||
"color_palette",
|
||||
"color_palette.setter",
|
||||
"skip_large_dataset_warning",
|
||||
"skip_large_dataset_warning.setter",
|
||||
"skip_large_dataset_check",
|
||||
"skip_large_dataset_check.setter",
|
||||
"max_dataset_size_mb",
|
||||
"max_dataset_size_mb.setter",
|
||||
"plot",
|
||||
"add_dap_curve",
|
||||
"remove_curve",
|
||||
@ -164,6 +186,10 @@ class Waveform(PlotBase):
|
||||
self._init_curve_dialog()
|
||||
self.curve_settings_dialog = None
|
||||
|
||||
# Large‑dataset guard
|
||||
self._skip_large_dataset_warning = False # session flag
|
||||
self._skip_large_dataset_check = False # per-plot flag, to skip the warning for this plot
|
||||
|
||||
# Scan status update loop
|
||||
self.bec_dispatcher.connect_slot(self.on_scan_status, MessageEndpoints.scan_status())
|
||||
self.bec_dispatcher.connect_slot(self.on_scan_progress, MessageEndpoints.scan_progress())
|
||||
@ -562,6 +588,59 @@ class Waveform(PlotBase):
|
||||
"""
|
||||
return [item for item in self.plot_item.curves if isinstance(item, Curve)]
|
||||
|
||||
@SafeProperty(bool)
def skip_large_dataset_check(self) -> bool:
    """
    Whether to skip the large dataset warning when fetching async data.

    Per-plot flag: when True, the dataset-size check is bypassed entirely
    for this widget (see the `_skip_large_dataset_check is False` guard
    around `_check_dataset_size_and_confirm`).
    """
    return self._skip_large_dataset_check

@skip_large_dataset_check.setter
def skip_large_dataset_check(self, value: bool):
    """
    Set whether to skip the large dataset warning when fetching async data.

    Args:
        value(bool): Whether to skip the large dataset warning.
    """
    self._skip_large_dataset_check = value
|
||||
|
||||
@SafeProperty(bool)
def skip_large_dataset_warning(self) -> bool:
    """
    Whether to skip the large dataset warning when fetching async data.

    Session flag: when True, over-limit datasets are rejected without
    showing the confirmation dialog (see `_check_dataset_size_and_confirm`).
    """
    return self._skip_large_dataset_warning

@skip_large_dataset_warning.setter
def skip_large_dataset_warning(self, value: bool):
    """
    Set whether to skip the large dataset warning when fetching async data.

    Args:
        value(bool): Whether to skip the large dataset warning.
    """
    self._skip_large_dataset_warning = value
|
||||
|
||||
@SafeProperty(float)
def max_dataset_size_mb(self) -> float:
    """
    The maximum dataset size (in MB) permitted when fetching async data from history before prompting the user.
    """
    # Backed by the widget config so the limit survives config round-trips.
    return self.config.max_dataset_size_mb

@max_dataset_size_mb.setter
def max_dataset_size_mb(self, value: float):
    """
    Set the maximum dataset size (in MB) permitted when fetching async data from history before prompting the user.

    Args:
        value(float): The maximum dataset size in MB.

    Raises:
        ValueError: If *value* is not strictly positive.
    """
    if value <= 0:
        raise ValueError("Maximum dataset size must be greater than 0.")
    self.config.max_dataset_size_mb = value
|
||||
|
||||
################################################################################
|
||||
# High Level methods for API
|
||||
################################################################################
|
||||
@ -808,8 +887,6 @@ class Waveform(PlotBase):
|
||||
if config.source == "device":
|
||||
if self.scan_item is None:
|
||||
self.update_with_scan_history(-1)
|
||||
if curve in self._async_curves:
|
||||
self._setup_async_curve(curve)
|
||||
self.async_signal_update.emit()
|
||||
self.sync_signal_update.emit()
|
||||
if config.source == "dap":
|
||||
@ -1136,9 +1213,11 @@ class Waveform(PlotBase):
|
||||
if access_key == "val": # live access
|
||||
device_data = data.get(device_name, {}).get(device_entry, {}).get(access_key, None)
|
||||
else: # history access
|
||||
device_data = (
|
||||
data.get(device_name, {}).get(device_entry, {}).read().get("value", None)
|
||||
)
|
||||
dataset_obj = data.get(device_name, {})
|
||||
if self._skip_large_dataset_check is False:
|
||||
if not self._check_dataset_size_and_confirm(dataset_obj, device_entry):
|
||||
continue # user declined to load; skip this curve
|
||||
device_data = dataset_obj.get(device_entry, {}).read().get("value", None)
|
||||
|
||||
# if shape is 2D cast it into 1D and take the last waveform
|
||||
if len(np.shape(device_data)) > 1:
|
||||
@ -1210,9 +1289,6 @@ class Waveform(PlotBase):
|
||||
msg(dict): Message with the async data.
|
||||
metadata(dict): Metadata of the message.
|
||||
"""
|
||||
if self._scan_done:
|
||||
logger.info("Scan is done, ignoring async readback.")
|
||||
return
|
||||
sender = self.sender()
|
||||
if not hasattr(sender, "cb_info"):
|
||||
logger.info(f"Sender {sender} has no cb_info.")
|
||||
@ -1585,6 +1661,8 @@ class Waveform(PlotBase):
|
||||
dev_name = curve.config.signal.name
|
||||
if dev_name in readout_priority_async:
|
||||
self._async_curves.append(curve)
|
||||
if hasattr(self.scan_item, "live_data"):
|
||||
self._setup_async_curve(curve)
|
||||
found_async = True
|
||||
elif dev_name in readout_priority_sync:
|
||||
self._sync_curves.append(curve)
|
||||
@ -1661,6 +1739,106 @@ class Waveform(PlotBase):
|
||||
################################################################################
|
||||
# Utility Methods
|
||||
################################################################################
|
||||
|
||||
# Large dataset handling helpers
|
||||
def _check_dataset_size_and_confirm(self, dataset_obj, device_entry: str) -> bool:
    """
    Check the size of the dataset and confirm with the user if it exceeds the limit.

    Args:
        dataset_obj: The dataset object containing the information.
        device_entry(str): The specific device entry to check.

    Returns:
        bool: True if the dataset is within the size limit or user confirmed to load it,
        False if the dataset exceeds the size limit and user declined to load it.
    """
    try:
        info = dataset_obj._info
        mem_bytes = info.get(device_entry, {}).get("value", {}).get("mem_size", 0)
        # Fallback – grab first entry if lookup failed
        if mem_bytes == 0 and info:
            first_key = next(iter(info))
            mem_bytes = info[first_key]["value"]["mem_size"]
        size_mb = mem_bytes / (1024 * 1024)
        # Fix: route the size report through the module logger instead of a
        # leftover debug print to stdout.
        logger.info(f"Dataset size: {size_mb:.1f} MB")
    except Exception as exc:  # noqa: BLE001
        # Fail open: if the size cannot be determined, allow the load.
        logger.error(f"Unable to evaluate dataset size: {exc}")
        return True

    if size_mb <= self.config.max_dataset_size_mb:
        return True
    logger.warning(
        f"Attempt to load large dataset: {size_mb:.1f} MB "
        f"(limit {self.config.max_dataset_size_mb} MB)"
    )
    if self._skip_large_dataset_warning:
        # User opted out of the dialog for this session -> silently reject.
        logger.info("Skipping large dataset warning dialog.")
        return False
    return self._confirm_large_dataset(size_mb)
|
||||
|
||||
def _confirm_large_dataset(self, size_mb: float) -> bool:
    """
    Confirm with the user whether to load a large dataset with dialog popup.
    Also allows the user to adjust the maximum dataset size limit and if user
    wants to see this popup again during session.

    Args:
        size_mb(float): Size of the dataset in MB.

    Returns:
        bool: True if the user confirmed to load the dataset, False otherwise.
    """
    # NOTE(review): the only caller in this file already returns False when
    # _skip_large_dataset_warning is set, so this early return is unreachable
    # from that path; it answers True only for direct calls — confirm intent.
    if self._skip_large_dataset_warning:
        return True

    dialog = QDialog(self)
    dialog.setWindowTitle("Large dataset detected")
    main_dialog_layout = QVBoxLayout(dialog)

    # Limit adjustment widgets
    limit_adjustment_layout = QHBoxLayout()
    limit_adjustment_layout.addWidget(QLabel("New limit (MB):"))
    spin = QDoubleSpinBox()
    spin.setRange(0.001, 4096)
    spin.setDecimals(3)
    spin.setSingleStep(0.01)
    spin.setValue(self.config.max_dataset_size_mb)
    # NOTE(review): this live binding writes every spinner change straight to
    # the config, even if the user later presses "No"; the explicit assignment
    # on accept below is therefore redundant — verify the intended semantics.
    spin.valueChanged.connect(lambda value: setattr(self.config, "max_dataset_size_mb", value))
    limit_adjustment_layout.addWidget(spin)

    # Don't show again checkbox
    checkbox = QCheckBox("Don't show this again for this session")

    buttons = QDialogButtonBox(
        QDialogButtonBox.Yes | QDialogButtonBox.No, Qt.Horizontal, dialog
    )
    buttons.accepted.connect(dialog.accept)  # Yes
    buttons.rejected.connect(dialog.reject)  # No

    # widget layout
    main_dialog_layout.addWidget(
        QLabel(
            f"The selected dataset is {size_mb:.1f} MB which exceeds the "
            f"current limit of {self.config.max_dataset_size_mb} MB.\n"
        )
    )
    main_dialog_layout.addLayout(limit_adjustment_layout)
    main_dialog_layout.addWidget(checkbox)
    main_dialog_layout.addWidget(QLabel("Would you like to display dataset anyway?"))
    main_dialog_layout.addWidget(buttons)

    result = dialog.exec()  # modal; waits for user choice

    # Respect the “don't show again” checkbox for *either* choice
    if checkbox.isChecked():
        self._skip_large_dataset_warning = True

    if result == QDialog.Accepted:
        self.config.max_dataset_size_mb = spin.value()
        return True
    return False
|
||||
|
||||
def _ensure_str_list(self, entries: list | tuple | np.ndarray):
|
||||
"""
|
||||
Convert a variety of possible inputs (string, bytes, list/tuple/ndarray of either)
|
||||
@ -1791,7 +1969,7 @@ class DemoApp(QMainWindow): # pragma: no cover
|
||||
self.setCentralWidget(self.main_widget)
|
||||
|
||||
self.waveform_popup = Waveform(popups=True)
|
||||
self.waveform_popup.plot(y_name="monitor_async")
|
||||
self.waveform_popup.plot(y_name="waveform")
|
||||
|
||||
self.waveform_side = Waveform(popups=False)
|
||||
self.waveform_side.plot(y_name="bpm4i", y_entry="bpm4i", dap="GaussianModel")
|
||||
|
@ -1,3 +1,5 @@
|
||||
from __future__ import annotations
|
||||
|
||||
import json
|
||||
from types import SimpleNamespace
|
||||
from unittest import mock
|
||||
@ -7,6 +9,15 @@ import numpy as np
|
||||
import pyqtgraph as pg
|
||||
import pytest
|
||||
from pyqtgraph.graphicsItems.DateAxisItem import DateAxisItem
|
||||
from qtpy.QtCore import QTimer
|
||||
from qtpy.QtWidgets import (
|
||||
QApplication,
|
||||
QCheckBox,
|
||||
QDialog,
|
||||
QDialogButtonBox,
|
||||
QDoubleSpinBox,
|
||||
QSpinBox,
|
||||
)
|
||||
|
||||
from bec_widgets.widgets.plots.plot_base import UIMode
|
||||
from bec_widgets.widgets.plots.waveform.curve import DeviceSignal
|
||||
@ -533,6 +544,7 @@ def test_on_async_readback_add_update(qtbot, mocked_client):
|
||||
"""
|
||||
wf = create_widget(qtbot, Waveform, client=mocked_client)
|
||||
wf.scan_item = create_dummy_scan_item()
|
||||
wf._scan_done = False # simulate a live scan
|
||||
c = wf.plot(arg1="async_device", label="async_device-async_device")
|
||||
wf._async_curves = [c]
|
||||
# Suppose existing data
|
||||
@ -819,3 +831,227 @@ def test_show_dap_summary_popup(qtbot, mocked_client):
|
||||
wf.dap_summary_dialog.close()
|
||||
assert wf.dap_summary_dialog is None
|
||||
assert fit_action.isChecked() is False
|
||||
|
||||
|
||||
#####################################################
|
||||
# The following tests are for the async dataset guard
|
||||
#####################################################
|
||||
|
||||
|
||||
def test_skip_large_dataset_warning_property(qtbot, mocked_client):
    """
    Verify the getter and setter of skip_large_dataset_warning work correctly.
    """
    wf = create_widget(qtbot, Waveform, client=mocked_client)

    # The session flag starts disabled
    assert wf.skip_large_dataset_warning is False

    # Enable, then disable again, checking the round-trip each time
    wf.skip_large_dataset_warning = True
    assert wf.skip_large_dataset_warning is True

    wf.skip_large_dataset_warning = False
    assert wf.skip_large_dataset_warning is False
|
||||
|
||||
|
||||
def test_max_dataset_size_mb_property(qtbot, mocked_client):
    """
    Verify getter, setter, and validation of max_dataset_size_mb.
    """
    wf = create_widget(qtbot, Waveform, client=mocked_client)

    # Default from WaveformConfig is 10 MB
    assert wf.max_dataset_size_mb == 10

    # Set to a valid new value
    wf.max_dataset_size_mb = 5.5
    assert wf.max_dataset_size_mb == 5.5
    # Ensure the config is updated too
    assert wf.config.max_dataset_size_mb == 5.5
|
||||
|
||||
|
||||
def _dummy_dataset(mem_bytes: int, entry: str = "waveform_waveform"):
|
||||
"""
|
||||
Return an object that mimics the BEC dataset structure:
|
||||
it has exactly one attribute `_info` with the expected layout.
|
||||
"""
|
||||
return SimpleNamespace(_info={entry: {"value": {"mem_size": mem_bytes}}})
|
||||
|
||||
|
||||
def test_dataset_guard_under_limit(qtbot, mocked_client, monkeypatch):
    """
    Dataset below the limit should load without triggering the dialog.
    """
    wf = create_widget(qtbot, Waveform, client=mocked_client)
    wf.max_dataset_size_mb = 1  # 1 MiB

    # Record whether the confirmation dialog was invoked — it must not be.
    dialog_seen = {"dlg": False}

    def fake_confirm(self, size_mb):
        dialog_seen["dlg"] = True

    monkeypatch.setattr(Waveform, "_confirm_large_dataset", fake_confirm)

    small = _dummy_dataset(mem_bytes=512_000)  # ≈0.49 MiB
    assert wf._check_dataset_size_and_confirm(small, "waveform_waveform") is True
    assert dialog_seen["dlg"] is False
|
||||
|
||||
|
||||
def test_dataset_guard_over_limit_accept(qtbot, mocked_client, monkeypatch):
    """
    Dataset above the limit where user presses *Yes*.
    """
    wf = create_widget(qtbot, Waveform, client=mocked_client)
    wf.max_dataset_size_mb = 1  # 1 MiB

    # Stub the dialog out as if the user confirmed the load
    monkeypatch.setattr(Waveform, "_confirm_large_dataset", lambda *_: True)

    oversized = _dummy_dataset(mem_bytes=2_000_000)  # ≈1.9 MiB
    assert wf._check_dataset_size_and_confirm(oversized, "waveform_waveform") is True
|
||||
|
||||
|
||||
def test_dataset_guard_over_limit_reject(qtbot, mocked_client, monkeypatch):
    """
    Dataset above the limit where user presses *No*.
    """
    wf = create_widget(qtbot, Waveform, client=mocked_client)
    wf.max_dataset_size_mb = 1  # 1 MiB

    # Stub the dialog out as if the user declined the load
    monkeypatch.setattr(Waveform, "_confirm_large_dataset", lambda *_: False)

    oversized = _dummy_dataset(mem_bytes=2_000_000)  # ≈1.9 MiB
    assert wf._check_dataset_size_and_confirm(oversized, "waveform_waveform") is False
|
||||
|
||||
|
||||
##################################################
|
||||
# Dialog propagation behaviour
|
||||
##################################################
|
||||
|
||||
|
||||
def test_dialog_accept_updates_limit(monkeypatch, qtbot, mocked_client):
    """
    Simulate clicking 'Yes' in the dialog *after* changing the spinner value.
    Verify max_dataset_size_mb is updated and dataset loads.
    """
    wf = create_widget(qtbot, Waveform, client=mocked_client)
    wf.max_dataset_size_mb = 1  # start small

    def fake_confirm(self, size_mb):
        # Simulate user typing '5' in the spinbox then pressing Yes
        self.config.max_dataset_size_mb = 5
        return True  # Yes pressed

    monkeypatch.setattr(Waveform, "_confirm_large_dataset", fake_confirm)

    big_dataset = _dummy_dataset(mem_bytes=4_800_000)  # ≈4.6 MiB
    accepted = wf._check_dataset_size_and_confirm(big_dataset, "waveform_waveform")

    # The load should be accepted and the limit must reflect the new value
    assert accepted is True
    assert wf.max_dataset_size_mb == 5
    assert wf.config.max_dataset_size_mb == 5
|
||||
|
||||
|
||||
def test_dialog_cancel_sets_skip(monkeypatch, qtbot, mocked_client):
    """
    Simulate clicking 'No' but ticking 'Don't show again'.
    Verify skip_large_dataset_warning becomes True and dataset is skipped.
    """
    wf = create_widget(qtbot, Waveform, client=mocked_client)
    assert wf.skip_large_dataset_warning is False

    def fake_confirm(self, size_mb):
        # Mimic ticking the checkbox then pressing No
        self._skip_large_dataset_warning = True
        return False  # No pressed

    monkeypatch.setattr(Waveform, "_confirm_large_dataset", fake_confirm)

    # 11 MB exceeds the default 10 MB limit, so the guard must consult the dialog
    big_dataset = _dummy_dataset(mem_bytes=11_000_000)
    accepted = wf._check_dataset_size_and_confirm(big_dataset, "waveform_waveform")

    # Dataset must not load, but future warnings are suppressed
    assert accepted is False
    assert wf.skip_large_dataset_warning is True
|
||||
|
||||
|
||||
##################################################
|
||||
# Live dialog interaction (no monkey‑patching)
|
||||
##################################################
|
||||
|
||||
|
||||
def _open_dialog_and_click(handler):
|
||||
"""
|
||||
Utility that schedules *handler* to run as soon as a modal
|
||||
dialog is shown. Returns a function suitable for QTimer.singleShot.
|
||||
"""
|
||||
|
||||
def _cb():
|
||||
# Locate the active modal dialog
|
||||
dlg = QApplication.activeModalWidget()
|
||||
assert isinstance(dlg, QDialog), "No active modal dialog found"
|
||||
handler(dlg)
|
||||
|
||||
return _cb
|
||||
|
||||
|
||||
def test_dialog_accept_real_interaction(qtbot, mocked_client):
    """
    End‑to‑end: user changes the limit spinner to 5 MiB, ticks
    'don't show again', then presses YES.
    """
    wf = create_widget(qtbot, Waveform, client=mocked_client)
    wf.max_dataset_size_mb = 1

    # Prepare a large dataset (≈4.6 MiB)
    big_dataset = _dummy_dataset(mem_bytes=4_800_000)

    def handler(dlg):
        # Look up the dialog's child widgets by type
        spin: QDoubleSpinBox = dlg.findChild(QDoubleSpinBox)
        chk: QCheckBox = dlg.findChild(QCheckBox)
        btns: QDialogButtonBox = dlg.findChild(QDialogButtonBox)

        # Interact with widgets
        spin.setValue(5)
        chk.setChecked(True)

        yes_btn = btns.button(QDialogButtonBox.Yes)
        yes_btn.click()

    # Schedule the handler right before invoking the check
    QTimer.singleShot(0, _open_dialog_and_click(handler))

    accepted = wf._check_dataset_size_and_confirm(big_dataset, "waveform_waveform")
    assert accepted is True
    assert wf.max_dataset_size_mb == 5
    assert wf.skip_large_dataset_warning is True
|
||||
|
||||
|
||||
def test_dialog_reject_real_interaction(qtbot, mocked_client):
    """
    End‑to‑end: user leaves spinner unchanged, ticks 'don't show again',
    and presses NO.
    """
    wf = create_widget(qtbot, Waveform, client=mocked_client)
    wf.max_dataset_size_mb = 1

    big_dataset = _dummy_dataset(mem_bytes=4_800_000)

    def handler(dlg):
        chk: QCheckBox = dlg.findChild(QCheckBox)
        btns: QDialogButtonBox = dlg.findChild(QDialogButtonBox)

        # Tick "don't show again", then decline the load
        chk.setChecked(True)
        no_btn = btns.button(QDialogButtonBox.No)
        no_btn.click()

    QTimer.singleShot(0, _open_dialog_and_click(handler))

    accepted = wf._check_dataset_size_and_confirm(big_dataset, "waveform_waveform")
    assert accepted is False
    assert wf.skip_large_dataset_warning is True
    # Limit remains unchanged
    assert wf.max_dataset_size_mb == 1
|
||||
|
Reference in New Issue
Block a user