# Tests for ScanInfo (slic.core.scanner.scaninfo).
import pytest
from pathlib import Path
from slic.core.scanner.scaninfo import ScanInfo
class DummyAdjustable:
    """Lightweight stand-in for a real adjustable.

    Carries only the three metadata attributes the tests read:
    ``name``, ``ID`` and ``units``.
    """

    def __init__(self, name="adj", ID="id", units="u"):
        # Store the metadata fields exactly as given.
        self.name, self.ID, self.units = name, ID, units


@pytest.mark.parametrize(
    "adjustables,values,suffix,expected_filename,expected_params",
    [
        # Case 1: single adjustable with default attribute values.
        (
            [DummyAdjustable()],
            [1, 2, 3],
            "_scan_info.json",
            "fileA_scan_info.json",
            {"name": ["adj"], "Id": ["id"], "units": ["u"]},
        ),
        # Case 2: single adjustable with custom name / ID / units.
        (
            [DummyAdjustable("motorX", "M1", "mm")],
            [10, 20],
            ".meta",
            "fileB.meta",
            {"name": ["motorX"], "Id": ["M1"], "units": ["mm"]},
        ),
        # Case 3: several adjustables -> parallel metadata lists.
        (
            [
                DummyAdjustable("motorX", "M1", "mm"),
                DummyAdjustable("stageY", "S2", "deg"),
                DummyAdjustable("lensZ", "L3", "cm"),
            ],
            [1, 2, 3],
            "_extra.json",
            "fileC_extra.json",
            {
                "name": ["motorX", "stageY", "lensZ"],
                "Id": ["M1", "S2", "L3"],
                "units": ["mm", "deg", "cm"],
            },
        ),
    ],
)
def test_init_creates_expected_filename(tmp_path, adjustables, values, suffix, expected_filename, expected_params):
    """ScanInfo must build its filename from base + suffix and collect adjustable metadata."""
    # Recover the base name by stripping suffix parts off the expected filename.
    stem = expected_filename.partition("_")[0].partition(".")[0]
    scan_info = ScanInfo(stem, tmp_path, adjustables, values, suffix=suffix)

    # The computed filename must end with the expected name + suffix.
    assert scan_info.filename.endswith(expected_filename)

    # Names, IDs, and units of the adjustables are gathered into parallel lists.
    assert scan_info.parameters == expected_params

    # Nothing has been appended yet, so every storage list starts empty.
    for attr in ("values", "readbacks", "files", "info"):
        assert getattr(scan_info, attr) == []


def test_append(tmp_path):
    """append() must grow values/readbacks/files/info as parallel lists, one entry per call."""
    scan = ScanInfo("fileX", tmp_path, [DummyAdjustable("A", "1", "u")], [0])

    # First append: info given as a plain dict.
    scan.append([1, 2, 3], [10, 20, 30], ["f1.dat", "f2.dat", "f3.dat"], {"note": "phase1"})
    assert scan.values == [[1, 2, 3]]
    assert scan.readbacks == [[10, 20, 30]]
    assert scan.files == [["f1.dat", "f2.dat", "f3.dat"]]
    assert scan.info == [{"note": "phase1"}]

    # Second append: info given as a callable, which ScanInfo is expected
    # to evaluate before storing (the stored entry is the dict it returns).
    scan.append([4, 5], [40, 50], ["f4.dat", "f5.dat"], lambda: {"note": "auto_phase2"})
    assert scan.values == [[1, 2, 3], [4, 5]]
    assert scan.readbacks == [[10, 20, 30], [40, 50]]
    assert scan.files == [["f1.dat", "f2.dat", "f3.dat"], ["f4.dat", "f5.dat"]]
    assert scan.info == [{"note": "phase1"}, {"note": "auto_phase2"}]


def test_write_and_to_dict(tmp_path, monkeypatch):
    """write() must pass to_dict()'s full structure and the computed filename to json_save.

    json_save is monkeypatched so nothing is written to disk.
    (Fix: dropped the unused ``from slic.utils import json_save`` import —
    the string-target monkeypatch below imports the module itself.)
    """
    calls = {}

    def fake_json_save(data, filename):
        # Record the arguments so they can be inspected after write().
        calls["data"] = data
        calls["filename"] = filename

    # NOTE(review): patching "slic.utils.json_save" only intercepts the call if the
    # scaninfo module resolves json_save through slic.utils at call time; if it does
    # "from slic.utils import json_save", the correct target would be
    # "slic.core.scanner.scaninfo.json_save" — confirm against the module under test.
    monkeypatch.setattr("slic.utils.json_save", fake_json_save)

    si = ScanInfo(
        "scanTest",
        tmp_path,
        [
            DummyAdjustable("motorX", "M1", "mm"),
            DummyAdjustable("stageY", "S2", "deg"),
        ],
        [0],
        suffix="_info.json",
    )

    # Append two sets of data so to_dict() has non-trivial content.
    si.append([1.0, 2.0], [1.1, 2.1], ["f1.dat", "f2.dat"], {"phase": "init"})
    si.append([3.0, 4.0], [3.1, 4.1], ["f3.dat", "f4.dat"], {"phase": "end"})

    si.write()

    # json_save must receive the target filename and the complete dictionary.
    assert calls["filename"] == si.filename
    assert calls["data"] == si.to_dict()


def test_update_integration(tmp_path, monkeypatch):
    """update() must append the data and immediately write the updated structure.

    json_save is monkeypatched so nothing is written to disk.
    (Fix: dropped the unused ``from slic.utils import json_save`` import —
    the string-target monkeypatch below imports the module itself.)
    """
    calls = {}

    def fake_json_save(data, filename):
        # Record the arguments so they can be inspected after update().
        calls["data"] = data
        calls["filename"] = filename

    # NOTE(review): same caveat as test_write_and_to_dict — if scaninfo imports
    # json_save directly, patch "slic.core.scanner.scaninfo.json_save" instead.
    monkeypatch.setattr("slic.utils.json_save", fake_json_save)

    si = ScanInfo("scanX", tmp_path, [DummyAdjustable("M", "ID", "mm")], [0], suffix=".json")

    si.update([1, 2], [10, 20], ["f1.dat", "f2.dat"], {"phase": "start"})

    # Internal structure must reflect exactly one appended entry.
    assert si.values == [[1, 2]]
    assert si.readbacks == [[10, 20]]
    assert si.files == [["f1.dat", "f2.dat"]]
    assert si.info == [{"phase": "start"}]

    # json_save must have received the full dictionary and target filename.
    assert calls["filename"] == si.filename
    assert calls["data"] == si.to_dict()


def test_to_sfdaq_dict_filled_example(tmp_path):
    """to_sfdaq_dict() must expose the most recently appended point in the SFDAQ layout."""
    si = ScanInfo(
        filename_base="scanAlpha",
        base_dir=tmp_path,
        adjustables=[
            DummyAdjustable("motorX", "M1", "mm"),
            DummyAdjustable("stageY", "S2", "deg"),
            DummyAdjustable("lensZ", "L3", "cm"),
        ],
        values=[0, 1, 2],
        suffix="_scan_info.json",
    )

    # Before any data is appended, scan values/readbacks are reported as None.
    before = si.to_sfdaq_dict()
    assert before["scan_values"] is None
    assert before["scan_readbacks"] is None

    # After two appends, only the latest entry appears in the SFDAQ dict.
    si.append([1.0, 2.0, 3.0], [1.1, 2.1, 3.1], ["f1.dat"], {"note": "first run"})
    si.append([4.0, 5.0, 6.0], [4.1, 5.1, 6.1], ["f2.dat"], {"note": "second run"})

    assert si.to_sfdaq_dict() == {
        "scan_name": "scanAlpha",
        "name": ["motorX", "stageY", "lensZ"],
        "Id": ["M1", "S2", "L3"],
        "units": ["mm", "deg", "cm"],
        "offset": [0, 0, 0],
        "conversion_factor": [1, 1, 1],
        "scan_values": [4.0, 5.0, 6.0],
        "scan_readbacks": [4.1, 5.1, 6.1],
        "scan_readbacks_raw": [4.1, 5.1, 6.1],
    }