Compare commits
22 Commits
feat/eps_d
...
refactor/m
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
22c48115a4 | ||
| 2a7448526b | |||
|
|
a5825307e5 | ||
|
|
54f1f42332 | ||
|
|
48df15f35c | ||
|
|
6f60bd4b2b | ||
| 5d97913956 | |||
| 93384b87e0 | |||
| 9a249363fd | |||
| f925a7c1db | |||
| 5811e445fe | |||
| 16ea7f410e | |||
| 9db56f5273 | |||
| 705df4b253 | |||
| 181b57494b | |||
| efd51462fc | |||
| 8a69c7aa36 | |||
| b19bfb7ca4 | |||
| b818181da2 | |||
| 303929f8e6 | |||
| 9f5799385c | |||
|
|
cb968abe73 |
@@ -1,2 +1,6 @@
|
|||||||
from .load_additional_correction import lamni_read_additional_correction
|
from .alignment import XrayEyeAlign
|
||||||
from .x_ray_eye_align import DataDrivenLamNI, LamNI, MagLamNI, XrayEyeAlign
|
from .lamni import LamNI
|
||||||
|
from .lamni_optics_mixin import LamNIInitError, LaMNIInitStages, LamNIOpticsMixin
|
||||||
|
__all__ = [
|
||||||
|
"LamNI", "XrayEyeAlign", "LamNIInitError", "LaMNIInitStages", "LamNIOpticsMixin"
|
||||||
|
]
|
||||||
461
csaxs_bec/bec_ipython_client/plugins/LamNI/alignment.py
Normal file
461
csaxs_bec/bec_ipython_client/plugins/LamNI/alignment.py
Normal file
@@ -0,0 +1,461 @@
|
|||||||
|
import builtins
|
||||||
|
import time
|
||||||
|
from collections import defaultdict
|
||||||
|
|
||||||
|
import numpy as np
|
||||||
|
from bec_lib import bec_logger
|
||||||
|
from typeguard import typechecked
|
||||||
|
|
||||||
|
from csaxs_bec.bec_ipython_client.plugins.cSAXS import epics_get, epics_put, fshopen
|
||||||
|
|
||||||
|
logger = bec_logger.logger
|
||||||
|
|
||||||
|
if builtins.__dict__.get("bec") is not None:
|
||||||
|
bec = builtins.__dict__.get("bec")
|
||||||
|
dev = builtins.__dict__.get("dev")
|
||||||
|
umv = builtins.__dict__.get("umv")
|
||||||
|
umvr = builtins.__dict__.get("umvr")
|
||||||
|
|
||||||
|
|
||||||
|
class XrayEyeAlign:
    """Interactive alignment of the LamNI sample stage using the X-ray eye camera.

    The operator clicks positions in the X-ray eye live view (exchanged through
    EPICS ``XOMNYI-XEYE-*`` PVs).  This class drives the acquisition sequence,
    collects the clicked positions at a series of rotation angles, and derives
    the tomography field-of-view offsets plus sinusoidal / lookup-table
    corrections from them.
    """

    # pixel calibration, multiply to get mm
    # PIXEL_CALIBRATION = 0.2/209 #.2 with binning
    PIXEL_CALIBRATION = 0.2 / 218  # .2 with binning

    def __init__(self, client, lamni) -> None:
        self.client = client
        self.lamni = lamni
        self.device_manager = client.device_manager
        self.scans = client.scans
        # clicked positions keyed by step index k -> [x_mm, y_mm]
        self.alignment_values = defaultdict(list)
        self._reset_init_values()
        # additional lookup-table corrections, iteration 1 ...
        self.corr_pos_x = []
        self.corr_pos_y = []
        self.corr_angle = []
        # ... and iteration 2
        self.corr_pos_x_2 = []
        self.corr_pos_y_2 = []
        self.corr_angle_2 = []

    # ------------------------------------------------------------------
    # Correction reset
    # ------------------------------------------------------------------

    def reset_correction(self):
        """Clear the iteration-1 lookup-table correction."""
        self.corr_pos_x = []
        self.corr_pos_y = []
        self.corr_angle = []

    def reset_correction_2(self):
        """Clear the iteration-2 lookup-table correction."""
        self.corr_pos_x_2 = []
        self.corr_pos_y_2 = []
        self.corr_angle_2 = []

    def reset_xray_eye_correction(self):
        """Discard the stored sinusoidal X-ray eye fit."""
        self.client.delete_global_var("tomo_fit_xray_eye")

    # ------------------------------------------------------------------
    # FOV offset properties
    # ------------------------------------------------------------------

    @property
    def tomo_fovx_offset(self):
        """Horizontal FOV offset in mm (stored in microns in the global var)."""
        val = self.client.get_global_var("tomo_fov_offset")
        if val is None:
            return 0.0
        return val[0] / 1000

    @tomo_fovx_offset.setter
    @typechecked
    def tomo_fovx_offset(self, val: float):
        val_old = self.client.get_global_var("tomo_fov_offset")
        if val_old is None:
            val_old = [0.0, 0.0]
        self.client.set_global_var("tomo_fov_offset", [val * 1000, val_old[1]])

    @property
    def tomo_fovy_offset(self):
        """Vertical FOV offset in mm (stored in microns in the global var)."""
        val = self.client.get_global_var("tomo_fov_offset")
        if val is None:
            return 0.0
        return val[1] / 1000

    @tomo_fovy_offset.setter
    @typechecked
    def tomo_fovy_offset(self, val: float):
        val_old = self.client.get_global_var("tomo_fov_offset")
        if val_old is None:
            val_old = [0.0, 0.0]
        self.client.set_global_var("tomo_fov_offset", [val_old[0], val * 1000])

    # ------------------------------------------------------------------
    # Internal helpers
    # ------------------------------------------------------------------

    def _reset_init_values(self):
        # shift_xy is in microns; _xray_fov_xy in camera pixels
        self.shift_xy = [0, 0]
        self._xray_fov_xy = [0, 0]

    def _disable_rt_feedback(self):
        self.device_manager.devices.rtx.controller.feedback_disable()

    def _enable_rt_feedback(self):
        self.device_manager.devices.rtx.controller.feedback_enable_with_reset()

    def tomo_rotate(self, val: float):
        """Rotate the sample rotation stage (lsamrot) to *val* degrees."""
        # pylint: disable=undefined-variable
        umv(self.device_manager.devices.lsamrot, val)

    def get_tomo_angle(self):
        """Return the current lsamrot readback value in degrees."""
        return self.device_manager.devices.lsamrot.readback.read()["lsamrot"]["value"]

    # ------------------------------------------------------------------
    # X-ray eye camera control
    # ------------------------------------------------------------------

    def save_frame(self):
        """Ask the X-ray eye IOC to save the current frame."""
        epics_put("XOMNYI-XEYE-SAVFRAME:0", 1)

    def update_frame(self):
        """Acquire a fresh frame: start live view, wait for a frame, stop live view."""
        epics_put("XOMNYI-XEYE-ACQDONE:0", 0)
        # start live
        epics_put("XOMNYI-XEYE-ACQ:0", 1)
        # wait for start live
        while epics_get("XOMNYI-XEYE-ACQDONE:0") == 0:
            time.sleep(0.5)
            print("waiting for live view to start...")
        fshopen()

        epics_put("XOMNYI-XEYE-ACQDONE:0", 0)

        while epics_get("XOMNYI-XEYE-ACQDONE:0") == 0:
            print("waiting for new frame...")
            time.sleep(0.5)

        time.sleep(0.5)
        # stop live view
        epics_put("XOMNYI-XEYE-ACQ:0", 0)
        time.sleep(1)
        print("got new frame")

    def update_fov(self, k: int):
        """Track the largest field of view (in pixels) seen so far at step *k*."""
        self._xray_fov_xy[0] = max(epics_get(f"XOMNYI-XEYE-XWIDTH_X:{k}"), self._xray_fov_xy[0])
        # NOTE(review): this copies the x-FOV into the y slot instead of reading a
        # y-width PV / comparing against _xray_fov_xy[1] — looks like a copy-paste
        # bug, but the correct PV name cannot be confirmed from here. TODO verify.
        self._xray_fov_xy[1] = max(0, self._xray_fov_xy[0])

    @property
    def movement_buttons_enabled(self):
        """Current enable state of the GUI X/Y movement buttons (as [x, y])."""
        return [epics_get("XOMNYI-XEYE-ENAMVX:0"), epics_get("XOMNYI-XEYE-ENAMVY:0")]

    @movement_buttons_enabled.setter
    def movement_buttons_enabled(self, enabled: bool):
        enabled = int(enabled)
        epics_put("XOMNYI-XEYE-ENAMVX:0", enabled)
        epics_put("XOMNYI-XEYE-ENAMVY:0", enabled)

    def send_message(self, msg: str):
        """Display *msg* in the X-ray eye GUI status field."""
        epics_put("XOMNYI-XEYE-MESSAGE:0.DESC", msg)

    # ------------------------------------------------------------------
    # Alignment procedure
    # ------------------------------------------------------------------

    def align(self):
        """Run the interactive alignment loop.

        Step k=0 records the FZP center, k=1 the sample center at 0 deg, and
        k=2..10 sample centers at successive 45-degree rotations.  The operator
        submits each clicked position through XOMNYI-XEYE-SUBMIT.
        """
        self._reset_init_values()
        self.reset_correction()
        self.reset_correction_2()

        self._disable_rt_feedback()
        epics_put("XOMNYI-XEYE-PIXELSIZE:0", self.PIXEL_CALIBRATION)
        self._enable_rt_feedback()

        self.movement_buttons_enabled = False
        epics_put("XOMNYI-XEYE-ACQ:0", 0)
        self.send_message("please wait...")
        epics_put("XOMNYI-XEYE-SAMPLENAME:0.DESC", "Let us LAMNI...")

        self._disable_rt_feedback()
        k = 0

        self.lamni.lfzp_in()
        self.update_frame()

        self.movement_buttons_enabled = False
        epics_put("XOMNYI-XEYE-SUBMIT:0", 0)
        epics_put("XOMNYI-XEYE-STEP:0", 0)
        self.send_message("Submit center value of FZP.")

        while True:
            if epics_get("XOMNYI-XEYE-SUBMIT:0") == 1:
                val_x = epics_get(f"XOMNYI-XEYE-XVAL_X:{k}") * self.PIXEL_CALIBRATION  # in mm
                val_y = epics_get(f"XOMNYI-XEYE-YVAL_Y:{k}") * self.PIXEL_CALIBRATION  # in mm
                self.alignment_values[k] = [val_x, val_y]
                print(
                    f"Clicked position {k}: x {self.alignment_values[k][0]}, y"
                    f" {self.alignment_values[k][1]}"
                )

                if k == 0:  # received center value of FZP
                    self.send_message("please wait ...")
                    self.lamni.loptics_out()
                    epics_put("XOMNYI-XEYE-SUBMIT:0", -1)
                    self.movement_buttons_enabled = False
                    print("Moving sample in, FZP out")

                    # toggle feedback to re-reference after the optics move
                    self._disable_rt_feedback()
                    time.sleep(0.3)
                    self._enable_rt_feedback()
                    time.sleep(0.3)

                    self.update_frame()
                    self.send_message("Go and find the sample")
                    epics_put("XOMNYI-XEYE-SUBMIT:0", 0)
                    self.movement_buttons_enabled = True

                elif k == 1:  # received sample center value at samrot 0
                    msg = (
                        f"Base shift values from movement are x {self.shift_xy[0]}, y"
                        f" {self.shift_xy[1]}"
                    )
                    print(msg)
                    logger.info(msg)
                    # fold the clicked FZP/sample offset (mm) into the shift (um)
                    self.shift_xy[0] += (
                        self.alignment_values[0][0] - self.alignment_values[1][0]
                    ) * 1000
                    self.shift_xy[1] += (
                        self.alignment_values[1][1] - self.alignment_values[0][1]
                    ) * 1000
                    print(
                        "Base shift values from movement and clicked position are x"
                        f" {self.shift_xy[0]}, y {self.shift_xy[1]}"
                    )

                    self.scans.lamni_move_to_scan_center(
                        self.shift_xy[0] / 1000, self.shift_xy[1] / 1000, self.get_tomo_angle()
                    ).wait()

                    self.send_message("please wait ...")
                    epics_put("XOMNYI-XEYE-SUBMIT:0", -1)
                    self.movement_buttons_enabled = False
                    time.sleep(1)

                    self.scans.lamni_move_to_scan_center(
                        self.shift_xy[0] / 1000, self.shift_xy[1] / 1000, self.get_tomo_angle()
                    ).wait()

                    epics_put("XOMNYI-XEYE-ANGLE:0", self.get_tomo_angle())
                    self.update_frame()
                    self.send_message("Submit sample center and FOV (0 deg)")
                    epics_put("XOMNYI-XEYE-SUBMIT:0", 0)
                    self.update_fov(k)

                elif 1 < k < 10:  # received sample center value at samrot 0 ... 315
                    self.send_message("please wait ...")
                    epics_put("XOMNYI-XEYE-SUBMIT:0", -1)

                    # rotate in two half-steps, re-centering after each
                    self._disable_rt_feedback()
                    self.tomo_rotate((k - 1) * 45 - 45 / 2)
                    self.scans.lamni_move_to_scan_center(
                        self.shift_xy[0] / 1000, self.shift_xy[1] / 1000, self.get_tomo_angle()
                    ).wait()
                    self._disable_rt_feedback()
                    self.tomo_rotate((k - 1) * 45)
                    self.scans.lamni_move_to_scan_center(
                        self.shift_xy[0] / 1000, self.shift_xy[1] / 1000, self.get_tomo_angle()
                    ).wait()

                    epics_put("XOMNYI-XEYE-ANGLE:0", self.get_tomo_angle())
                    self.update_frame()
                    self.send_message("Submit sample center")
                    epics_put("XOMNYI-XEYE-SUBMIT:0", 0)
                    epics_put("XOMNYI-XEYE-ENAMVX:0", 1)
                    self.update_fov(k)

                elif k == 10:  # received sample center value at samrot 270, done
                    self.send_message("done...")
                    epics_put("XOMNYI-XEYE-SUBMIT:0", -1)
                    self.movement_buttons_enabled = False
                    self.update_fov(k)
                    break

                k += 1
                epics_put("XOMNYI-XEYE-STEP:0", k)

            if k < 2:
                # while finding the sample, honor GUI-requested stage moves
                _xrayeyalignmvx = epics_get("XOMNYI-XEYE-MVX:0")
                _xrayeyalignmvy = epics_get("XOMNYI-XEYE-MVY:0")
                if _xrayeyalignmvx != 0 or _xrayeyalignmvy != 0:
                    self.shift_xy[0] = self.shift_xy[0] + _xrayeyalignmvx
                    self.shift_xy[1] = self.shift_xy[1] + _xrayeyalignmvy
                    self.scans.lamni_move_to_scan_center(
                        self.shift_xy[0] / 1000, self.shift_xy[1] / 1000, self.get_tomo_angle()
                    ).wait()
                    print(
                        f"Current center horizontal {self.shift_xy[0]} vertical {self.shift_xy[1]}"
                    )
                    epics_put("XOMNYI-XEYE-MVY:0", 0)
                    epics_put("XOMNYI-XEYE-MVX:0", 0)
                    self.update_frame()

            time.sleep(0.2)

        self.write_output()
        fovx = self._xray_fov_xy[0] * self.PIXEL_CALIBRATION * 1000 / 2
        fovy = self._xray_fov_xy[1] * self.PIXEL_CALIBRATION * 1000 / 2
        print(
            f"The largest field of view from the xrayeyealign was \nfovx = {fovx:.0f} microns,"
            f" fovy = {fovy:.0f} microns"
        )
        print("Use matlab routine to fit the current alignment...")
        print(
            "This additional shift is applied to the base shift values\n which are x"
            f" {self.shift_xy[0]}, y {self.shift_xy[1]}"
        )

        self._disable_rt_feedback()
        self.tomo_rotate(0)

        print(
            "\n\nNEXT LOAD ALIGNMENT PARAMETERS\nby running"
            " lamni.align.read_xray_eye_correction()\n"
        )

        self.client.set_global_var("tomo_fov_offset", self.shift_xy)

    # ------------------------------------------------------------------
    # Alignment output
    # ------------------------------------------------------------------

    def write_output(self):
        """Write the per-angle offsets (um) to the alignment-values file for the MATLAB fit."""
        import os

        with open(
            os.path.expanduser("~/Data10/specES1/internal/xrayeye_alignmentvalues"), "w"
        ) as alignment_values_file:
            alignment_values_file.write("angle\thorizontal\tvertical\n")
            # k=2..10 are the sample centers at 0, 45, ... 360 degrees
            for k in range(2, 11):
                fovx_offset = (self.alignment_values[0][0] - self.alignment_values[k][0]) * 1000
                fovy_offset = (self.alignment_values[k][1] - self.alignment_values[0][1]) * 1000
                print(
                    f"Writing to file new alignment: number {k}, value x {fovx_offset}, y"
                    f" {fovy_offset}"
                )
                alignment_values_file.write(f"{(k-2)*45}\t{fovx_offset}\t{fovy_offset}\n")

    # ------------------------------------------------------------------
    # X-ray eye sinusoidal correction (loaded from MATLAB fit files)
    # ------------------------------------------------------------------

    def read_xray_eye_correction(self, dir_path=None):
        """Load the MATLAB sinusoidal fit coefficients and store them as a global var.

        Args:
            dir_path (str, optional): Directory containing the
                ``ptychotomoalign_{A,B,C}{x,y}.txt`` files. Defaults to the
                standard specES1 location.
        """
        import os

        if dir_path is None:
            dir_path = os.path.expanduser("~/Data10/specES1/internal/")
        tomo_fit_xray_eye = np.zeros((2, 3))
        for i, axis in enumerate(["x", "y"]):
            for j, coeff in enumerate(["A", "B", "C"]):
                with open(os.path.join(dir_path, f"ptychotomoalign_{coeff}{axis}.txt"), "r") as f:
                    tomo_fit_xray_eye[i][j] = f.readline()

        self.client.set_global_var("tomo_fit_xray_eye", tomo_fit_xray_eye.tolist())
        # x amp, phase, offset, y amp, phase, offset
        #  0 0    0 1     0 2    1 0    1 1     1 2
        print("New alignment parameters loaded from X-ray eye")
        print(
            f"X Amplitude {tomo_fit_xray_eye[0][0]}, "
            f"X Phase {tomo_fit_xray_eye[0][1]}, "
            f"X Offset {tomo_fit_xray_eye[0][2]}, "
            f"Y Amplitude {tomo_fit_xray_eye[1][0]}, "
            f"Y Phase {tomo_fit_xray_eye[1][1]}, "
            f"Y Offset {tomo_fit_xray_eye[1][2]}"
        )

    def lamni_compute_additional_correction_xeye_mu(self, angle):
        """Compute sinusoidal correction from the X-ray eye fit for the given angle."""
        tomo_fit_xray_eye = self.client.get_global_var("tomo_fit_xray_eye")
        if tomo_fit_xray_eye is None:
            print("Not applying any additional correction. No x-ray eye data available.\n")
            return (0, 0)

        # x amp, phase, offset, y amp, phase, offset
        #  0 0    0 1     0 2    1 0    1 1     1 2
        correction_x = (
            tomo_fit_xray_eye[0][0] * np.sin(np.radians(angle) + tomo_fit_xray_eye[0][1])
            + tomo_fit_xray_eye[0][2]
        ) / 1000
        correction_y = (
            tomo_fit_xray_eye[1][0] * np.sin(np.radians(angle) + tomo_fit_xray_eye[1][1])
            + tomo_fit_xray_eye[1][2]
        ) / 1000

        print(f"Xeye correction x {correction_x}, y {correction_y} for angle {angle}\n")
        return (correction_x, correction_y)

    # ------------------------------------------------------------------
    # Additional lookup-table corrections (iteration 1 and 2)
    # ------------------------------------------------------------------

    def read_additional_correction(self, correction_file: str):
        """Load the iteration-1 lookup-table correction from *correction_file*."""
        self.corr_pos_x, self.corr_pos_y, self.corr_angle = self._read_correction_file_xy(
            correction_file
        )

    def read_additional_correction_2(self, correction_file: str):
        """Load the iteration-2 lookup-table correction from *correction_file*."""
        self.corr_pos_x_2, self.corr_pos_y_2, self.corr_angle_2 = self._read_correction_file_xy(
            correction_file
        )

    def _read_correction_file_xy(self, correction_file: str):
        """Parse a correction file that contains corr_pos_x, corr_pos_y and corr_angle entries."""
        with open(correction_file, "r") as f:
            # header line: third whitespace-separated token is the element count
            num_elements = f.readline()
            int_num_elements = int(num_elements.split(" ")[2])
            print(int_num_elements)
            corr_pos_x = []
            corr_pos_y = []
            corr_angle = []
            # three lines (x, y, angle) per element
            for j in range(0, int_num_elements * 3):
                line = f.readline()
                value = line.split(" ")[2]
                name = line.split(" ")[0].split("[")[0]
                if name == "corr_pos_x":
                    corr_pos_x.append(float(value) / 1000)  # um -> mm
                elif name == "corr_pos_y":
                    corr_pos_y.append(float(value) / 1000)  # um -> mm
                elif name == "corr_angle":
                    corr_angle.append(float(value))
        return corr_pos_x, corr_pos_y, corr_angle

    def compute_additional_correction(self, angle):
        """Look up the iteration-1 correction shifts closest to *angle*."""
        return self._compute_correction_xy(
            angle, self.corr_pos_x, self.corr_pos_y, self.corr_angle, label="1"
        )

    def compute_additional_correction_2(self, angle):
        """Look up the iteration-2 correction shifts closest to *angle*."""
        return self._compute_correction_xy(
            angle, self.corr_pos_x_2, self.corr_pos_y_2, self.corr_angle_2, label="2"
        )

    def _compute_correction_xy(self, angle, corr_pos_x, corr_pos_y, corr_angle, label=""):
        """Find the correction for the closest angle in the lookup table."""
        if not corr_pos_x:
            print(f"Not applying additional correction {label}. No data available.\n")
            return (0, 0)

        shift_x = corr_pos_x[0]
        shift_y = corr_pos_y[0]
        angledelta = np.fabs(corr_angle[0] - angle)

        # linear scan for the table entry with the smallest angular distance
        for j in range(1, len(corr_pos_x)):
            newangledelta = np.fabs(corr_angle[j] - angle)
            if newangledelta < angledelta:
                shift_x = corr_pos_x[j]
                shift_y = corr_pos_y[j]
                angledelta = newangledelta

        # clamp to the table edges when the best match came out as zero
        if shift_x == 0 and angle < corr_angle[0]:
            shift_x = corr_pos_x[0]
            shift_y = corr_pos_y[0]

        if shift_x == 0 and angle > corr_angle[-1]:
            shift_x = corr_pos_x[-1]
            shift_y = corr_pos_y[-1]

        print(f"Additional correction shifts {label}: {shift_x} {shift_y}")
        return (shift_x, shift_y)
|
||||||
211
csaxs_bec/bec_ipython_client/plugins/LamNI/extra_tomo.py
Normal file
211
csaxs_bec/bec_ipython_client/plugins/LamNI/extra_tomo.py
Normal file
@@ -0,0 +1,211 @@
|
|||||||
|
"""
|
||||||
|
extra_tomo.py
|
||||||
|
=============
|
||||||
|
Specialist LamNI subclasses for specific experimental configurations.
|
||||||
|
Import explicitly when needed, e.g.:
|
||||||
|
|
||||||
|
from csaxs_bec...extra_tomo import MagLamNI
|
||||||
|
from csaxs_bec...extra_tomo import DataDrivenLamNI
|
||||||
|
"""
|
||||||
|
|
||||||
|
import builtins
|
||||||
|
import datetime
|
||||||
|
import os
|
||||||
|
import time
|
||||||
|
|
||||||
|
import h5py
|
||||||
|
import numpy as np
|
||||||
|
from bec_lib import bec_logger
|
||||||
|
from bec_lib.alarm_handler import AlarmBase
|
||||||
|
|
||||||
|
from .lamni import LamNI
|
||||||
|
|
||||||
|
logger = bec_logger.logger
|
||||||
|
|
||||||
|
if builtins.__dict__.get("bec") is not None:
|
||||||
|
bec = builtins.__dict__.get("bec")
|
||||||
|
dev = builtins.__dict__.get("dev")
|
||||||
|
umv = builtins.__dict__.get("umv")
|
||||||
|
scans = builtins.__dict__.get("scans")
|
||||||
|
|
||||||
|
|
||||||
|
class MagLamNI(LamNI):
    """LamNI subclass for magnetic experiments (XMCD).

    Adds a slow rotation helper and allows injection of a custom
    per-angle callback via the ``lamni_at_each_angle`` builtin.
    """

    def sub_tomo_scan(self, subtomo_number, start_angle=None):
        super().sub_tomo_scan(subtomo_number, start_angle)
        # self.rotate_slowly(0)

    def rotate_slowly(self, angle, step_size=20):
        """Rotate to target angle in small steps to avoid mechanical stress.

        Args:
            angle (float): Target angle in degrees.
            step_size (float): Maximum angular increment per step in degrees.
        """
        current_angle = dev.lsamrot.read(cached=True)["value"]
        # +1 so linspace always includes at least start and end points
        steps = int(np.ceil(np.abs(current_angle - angle) / step_size)) + 1
        for target_angle in np.linspace(current_angle, angle, steps, endpoint=True):
            umv(dev.lsamrot, target_angle)
            # re-center the scan FOV after each incremental rotation
            scans.lamni_move_to_scan_center(
                self.align.tomo_fovx_offset, self.align.tomo_fovy_offset, target_angle
            )

    def _at_each_angle(self, angle: float) -> None:
        """Per-projection hook: defer to a user-provided builtin callback if present."""
        if "lamni_at_each_angle" in builtins.__dict__:
            # pylint: disable=undefined-variable
            lamni_at_each_angle(self, angle)
            return

        self.tomo_scan_projection(angle)
        self.tomo_reconstruct()
|
|
||||||
|
|
||||||
|
class DataDrivenLamNI(LamNI):
    """LamNI subclass that reads per-projection scan parameters from an HDF5 file.

    Instead of a fixed FOV and step size for the whole tomogram, each
    projection can have individual values for step size, loptz position
    and lateral shifts, as specified in a datadriven_params.h5 file.
    """

    def __init__(self, client):
        super().__init__(client)
        # dict of equally-long 1D arrays, one entry per projection
        self.tomo_data = {}

    def tomo_scan(
        self,
        subtomo_start=1,
        start_index=None,
        fname="~/Data10/data_driven_config/datadriven_params.h5",
    ):
        """Start a data-driven tomo scan.

        Args:
            subtomo_start (int): Unused; kept for API compatibility. Use start_index to resume.
            start_index (int, optional): Skip projections before this index. Defaults to None.
            fname (str): Path to the HDF5 parameter file. Defaults to the standard location.
        """
        bec = builtins.__dict__.get("bec")
        scans = builtins.__dict__.get("scans")

        fname = os.path.expanduser(fname)
        if not os.path.exists(fname):
            raise FileNotFoundError(f"Could not find datadriven params file in {fname}.")

        content = f"Loading tomo parameters from {fname}."
        logger.warning(content)
        msg = bec.logbook.LogbookMessage()
        msg.add_text(content).add_tag(["Data_driven_file", "BEC"])
        self.client.logbook.send_logbook_message(msg)

        self._update_tomo_data_from_file(fname)
        self._current_special_angles = self.special_angles.copy()

        # only register a new sample/tomo entry on a fresh (non-resumed) run
        if subtomo_start == 1 and start_index is None:
            self.tomo_id = self.add_sample_database(
                self.sample_name,
                str(datetime.date.today()),
                bec.active_account.decode(),
                bec.queue.next_scan_number,
                "lamni",
                "test additional info",
                "BEC",
            )
            self.write_pdf_report()

        with scans.dataset_id_on_hold:
            self.sub_tomo_data_driven(start_index)

    def sub_tomo_scan(self, subtomo_number=None, start_angle=None):
        """Disabled for data-driven scans; resume via tomo_scan(start_index=...)."""
        raise NotImplementedError(
            "Cannot run sub_tomo_scan with DataDrivenLamNI. "
            "Use lamni.tomo_scan(start_index=<N>) to resume instead."
        )

    def _at_each_angle(
        self, angle=None, stepsize=None, loptz_pos=None, manual_shift_x=0, manual_shift_y=0
    ):
        """Apply per-projection parameters, then run the standard per-angle routine."""
        self.manual_shift_x = manual_shift_x
        self.manual_shift_y = manual_shift_y
        self.tomo_shellstep = stepsize
        if loptz_pos is not None:
            # feedback must be off while moving the optics stage
            dev.rtx.controller.feedback_disable()
            umv(dev.loptz, loptz_pos)
        super()._at_each_angle(angle=angle)

    def sub_tomo_data_driven(self, start_index=None):
        """Iterate over all projections defined in the loaded HDF5 parameter file."""
        for scan_index, scan_data in enumerate(zip(*self.tomo_data.values())):
            if start_index and scan_index < start_index:
                continue
            (
                angle,
                stepsize,
                loptz_pos,
                propagation_distance,
                manual_shift_x,
                manual_shift_y,
                subtomo_number,
            ) = scan_data
            # record this projection's parameters in the scan metadata
            bec.metadata.update(
                {key: float(val) for key, val in zip(self.tomo_data.keys(), scan_data)}
            )
            successful = False
            error_caught = False
            if 0 <= angle < 360.05:
                print(f"Starting LamNI scan for angle {angle}")
                # retry until the projection completes with stable beam
                while not successful:
                    self._start_beam_check()
                    if not self.special_angles:
                        self._current_special_angles = []
                    if self._current_special_angles:
                        next_special_angle = self._current_special_angles[0]
                        if np.isclose(angle, next_special_angle, atol=0.5):
                            self._current_special_angles.pop(0)
                            num_repeats = self.special_angle_repeats
                    else:
                        num_repeats = 1
                    try:
                        start_scan_number = bec.queue.next_scan_number
                        for i in range(num_repeats):
                            self._at_each_angle(
                                float(angle),
                                stepsize=float(stepsize),
                                loptz_pos=float(loptz_pos),
                                manual_shift_x=float(manual_shift_x),
                                manual_shift_y=float(manual_shift_y),
                            )
                        error_caught = False
                    except AlarmBase as exc:
                        # recover from scan timeouts by resetting the queue; re-raise others
                        if exc.alarm_type == "TimeoutError":
                            bec.queue.request_queue_reset()
                            time.sleep(2)
                            error_caught = True
                        else:
                            raise exc

                    if self._was_beam_okay() and not error_caught:
                        successful = True
                    else:
                        self._wait_for_beamline_checks()

                end_scan_number = bec.queue.next_scan_number
                for scan_nr in range(start_scan_number, end_scan_number):
                    self._write_tomo_scan_number(scan_nr, angle, subtomo_number)

    def _update_tomo_data_from_file(self, fname: str) -> None:
        """Load projection parameters from the HDF5 file into self.tomo_data."""
        with h5py.File(fname, "r") as file:
            self.tomo_data["theta"] = np.array([*file["theta"]]).flatten()
            self.tomo_data["stepsize"] = np.array([*file["stepsize"]]).flatten()
            self.tomo_data["loptz"] = np.array([*file["loptz"]]).flatten()
            self.tomo_data["propagation_distance"] = np.array(
                [*file["relative_propagation_distance"]]
            ).flatten()
            self.tomo_data["manual_shift_x"] = np.array([*file["manual_shift_x"]]).flatten()
            self.tomo_data["manual_shift_y"] = np.array([*file["manual_shift_y"]]).flatten()
            self.tomo_data["subtomo_id"] = np.array([*file["subtomo_id"]]).flatten()

        # all parameter arrays must describe the same number of projections
        shapes = [data.shape for data in self.tomo_data.values()]
        if len(set(shapes)) > 1:
            raise ValueError(f"Tomo data file has entries of inconsistent lengths: {shapes}.")
|
||||||
1015
csaxs_bec/bec_ipython_client/plugins/LamNI/lamni.py
Normal file
1015
csaxs_bec/bec_ipython_client/plugins/LamNI/lamni.py
Normal file
File diff suppressed because it is too large
Load Diff
@@ -6,8 +6,8 @@ from rich.console import Console
|
|||||||
from rich.table import Table
|
from rich.table import Table
|
||||||
|
|
||||||
from csaxs_bec.bec_ipython_client.plugins.cSAXS import epics_put, fshclose
|
from csaxs_bec.bec_ipython_client.plugins.cSAXS import epics_put, fshclose
|
||||||
|
from csaxs_bec.bec_ipython_client.plugins.omny.omny_general_tools import OMNYTools
|
||||||
|
|
||||||
# import builtins to avoid linter errors
|
|
||||||
dev = builtins.__dict__.get("dev")
|
dev = builtins.__dict__.get("dev")
|
||||||
umv = builtins.__dict__.get("umv")
|
umv = builtins.__dict__.get("umv")
|
||||||
bec = builtins.__dict__.get("bec")
|
bec = builtins.__dict__.get("bec")
|
||||||
@@ -17,26 +17,31 @@ class LamNIInitError(Exception):
|
|||||||
pass
|
pass
|
||||||
|
|
||||||
|
|
||||||
class LaMNIInitStagesMixin:
|
class LaMNIInitStages:
|
||||||
|
"""Handles hardware initialization and referencing of LamNI stages."""
|
||||||
|
|
||||||
|
def __init__(self, client):
|
||||||
|
super().__init__()
|
||||||
|
self.client = client
|
||||||
|
self.OMNYTools = OMNYTools(self.client)
|
||||||
|
|
||||||
def lamni_init_stages(self):
|
def lamni_init_stages(self):
|
||||||
user_input = input("Starting initialization of LamNI stages. OK? [y/n]")
|
|
||||||
if user_input == "y":
|
if self.OMNYTools.yesno("Start initialization of LamNI stages. OK?"):
|
||||||
print("staring...")
|
print("starting...")
|
||||||
dev.lsamrot.enabled = True
|
dev.lsamrot.enabled = True
|
||||||
else:
|
else:
|
||||||
return
|
return
|
||||||
|
|
||||||
if self.check_all_axes_of_lamni_referenced():
|
if self.check_all_axes_of_lamni_referenced():
|
||||||
user_input = input("Continue anyways? [y/n]")
|
if self.OMNYTools.yesno("All axes are referenced. Continue anyways?"):
|
||||||
if user_input == "y":
|
|
||||||
print("ok then...")
|
print("ok then...")
|
||||||
else:
|
else:
|
||||||
return
|
return
|
||||||
|
|
||||||
axis_id_lsamrot = dev.lsamrot._config["deviceConfig"].get("axis_Id")
|
axis_id_lsamrot = dev.lsamrot._config["deviceConfig"].get("axis_Id")
|
||||||
if dev.lsamrot.controller.get_motor_limit_switch(axis_id_lsamrot)[1] == False:
|
if dev.lsamrot.controller.get_motor_limit_switch(axis_id_lsamrot)[1] == False:
|
||||||
user_input = input("The rotation stage will be moved to one limit [y/n]")
|
if self.OMNYTools.yesno("The rotation stage will be moved to one limit"):
|
||||||
if user_input == "y":
|
|
||||||
print("starting...")
|
print("starting...")
|
||||||
else:
|
else:
|
||||||
return
|
return
|
||||||
@@ -47,10 +52,9 @@ class LaMNIInitStagesMixin:
|
|||||||
print("The controller will be disabled in bec. To enable dev.lsamrot.enabled=True")
|
print("The controller will be disabled in bec. To enable dev.lsamrot.enabled=True")
|
||||||
return
|
return
|
||||||
|
|
||||||
user_input = input(
|
if self.OMNYTools.yesno(
|
||||||
"Init of loptz. Can the stage move to the upstream limit without collision?? [y/n]"
|
"Init of loptz. Can the stage move to the upstream limit without collision?"
|
||||||
)
|
):
|
||||||
if user_input == "y":
|
|
||||||
print("ok then...")
|
print("ok then...")
|
||||||
else:
|
else:
|
||||||
return
|
return
|
||||||
@@ -75,14 +79,14 @@ class LaMNIInitStagesMixin:
|
|||||||
self.drive_axis_to_limit(dev.lsamy, "reverse")
|
self.drive_axis_to_limit(dev.lsamy, "reverse")
|
||||||
self.find_reference_mark(dev.lsamy)
|
self.find_reference_mark(dev.lsamy)
|
||||||
|
|
||||||
# the dual encoder requires the reference mark to pass on both encoders
|
|
||||||
print("Referencing lsamrot")
|
print("Referencing lsamrot")
|
||||||
self.drive_axis_to_limit(dev.lsamrot, "reverse")
|
self.drive_axis_to_limit(dev.lsamrot, "reverse")
|
||||||
time.sleep(0.1)
|
time.sleep(0.1)
|
||||||
self.find_reference_mark(dev.lsamrot)
|
self.find_reference_mark(dev.lsamrot)
|
||||||
|
|
||||||
user_input = input("Init of leye. Can the stage move to -x limit without collision? [y/n]")
|
if self.OMNYTools.yesno(
|
||||||
if user_input == "y":
|
"Init of leye. Can the stage move to -x limit without collision?"
|
||||||
|
):
|
||||||
print("starting...")
|
print("starting...")
|
||||||
else:
|
else:
|
||||||
return
|
return
|
||||||
@@ -92,15 +96,6 @@ class LaMNIInitStagesMixin:
|
|||||||
print("Referencing leyey")
|
print("Referencing leyey")
|
||||||
self.drive_axis_to_limit(dev.leyey, "forward")
|
self.drive_axis_to_limit(dev.leyey, "forward")
|
||||||
|
|
||||||
# set_lm lsamx 6 14
|
|
||||||
# set_lm lsamy 6 14
|
|
||||||
# set_lm lsamrot -3 362
|
|
||||||
# set_lm loptx -1 -0.2
|
|
||||||
# set_lm lopty 3.0 3.6
|
|
||||||
# set_lm loptz 82 87
|
|
||||||
# set_lm leyex 0 25
|
|
||||||
# set_lm leyey 0.5 50
|
|
||||||
|
|
||||||
print("Init of Smaract stages")
|
print("Init of Smaract stages")
|
||||||
dev.losax.controller.find_reference_mark(2, 0, 1000, 1)
|
dev.losax.controller.find_reference_mark(2, 0, 1000, 1)
|
||||||
time.sleep(1)
|
time.sleep(1)
|
||||||
@@ -108,15 +103,6 @@ class LaMNIInitStagesMixin:
|
|||||||
time.sleep(1)
|
time.sleep(1)
|
||||||
dev.losax.controller.find_reference_mark(1, 0, 1000, 1)
|
dev.losax.controller.find_reference_mark(1, 0, 1000, 1)
|
||||||
time.sleep(1)
|
time.sleep(1)
|
||||||
# dev.losax.controller.find_reference_mark(3, 1, 1000, 1)
|
|
||||||
# time.sleep(1)
|
|
||||||
# dev.losax.controller.find_reference_mark(4, 1, 1000, 1)
|
|
||||||
# time.sleep(1)
|
|
||||||
|
|
||||||
# set_lm losax -1.5 0.25
|
|
||||||
# set_lm losay -2.5 4.1
|
|
||||||
# set_lm losaz -4.1 -0.5
|
|
||||||
# set_lm lcsy -1.5 5
|
|
||||||
|
|
||||||
self._align_setup()
|
self._align_setup()
|
||||||
|
|
||||||
@@ -134,8 +120,7 @@ class LaMNIInitStagesMixin:
|
|||||||
return ord(axis_id.lower()) - 97
|
return ord(axis_id.lower()) - 97
|
||||||
|
|
||||||
def _align_setup(self):
|
def _align_setup(self):
|
||||||
user_input = input("Start moving stages to default initial positions? [y/n]")
|
if self.OMNYTools.yesno("Start moving stages to default initial positions?"):
|
||||||
if user_input == "y":
|
|
||||||
print("Start moving stages...")
|
print("Start moving stages...")
|
||||||
else:
|
else:
|
||||||
print("Stopping.")
|
print("Stopping.")
|
||||||
@@ -174,6 +159,8 @@ class LaMNIInitStagesMixin:
|
|||||||
|
|
||||||
|
|
||||||
class LamNIOpticsMixin:
|
class LamNIOpticsMixin:
|
||||||
|
"""Optics movement methods: FZP, OSA, central stop and X-ray eye."""
|
||||||
|
|
||||||
@staticmethod
|
@staticmethod
|
||||||
def _get_user_param_safe(device, var):
|
def _get_user_param_safe(device, var):
|
||||||
param = dev[device].user_parameter
|
param = dev[device].user_parameter
|
||||||
@@ -188,13 +175,11 @@ class LamNIOpticsMixin:
|
|||||||
umv(dev.leyey, leyey_out)
|
umv(dev.leyey, leyey_out)
|
||||||
|
|
||||||
epics_put("XOMNYI-XEYE-ACQ:0", 2)
|
epics_put("XOMNYI-XEYE-ACQ:0", 2)
|
||||||
# move rotation stage to zero to avoid problems with wires
|
|
||||||
umv(dev.lsamrot, 0)
|
umv(dev.lsamrot, 0)
|
||||||
umv(dev.dttrz, 5854, dev.fttrz, 2395)
|
umv(dev.dttrz, 5854, dev.fttrz, 2395)
|
||||||
|
|
||||||
def leye_in(self):
|
def leye_in(self):
|
||||||
bec.queue.next_dataset_number += 1
|
bec.queue.next_dataset_number += 1
|
||||||
# move rotation stage to zero to avoid problems with wires
|
|
||||||
umv(dev.lsamrot, 0)
|
umv(dev.lsamrot, 0)
|
||||||
umv(dev.dttrz, 6419.677, dev.fttrz, 2959.979)
|
umv(dev.dttrz, 6419.677, dev.fttrz, 2959.979)
|
||||||
while True:
|
while True:
|
||||||
@@ -211,15 +196,10 @@ class LamNIOpticsMixin:
|
|||||||
def _lfzp_in(self):
|
def _lfzp_in(self):
|
||||||
loptx_in = self._get_user_param_safe("loptx", "in")
|
loptx_in = self._get_user_param_safe("loptx", "in")
|
||||||
lopty_in = self._get_user_param_safe("lopty", "in")
|
lopty_in = self._get_user_param_safe("lopty", "in")
|
||||||
umv(
|
umv(dev.loptx, loptx_in, dev.lopty, lopty_in)
|
||||||
dev.loptx, loptx_in, dev.lopty, lopty_in
|
|
||||||
) # for 7.2567 keV and 150 mu, 60 nm fzp, loptz 83.6000 for propagation 1.4 mm
|
|
||||||
|
|
||||||
def lfzp_in(self):
|
def lfzp_in(self):
|
||||||
"""
|
"""Move in the LamNI zone plate, disabling/re-enabling RT feedback around the move."""
|
||||||
move in the lamni zone plate.
|
|
||||||
This will disable rt feedback, move the FZP and re-enabled the feedback.
|
|
||||||
"""
|
|
||||||
if "rtx" in dev and dev.rtx.enabled:
|
if "rtx" in dev and dev.rtx.enabled:
|
||||||
dev.rtx.controller.feedback_disable()
|
dev.rtx.controller.feedback_disable()
|
||||||
|
|
||||||
@@ -229,18 +209,15 @@ class LamNIOpticsMixin:
|
|||||||
dev.rtx.controller.feedback_enable_with_reset()
|
dev.rtx.controller.feedback_enable_with_reset()
|
||||||
|
|
||||||
def loptics_in(self):
|
def loptics_in(self):
|
||||||
"""
|
"""Move in the LamNI optics (FZP + OSA)."""
|
||||||
Move in the lamni optics, including the FZP and the OSA.
|
|
||||||
"""
|
|
||||||
self.lfzp_in()
|
self.lfzp_in()
|
||||||
self.losa_in()
|
self.losa_in()
|
||||||
|
|
||||||
def loptics_out(self):
|
def loptics_out(self):
|
||||||
"""Move out the lamni optics"""
|
"""Move out the LamNI optics."""
|
||||||
if "rtx" in dev and dev.rtx.enabled:
|
if "rtx" in dev and dev.rtx.enabled:
|
||||||
dev.rtx.controller.feedback_disable()
|
dev.rtx.controller.feedback_disable()
|
||||||
|
|
||||||
# self.lcs_out()
|
|
||||||
self.losa_out()
|
self.losa_out()
|
||||||
loptx_out = self._get_user_param_safe("loptx", "out")
|
loptx_out = self._get_user_param_safe("loptx", "out")
|
||||||
lopty_out = self._get_user_param_safe("lopty", "out")
|
lopty_out = self._get_user_param_safe("lopty", "out")
|
||||||
@@ -251,28 +228,17 @@ class LamNIOpticsMixin:
|
|||||||
dev.rtx.controller.feedback_enable_with_reset()
|
dev.rtx.controller.feedback_enable_with_reset()
|
||||||
|
|
||||||
def lcs_in(self):
|
def lcs_in(self):
|
||||||
# umv lcsx -1.852 lcsy -0.095
|
|
||||||
pass
|
pass
|
||||||
|
|
||||||
def lcs_out(self):
|
def lcs_out(self):
|
||||||
umv(dev.lcsy, 3)
|
umv(dev.lcsy, 3)
|
||||||
|
|
||||||
def losa_in(self):
|
def losa_in(self):
|
||||||
# 6.2 keV, 170 um FZP
|
|
||||||
# umv(dev.losax, -1.4450000, dev.losay, -0.1800)
|
|
||||||
# umv(dev.losaz, -1)
|
|
||||||
# 6.7, 170
|
|
||||||
# umv(dev.losax, -1.4850, dev.losay, -0.1930)
|
|
||||||
# umv(dev.losaz, 1.0000)
|
|
||||||
# 7.2, 150
|
|
||||||
losax_in = self._get_user_param_safe("losax", "in")
|
losax_in = self._get_user_param_safe("losax", "in")
|
||||||
losay_in = self._get_user_param_safe("losay", "in")
|
losay_in = self._get_user_param_safe("losay", "in")
|
||||||
losaz_in = self._get_user_param_safe("losaz", "in")
|
losaz_in = self._get_user_param_safe("losaz", "in")
|
||||||
umv(dev.losax, losax_in, dev.losay, losay_in)
|
umv(dev.losax, losax_in, dev.losay, losay_in)
|
||||||
umv(dev.losaz, losaz_in)
|
umv(dev.losaz, losaz_in)
|
||||||
# 11 kev
|
|
||||||
# umv(dev.losax, -1.161000, dev.losay, -0.196)
|
|
||||||
# umv(dev.losaz, 1.0000)
|
|
||||||
|
|
||||||
def losa_out(self):
|
def losa_out(self):
|
||||||
losay_out = self._get_user_param_safe("losay", "out")
|
losay_out = self._get_user_param_safe("losay", "out")
|
||||||
@@ -281,11 +247,10 @@ class LamNIOpticsMixin:
|
|||||||
umv(dev.losay, losay_out)
|
umv(dev.losay, losay_out)
|
||||||
|
|
||||||
def lfzp_info(self, mokev_val=-1):
|
def lfzp_info(self, mokev_val=-1):
|
||||||
|
|
||||||
if mokev_val == -1:
|
if mokev_val == -1:
|
||||||
try:
|
try:
|
||||||
mokev_val = dev.mokev.readback.get()
|
mokev_val = dev.mokev.readback.get()
|
||||||
except:
|
except Exception:
|
||||||
print(
|
print(
|
||||||
"Device mokev does not exist. You can specify the energy in keV as an argument instead."
|
"Device mokev does not exist. You can specify the energy in keV as an argument instead."
|
||||||
)
|
)
|
||||||
@@ -320,10 +285,6 @@ class LamNIOpticsMixin:
|
|||||||
)
|
)
|
||||||
|
|
||||||
console.print(table)
|
console.print(table)
|
||||||
|
|
||||||
print("OSA Information:")
|
|
||||||
# print(f"Current losaz %.1f\n", A[losaz])
|
|
||||||
# print("The OSA will collide with the sample plane at %.1f\n\n", 89.3-A[loptz])
|
|
||||||
print(
|
print(
|
||||||
"The numbers presented here are for a sample in the plane of the lamni sample holder.\n"
|
"The numbers presented here are for a sample in the plane of the lamni sample holder.\n"
|
||||||
)
|
)
|
||||||
|
|||||||
@@ -1,22 +0,0 @@
|
|||||||
def lamni_read_additional_correction():
|
|
||||||
# "additional_correction_shift"
|
|
||||||
# [0][] x , [1][] y, [2][] angle, [3][0] number of elements
|
|
||||||
|
|
||||||
with open("correction_lamni_um_S01405_.txt", "r") as f:
|
|
||||||
num_elements = f.readline()
|
|
||||||
int_num_elements = int(num_elements.split(" ")[2])
|
|
||||||
print(int_num_elements)
|
|
||||||
corr_pos_x = []
|
|
||||||
corr_pos_y = []
|
|
||||||
corr_angle = []
|
|
||||||
for j in range(0, int_num_elements * 3):
|
|
||||||
line = f.readline()
|
|
||||||
value = line.split(" ")[2]
|
|
||||||
name = line.split(" ")[0].split("[")[0]
|
|
||||||
if name == "corr_pos_x":
|
|
||||||
corr_pos_x.append(value)
|
|
||||||
elif name == "corr_pos_y":
|
|
||||||
corr_pos_y.append(value)
|
|
||||||
elif name == "corr_angle":
|
|
||||||
corr_angle.append(value)
|
|
||||||
return (corr_pos_x, corr_pos_y, corr_angle, num_elements)
|
|
||||||
File diff suppressed because it is too large
Load Diff
@@ -35,32 +35,32 @@ class FlomniInitError(Exception):
|
|||||||
class FlomniError(Exception):
|
class FlomniError(Exception):
|
||||||
pass
|
pass
|
||||||
|
|
||||||
class FlomniTools:
|
# class FlomniTools:
|
||||||
def yesno(self, message: str, default="none", autoconfirm=0) -> bool:
|
# def yesno(self, message: str, default="none", autoconfirm=0) -> bool:
|
||||||
if autoconfirm and default == "y":
|
# if autoconfirm and default == "y":
|
||||||
self.printgreen(message + " Automatically confirming default: yes")
|
# self.printgreen(message + " Automatically confirming default: yes")
|
||||||
return True
|
# return True
|
||||||
elif autoconfirm and default == "n":
|
# elif autoconfirm and default == "n":
|
||||||
self.printgreen(message + " Automatically confirming default: no")
|
# self.printgreen(message + " Automatically confirming default: no")
|
||||||
return False
|
# return False
|
||||||
if default == "y":
|
# if default == "y":
|
||||||
message_ending = " [Y]/n? "
|
# message_ending = " [Y]/n? "
|
||||||
elif default == "n":
|
# elif default == "n":
|
||||||
message_ending = " y/[N]? "
|
# message_ending = " y/[N]? "
|
||||||
else:
|
# else:
|
||||||
message_ending = " y/n? "
|
# message_ending = " y/n? "
|
||||||
while True:
|
# while True:
|
||||||
user_input = input(self.OKBLUE + message + message_ending + self.ENDC)
|
# user_input = input(self.OKBLUE + message + message_ending + self.ENDC)
|
||||||
if (
|
# if (
|
||||||
user_input == "Y" or user_input == "y" or user_input == "yes" or user_input == "Yes"
|
# user_input == "Y" or user_input == "y" or user_input == "yes" or user_input == "Yes"
|
||||||
) or (default == "y" and user_input == ""):
|
# ) or (default == "y" and user_input == ""):
|
||||||
return True
|
# return True
|
||||||
if (
|
# if (
|
||||||
user_input == "N" or user_input == "n" or user_input == "no" or user_input == "No"
|
# user_input == "N" or user_input == "n" or user_input == "no" or user_input == "No"
|
||||||
) or (default == "n" and user_input == ""):
|
# ) or (default == "n" and user_input == ""):
|
||||||
return False
|
# return False
|
||||||
else:
|
# else:
|
||||||
print("Please expicitely confirm y or n.")
|
# print("Please expicitely confirm y or n.")
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
|||||||
@@ -9,6 +9,17 @@ eiger_1_5:
|
|||||||
readoutPriority: async
|
readoutPriority: async
|
||||||
softwareTrigger: False
|
softwareTrigger: False
|
||||||
|
|
||||||
|
eiger_9:
|
||||||
|
description: Eiger 9M detector
|
||||||
|
deviceClass: csaxs_bec.devices.jungfraujoch.eiger_9m.Eiger9M
|
||||||
|
deviceConfig:
|
||||||
|
detector_distance: 100
|
||||||
|
beam_center: [0, 0]
|
||||||
|
onFailure: raise
|
||||||
|
enabled: true
|
||||||
|
readoutPriority: async
|
||||||
|
softwareTrigger: False
|
||||||
|
|
||||||
ids_cam:
|
ids_cam:
|
||||||
description: IDS camera for live image acquisition
|
description: IDS camera for live image acquisition
|
||||||
deviceClass: csaxs_bec.devices.ids_cameras.IDSCamera
|
deviceClass: csaxs_bec.devices.ids_cameras.IDSCamera
|
||||||
|
|||||||
@@ -317,10 +317,14 @@ class MCSCardCSAXS(PSIDeviceBase, MCSCard):
|
|||||||
old_value: Previous value of the signal.
|
old_value: Previous value of the signal.
|
||||||
value: New value of the signal.
|
value: New value of the signal.
|
||||||
"""
|
"""
|
||||||
scan_done = bool(value == self._num_total_triggers)
|
try:
|
||||||
self.progress.put(value=value, max_value=self._num_total_triggers, done=scan_done)
|
scan_done = bool(value == self._num_total_triggers)
|
||||||
if scan_done:
|
self.progress.put(value=value, max_value=self._num_total_triggers, done=scan_done)
|
||||||
self._scan_done_event.set()
|
if scan_done:
|
||||||
|
self._scan_done_event.set()
|
||||||
|
except Exception:
|
||||||
|
content = traceback.format_exc()
|
||||||
|
logger.info(f"Device {self.name} error: {content}")
|
||||||
|
|
||||||
def on_stage(self) -> None:
|
def on_stage(self) -> None:
|
||||||
"""
|
"""
|
||||||
|
|||||||
48
csaxs_bec/devices/jungfraujoch/README.MD
Normal file
48
csaxs_bec/devices/jungfraujoch/README.MD
Normal file
@@ -0,0 +1,48 @@
|
|||||||
|
# Overview
|
||||||
|
Integration module for Eiger detectors at the cSAXS beamline with JungfrauJoch backend.
|
||||||
|
There are currently two supported Eiger detectors:
|
||||||
|
- EIGER 1.5M
|
||||||
|
- EIGER 9M
|
||||||
|
|
||||||
|
This module provides a base integration for both detectors. A short list of useful
|
||||||
|
information is also provided below.
|
||||||
|
|
||||||
|
## JungfrauJoch Service
|
||||||
|
The JungfrauJoch WEB UI is available on http://sls-jfjoch-001:8080. This is an interface
|
||||||
|
to the broker which runs on sls-jfjoch-001.psi.ch. The writer service runs on
|
||||||
|
xbl-daq-34.psi.ch. Permissions to get access to these machines and run systemctl or
|
||||||
|
journalctl commands can be requested with the Infrastructure and Services group in AWI.
|
||||||
|
Beamline scientists need to check if they have the necessary permissions to connect
|
||||||
|
to these machines and run the commands below.
|
||||||
|
|
||||||
|
Useful commands for the broker service on sls-jfjoch-001.psi.ch:
|
||||||
|
- sudo systemctl status jfjoch_broker # Check status
|
||||||
|
- sudo systemctl start jfjoch_broker # Start service
|
||||||
|
- sudo systemctl stop jfjoch_broker # Stop service
|
||||||
|
- sudo systemctl restart jfjoch_broker # Restart service
|
||||||
|
|
||||||
|
For the writer service on xbl-daq-34.psi.ch:
|
||||||
|
- sudo journalctl -u jfjoch_writer -f # streams live logs
|
||||||
|
- sudo systemctl status jfjoch_writer # Check status
|
||||||
|
- sudo systemctl start jfjoch_writer # Start service
|
||||||
|
- sudo systemctl stop jfjoch_writer # Stop service
|
||||||
|
- sudo systemctl restart jfjoch_writer # Restart service
|
||||||
|
|
||||||
|
More information about the JungfrauJoch and API client can be found at: (https://jungfraujoch.readthedocs.io/en/latest/index.html)
|
||||||
|
|
||||||
|
### JungfrauJoch API Client
|
||||||
|
A thin wrapper for the JungfrauJoch API client is provided in the [jungfrau_joch_client](./jungfrau_joch_client.py).
|
||||||
|
Details about the specific integration are provided in the code.
|
||||||
|
|
||||||
|
|
||||||
|
## Eiger implementation
|
||||||
|
The Eiger detector integration is provided in the [eiger.py](./eiger.py) module. It provides a base integration for both Eiger 1.5M and Eiger 9M detectors.
|
||||||
|
Logic specific to each detector is implemented in the respective modules:
|
||||||
|
- [eiger_1_5m.py](./eiger_1_5m.py)
|
||||||
|
- [eiger_9m.py](./eiger_9m.py)
|
||||||
|
|
||||||
|
With the current implementation, the detector initialization should be done by a beamline scientist through the JungfrauJoch WEB UI by choosing the
|
||||||
|
appropriate detector (1.5M or 9M) before loading the device config with BEC. BEC will check upon connecting if the selected detector matches the expected one.
|
||||||
|
A preview stream for images is also provided which is forwarded and accessible through the `preview_image` signal.
|
||||||
|
|
||||||
|
For more specific details, please check the code documentation.
|
||||||
@@ -1,34 +1,23 @@
|
|||||||
"""
|
"""
|
||||||
Generic integration of JungfrauJoch backend with Eiger detectors
|
|
||||||
for the cSAXS beamline at the Swiss Light Source.
|
|
||||||
|
|
||||||
The WEB UI is available on http://sls-jfjoch-001:8080
|
Integration module for Eiger detectors at the cSAXS beamline with JungfrauJoch backend.
|
||||||
|
|
||||||
NOTE: this may not be the best place to store this information. It should be migrated to
|
A few notes on setup and operation of the Eiger detectors through the JungfrauJoch broker:
|
||||||
beamline documentation for debugging of Eiger & JungfrauJoch.
|
|
||||||
|
|
||||||
The JungfrauJoch server for cSAXS runs on sls-jfjoch-001.psi.ch
|
|
||||||
User with sufficient rights may use:
|
|
||||||
- sudo systemctl restart jfjoch_broker
|
|
||||||
- sudo systemctl status jfjoch_broker
|
|
||||||
to check and/or restart the broker for the JungfrauJoch server.
|
|
||||||
|
|
||||||
Some extra notes for setting up the detector:
|
|
||||||
- If the energy on JFJ is set via DetectorSettings, the variable in DatasetSettings will be ignored
|
- If the energy on JFJ is set via DetectorSettings, the variable in DatasetSettings will be ignored
|
||||||
- Changes in energy may take time, good to implement logic that only resets energy if needed.
|
- Changes in energy may take time, good to implement logic that only resets energy if needed.
|
||||||
- For the Eiger, the frame_time_us in DetectorSettings is ignored, only the frame_time_us in
|
- For the Eiger, the frame_time_us in DetectorSettings is ignored, only the frame_time_us in
|
||||||
the DatasetSettings is relevant
|
the DatasetSettings is relevant
|
||||||
- The bit_depth will be adjusted automatically based on the exp_time. Here, we need to ensure
|
- The bit_depth will be adjusted automatically based on the exp_time. Here, we need to ensure
|
||||||
that subsequent triggers properly
|
that subsequent triggers properly consider the readout_time of the boards. For the Eiger detectors
|
||||||
consider the readout_time of the boards. For Jungfrau detectors, the difference between
|
at cSAXS, a readout time of 20us is configured through the JungfrauJoch deployment config. This
|
||||||
count_time_us and frame_time_us is the readout_time of the boards. For the Eiger, this needs
|
setting is sufficiently large for the detectors if they run in parallel mode.
|
||||||
to be taken into account during the integration.
|
|
||||||
- beam_center and detector settings are required input arguments, thus, they may be set to wrong
|
- beam_center and detector settings are required input arguments, thus, they may be set to wrong
|
||||||
values for acquisitions to start. Please keep this in mind.
|
values for acquisitions to start. Please keep this in mind.
|
||||||
|
|
||||||
Hardware related notes:
|
Hardware related notes:
|
||||||
- If there is an HW issue with the detector, power cycling may help.
|
- If there is an HW issue with the detector, power cycling may help.
|
||||||
- The sls_detector package is available on console on /sls/X12SA/data/gac-x12sa/erik/micromamba
|
- The sls_detector package is available on console on /sls/x12sa/applications/erik/micromamba
|
||||||
- Run: source setup_9m.sh # Be careful, this connects to the detector, so it should not be
|
- Run: source setup_9m.sh # Be careful, this connects to the detector, so it should not be
|
||||||
used during operation
|
used during operation
|
||||||
- Useful commands:
|
- Useful commands:
|
||||||
@@ -39,9 +28,6 @@ Hardware related notes:
|
|||||||
- cd power_control_user/
|
- cd power_control_user/
|
||||||
- ./on
|
- ./on
|
||||||
- ./off
|
- ./off
|
||||||
|
|
||||||
Further information that may be relevant for debugging:
|
|
||||||
JungfrauJoch - one needs to connect to the jfj-server (sls-jfjoch-001)
|
|
||||||
"""
|
"""
|
||||||
|
|
||||||
from __future__ import annotations
|
from __future__ import annotations
|
||||||
@@ -84,10 +70,19 @@ class EigerError(Exception):
|
|||||||
|
|
||||||
class Eiger(PSIDeviceBase):
|
class Eiger(PSIDeviceBase):
|
||||||
"""
|
"""
|
||||||
Base integration of the Eiger1.5M and Eiger9M at cSAXS. All relevant
|
Base integration of the Eiger1.5M and Eiger9M at cSAXS.
|
||||||
|
|
||||||
|
Args:
|
||||||
|
name (str) : Name of the device
|
||||||
|
detector_name (str): Name of the detector. Supports ["EIGER 9M", "EIGER 8.5M (tmp)", "EIGER 1.5M"]
|
||||||
|
host (str): Hostname of the Jungfrau Joch server.
|
||||||
|
port (int): Port of the Jungfrau Joch server.
|
||||||
|
scan_info (ScanInfo): The scan info to use.
|
||||||
|
device_manager (DeviceManagerDS): The device manager to use.
|
||||||
|
**kwargs: Additional keyword arguments.
|
||||||
"""
|
"""
|
||||||
|
|
||||||
USER_ACCESS = ["detector_distance", "beam_center"]
|
USER_ACCESS = ["set_detector_distance", "set_beam_center"]
|
||||||
|
|
||||||
file_event = Cpt(FileEventSignal, name="file_event")
|
file_event = Cpt(FileEventSignal, name="file_event")
|
||||||
preview_image = Cpt(PreviewSignal, name="preview_image", ndim=2)
|
preview_image = Cpt(PreviewSignal, name="preview_image", ndim=2)
|
||||||
@@ -105,23 +100,12 @@ class Eiger(PSIDeviceBase):
|
|||||||
device_manager=None,
|
device_manager=None,
|
||||||
**kwargs,
|
**kwargs,
|
||||||
):
|
):
|
||||||
"""
|
|
||||||
Initialize the PSI Device Base class.
|
|
||||||
|
|
||||||
Args:
|
|
||||||
name (str) : Name of the device
|
|
||||||
detector_name (str): Name of the detector. Supports ["EIGER 9M", "EIGER 8.5M (tmp)", "EIGER 1.5M"]
|
|
||||||
host (str): Hostname of the Jungfrau Joch server.
|
|
||||||
port (int): Port of the Jungfrau Joch server.
|
|
||||||
scan_info (ScanInfo): The scan info to use.
|
|
||||||
device_manager (DeviceManagerDS): The device manager to use.
|
|
||||||
**kwargs: Additional keyword arguments.
|
|
||||||
"""
|
|
||||||
super().__init__(name=name, scan_info=scan_info, device_manager=device_manager, **kwargs)
|
super().__init__(name=name, scan_info=scan_info, device_manager=device_manager, **kwargs)
|
||||||
self._host = f"{host}:{port}"
|
self._host = f"{host}:{port}"
|
||||||
self.jfj_client = JungfrauJochClient(host=self._host, parent=self)
|
self.jfj_client = JungfrauJochClient(host=self._host, parent=self)
|
||||||
|
# NOTE: fetch this information from JungfrauJochClient during on_connected!
|
||||||
self.jfj_preview_client = JungfrauJochPreview(
|
self.jfj_preview_client = JungfrauJochPreview(
|
||||||
url="tcp://129.129.95.114:5400", cb=self.preview_image.put
|
url="tcp://129.129.95.114:5400", cb=self._preview_callback
|
||||||
) # IP of sls-jfjoch-001.psi.ch on port 5400 for ZMQ stream
|
) # IP of sls-jfjoch-001.psi.ch on port 5400 for ZMQ stream
|
||||||
self.device_manager = device_manager
|
self.device_manager = device_manager
|
||||||
self.detector_name = detector_name
|
self.detector_name = detector_name
|
||||||
@@ -129,53 +113,102 @@ class Eiger(PSIDeviceBase):
|
|||||||
self._beam_center = beam_center
|
self._beam_center = beam_center
|
||||||
self._readout_time = readout_time
|
self._readout_time = readout_time
|
||||||
self._full_path = ""
|
self._full_path = ""
|
||||||
|
self._num_triggers = 0
|
||||||
|
self._wait_for_on_complete = 20 # seconds
|
||||||
if self.device_manager is not None:
|
if self.device_manager is not None:
|
||||||
self.device_manager: DeviceManagerDS
|
self.device_manager: DeviceManagerDS
|
||||||
|
|
||||||
|
def _preview_callback(self, message: dict) -> None:
|
||||||
|
"""
|
||||||
|
Callback method for handling preview messages as received from the JungfrauJoch preview stream.
|
||||||
|
These messages are dictionary dumps as described in the JFJ ZMQ preview stream documentation.
|
||||||
|
(https://jungfraujoch.readthedocs.io/en/latest/ZEROMQ_STREAM.html#preview-stream).
|
||||||
|
|
||||||
|
Args:
|
||||||
|
message (dict): The message received from the preview stream.
|
||||||
|
"""
|
||||||
|
if message.get("type", "") == "image":
|
||||||
|
data = message.get("data", {}).get("default", None)
|
||||||
|
if data is None:
|
||||||
|
logger.error(f"Received image message on device {self.name} without data.")
|
||||||
|
return
|
||||||
|
logger.info(f"Received preview image on device {self.name}")
|
||||||
|
self.preview_image.put(data)
|
||||||
|
|
||||||
|
# pylint: disable=missing-function-docstring
|
||||||
@property
|
@property
|
||||||
def detector_distance(self) -> float:
|
def detector_distance(self) -> float:
|
||||||
"""The detector distance in mm."""
|
|
||||||
return self._detector_distance
|
return self._detector_distance
|
||||||
|
|
||||||
@detector_distance.setter
|
@detector_distance.setter
|
||||||
def detector_distance(self, value: float) -> None:
|
def detector_distance(self, value: float) -> None:
|
||||||
"""Set the detector distance in mm."""
|
|
||||||
if value <= 0:
|
if value <= 0:
|
||||||
raise ValueError("Detector distance must be a positive value.")
|
raise ValueError("Detector distance must be a positive value.")
|
||||||
self._detector_distance = value
|
self._detector_distance = value
|
||||||
|
|
||||||
|
def set_detector_distance(self, distance: float) -> None:
|
||||||
|
"""
|
||||||
|
Set the detector distance in mm.
|
||||||
|
|
||||||
|
Args:
|
||||||
|
distance (float): The detector distance in mm.
|
||||||
|
"""
|
||||||
|
self.detector_distance = distance
|
||||||
|
|
||||||
|
# pylint: disable=missing-function-docstring
|
||||||
@property
|
@property
|
||||||
def beam_center(self) -> tuple[float, float]:
|
def beam_center(self) -> tuple[float, float]:
|
||||||
"""The beam center in pixels. (x,y)"""
|
|
||||||
return self._beam_center
|
return self._beam_center
|
||||||
|
|
||||||
@beam_center.setter
|
@beam_center.setter
|
||||||
def beam_center(self, value: tuple[float, float]) -> None:
|
def beam_center(self, value: tuple[float, float]) -> None:
|
||||||
"""Set the beam center in pixels. (x,y)"""
|
if any(coord < 0 for coord in value):
|
||||||
|
raise ValueError("Beam center coordinates must be non-negative.")
|
||||||
self._beam_center = value
|
self._beam_center = value
|
||||||
|
|
||||||
def on_init(self) -> None:
|
def set_beam_center(self, x: float, y: float) -> None:
|
||||||
"""
|
"""
|
||||||
Called when the device is initialized.
|
Set the beam center coordinates in pixels.
|
||||||
|
|
||||||
No siganls are connected at this point,
|
Args:
|
||||||
thus should not be set here but in on_connected instead.
|
x (float): The x coordinate of the beam center in pixels.
|
||||||
|
y (float): The y coordinate of the beam center in pixels.
|
||||||
"""
|
"""
|
||||||
|
self.beam_center = (x, y)
|
||||||
|
|
||||||
|
def on_init(self) -> None:
|
||||||
|
"""Hook called during device initialization."""
|
||||||
|
|
||||||
|
# pylint: disable=arguments-differ
|
||||||
|
def wait_for_connection(self, timeout: float = 10) -> None:
|
||||||
|
"""
|
||||||
|
Wait for the device to be connected to the JungfrauJoch backend.
|
||||||
|
|
||||||
|
Args:
|
||||||
|
timeout (float): Timeout in seconds to wait for the connection.
|
||||||
|
"""
|
||||||
|
self.jfj_client.api.status_get(_request_timeout=timeout) # If connected, this responds
|
||||||
|
|
||||||
def on_connected(self) -> None:
|
def on_connected(self) -> None:
|
||||||
"""
|
"""
|
||||||
|
Hook called after the device is connected to through the device server.
|
||||||
|
|
||||||
Called after the device is connected and its signals are connected.
|
Called after the device is connected and its signals are connected.
|
||||||
Default values for signals should be set here.
|
Default values for signals should be set here. Currently, the detector needs to be
|
||||||
|
initialised manually through the WEB UI of JungfrauJoch. Once agreed upon, the automated
|
||||||
|
initialisation can be re-enabled here (code commented below).
|
||||||
"""
|
"""
|
||||||
|
start_time = time.time()
|
||||||
logger.debug(f"On connected called for {self.name}")
|
logger.debug(f"On connected called for {self.name}")
|
||||||
self.jfj_client.stop(request_timeout=3)
|
self.jfj_client.stop(request_timeout=3)
|
||||||
# Check which detector is selected
|
# Check which detector is selected
|
||||||
|
|
||||||
# Get available detectors
|
# Get available detectors
|
||||||
available_detectors = self.jfj_client.api.config_select_detector_get(_request_timeout=5)
|
available_detectors = self.jfj_client.api.config_select_detector_get(_request_timeout=5)
|
||||||
|
logger.debug(f"Available detectors {available_detectors}")
|
||||||
# Get current detector
|
# Get current detector
|
||||||
current_detector_name = ""
|
current_detector_name = ""
|
||||||
if available_detectors.current_id:
|
if available_detectors.current_id is not None:
|
||||||
detector_selection = [
|
detector_selection = [
|
||||||
det.description
|
det.description
|
||||||
for det in available_detectors.detectors
|
for det in available_detectors.detectors
|
||||||
@@ -190,8 +223,9 @@ class Eiger(PSIDeviceBase):
|
|||||||
raise RuntimeError(
|
raise RuntimeError(
|
||||||
f"Detector {self.detector_name} is not in IDLE state, current state: {self.jfj_client.detector_state}. Please initialize the detector in the WEB UI: {self._host}."
|
f"Detector {self.detector_name} is not in IDLE state, current state: {self.jfj_client.detector_state}. Please initialize the detector in the WEB UI: {self._host}."
|
||||||
)
|
)
|
||||||
# TODO - check again once Eiger should be initialized automatically, currently human initialization is expected
|
|
||||||
# # Once the automation should be enabled, we may use here
|
# TODO - Currently the initialisation of the detector is done manually through the WEB UI. Once adjusted
|
||||||
|
# this can be automated here again.
|
||||||
# detector_selection = [
|
# detector_selection = [
|
||||||
# det for det in available_detectors.detectors if det.id == self.detector_name
|
# det for det in available_detectors.detectors if det.id == self.detector_name
|
||||||
# ]
|
# ]
|
||||||
@@ -207,41 +241,51 @@ class Eiger(PSIDeviceBase):
|
|||||||
|
|
||||||
# Setup Detector settings, here we may also set the energy already as this might be time consuming
|
# Setup Detector settings, here we may also set the energy already as this might be time consuming
|
||||||
settings = DetectorSettings(frame_time_us=int(500), timing=DetectorTiming.TRIGGER)
|
settings = DetectorSettings(frame_time_us=int(500), timing=DetectorTiming.TRIGGER)
|
||||||
self.jfj_client.set_detector_settings(settings, timeout=10)
|
self.jfj_client.set_detector_settings(settings, timeout=5)
|
||||||
|
|
||||||
# Set the file writer to the appropriate output for the HDF5 file
|
# Set the file writer to the appropriate output for the HDF5 file
|
||||||
file_writer_settings = FileWriterSettings(overwrite=True, format=FileWriterFormat.NXMXVDS)
|
file_writer_settings = FileWriterSettings(overwrite=True, format=FileWriterFormat.NXMXVDS)
|
||||||
logger.debug(
|
logger.debug(
|
||||||
f"Setting writer_settings: {yaml.dump(file_writer_settings.to_dict(), indent=4)}"
|
f"Setting writer_settings: {yaml.dump(file_writer_settings.to_dict(), indent=4)}"
|
||||||
)
|
)
|
||||||
|
|
||||||
|
# Setup the file writer settings
|
||||||
self.jfj_client.api.config_file_writer_put(
|
self.jfj_client.api.config_file_writer_put(
|
||||||
file_writer_settings=file_writer_settings, _request_timeout=10
|
file_writer_settings=file_writer_settings, _request_timeout=10
|
||||||
)
|
)
|
||||||
|
|
||||||
# Start the preview client
|
# Start the preview client
|
||||||
self.jfj_preview_client.connect()
|
self.jfj_preview_client.connect()
|
||||||
self.jfj_preview_client.start()
|
self.jfj_preview_client.start()
|
||||||
logger.info(f"Connected to JungfrauJoch preview stream at {self.jfj_preview_client.url}")
|
logger.info(
|
||||||
|
f"Device {self.name} initialized after {time.time()-start_time:.2f}s. Preview stream connected on url: {self.jfj_preview_client.url}"
|
||||||
|
)
|
||||||
|
|
||||||
def on_stage(self) -> DeviceStatus | None:
|
def on_stage(self) -> DeviceStatus | None:
|
||||||
"""
|
"""
|
||||||
Called while staging the device.
|
Hook called when staging the device. Information about the upcoming scan can be accessed from the scan_info object.
|
||||||
|
scan_msg = self.scan_info.msg
|
||||||
Information about the upcoming scan can be accessed from the scan_info object.
|
|
||||||
"""
|
"""
|
||||||
start_time = time.time()
|
start_time = time.time()
|
||||||
scan_msg = self.scan_info.msg
|
scan_msg = self.scan_info.msg
|
||||||
# Set acquisition parameter
|
|
||||||
# TODO add check of mono energy, this can then also be passed to DatasetSettings
|
# TODO: Check mono energy from device in BEC
|
||||||
|
# Setting incident energy in keV
|
||||||
incident_energy = 12.0
|
incident_energy = 12.0
|
||||||
|
# Setting up exp_time and num_triggers acquisition parameter
|
||||||
exp_time = scan_msg.scan_parameters.get("exp_time", 0)
|
exp_time = scan_msg.scan_parameters.get("exp_time", 0)
|
||||||
if exp_time <= self._readout_time:
|
if exp_time <= self._readout_time: # Exp_time must be at least the readout time
|
||||||
raise ValueError(
|
raise ValueError(
|
||||||
f"Receive scan request for scan {scan_msg.scan_name} with exp_time {exp_time}s, which must be larger than the readout time {self._readout_time}s of the detector {self.detector_name}."
|
f"Value error on device {self.name}: Exposure time {exp_time}s is less than readout time {self._readout_time}s."
|
||||||
)
|
)
|
||||||
frame_time_us = exp_time #
|
self._num_triggers = int(
|
||||||
ntrigger = int(scan_msg.num_points * scan_msg.scan_parameters["frames_per_trigger"])
|
scan_msg.num_points * scan_msg.scan_parameters["frames_per_trigger"]
|
||||||
# Fetch file path
|
)
|
||||||
|
|
||||||
|
# Setting up the full path for file writing
|
||||||
self._full_path = get_full_path(scan_msg, name=f"{self.name}_master")
|
self._full_path = get_full_path(scan_msg, name=f"{self.name}_master")
|
||||||
self._full_path = os.path.abspath(os.path.expanduser(self._full_path))
|
self._full_path = os.path.abspath(os.path.expanduser(self._full_path))
|
||||||
|
|
||||||
# Inform BEC about upcoming file event
|
# Inform BEC about upcoming file event
|
||||||
self.file_event.put(
|
self.file_event.put(
|
||||||
file_path=self._full_path,
|
file_path=self._full_path,
|
||||||
@@ -249,11 +293,14 @@ class Eiger(PSIDeviceBase):
|
|||||||
successful=False,
|
successful=False,
|
||||||
hinted_h5_entries={"data": "entry/data/data"},
|
hinted_h5_entries={"data": "entry/data/data"},
|
||||||
)
|
)
|
||||||
|
|
||||||
# JFJ adds _master.h5 automatically
|
# JFJ adds _master.h5 automatically
|
||||||
path = os.path.relpath(self._full_path, start="/sls/x12sa/data").removesuffix("_master.h5")
|
path = os.path.relpath(self._full_path, start="/sls/x12sa/data").removesuffix("_master.h5")
|
||||||
|
|
||||||
|
# Create dataset settings for API call.
|
||||||
data_settings = DatasetSettings(
|
data_settings = DatasetSettings(
|
||||||
image_time_us=int(frame_time_us * 1e6), # This is currently ignored
|
image_time_us=int(exp_time * 1e6),
|
||||||
ntrigger=ntrigger,
|
ntrigger=self._num_triggers,
|
||||||
file_prefix=path,
|
file_prefix=path,
|
||||||
beam_x_pxl=int(self._beam_center[0]),
|
beam_x_pxl=int(self._beam_center[0]),
|
||||||
beam_y_pxl=int(self._beam_center[1]),
|
beam_y_pxl=int(self._beam_center[1]),
|
||||||
@@ -261,11 +308,15 @@ class Eiger(PSIDeviceBase):
|
|||||||
incident_energy_ke_v=incident_energy,
|
incident_energy_ke_v=incident_energy,
|
||||||
)
|
)
|
||||||
logger.debug(f"Setting data_settings: {yaml.dump(data_settings.to_dict(), indent=4)}")
|
logger.debug(f"Setting data_settings: {yaml.dump(data_settings.to_dict(), indent=4)}")
|
||||||
prep_time = start_time - time.time()
|
prep_time = time.time()
|
||||||
logger.debug(f"Prepared information for eiger to start acquisition in {prep_time:.2f}s")
|
self.jfj_client.wait_for_idle(timeout=10) # Ensure we are in IDLE state
|
||||||
self.jfj_client.wait_for_idle(timeout=10, request_timeout=10) # Ensure we are in IDLE state
|
|
||||||
self.jfj_client.start(settings=data_settings) # Takes around ~0.6s
|
self.jfj_client.start(settings=data_settings) # Takes around ~0.6s
|
||||||
logger.debug(f"Wait for IDLE and start call took {time.time()-start_time-prep_time:.2f}s")
|
|
||||||
|
# Time the stage process
|
||||||
|
logger.info(
|
||||||
|
f"Device {self.name} staged for scan. Time spent {time.time()-start_time:.2f}s,"
|
||||||
|
f" with {time.time()-prep_time:.2f}s spent with communication to JungfrauJoch."
|
||||||
|
)
|
||||||
|
|
||||||
def on_unstage(self) -> DeviceStatus:
|
def on_unstage(self) -> DeviceStatus:
|
||||||
"""Called while unstaging the device."""
|
"""Called while unstaging the device."""
|
||||||
@@ -278,7 +329,9 @@ class Eiger(PSIDeviceBase):
|
|||||||
|
|
||||||
def _file_event_callback(self, status: DeviceStatus) -> None:
|
def _file_event_callback(self, status: DeviceStatus) -> None:
|
||||||
"""Callback to update the file_event signal when the acquisition is done."""
|
"""Callback to update the file_event signal when the acquisition is done."""
|
||||||
logger.info(f"Acquisition done callback called for {self.name} for status {status.success}")
|
logger.debug(
|
||||||
|
f"File event callback on complete status for device {self.name}: done={status.done}, successful={status.success}"
|
||||||
|
)
|
||||||
self.file_event.put(
|
self.file_event.put(
|
||||||
file_path=self._full_path,
|
file_path=self._full_path,
|
||||||
done=status.done,
|
done=status.done,
|
||||||
@@ -287,19 +340,44 @@ class Eiger(PSIDeviceBase):
|
|||||||
)
|
)
|
||||||
|
|
||||||
def on_complete(self) -> DeviceStatus:
|
def on_complete(self) -> DeviceStatus:
|
||||||
"""Called to inquire if a device has completed a scans."""
|
"""
|
||||||
|
Called at the end of the scan. The method should implement an asynchronous wait for the
|
||||||
|
device to complete the acquisition. A callback to update the file_event signal is
|
||||||
|
attached that resolves the file event when the acquisition is done.
|
||||||
|
|
||||||
|
Returns:
|
||||||
|
DeviceStatus: The status object representing the completion of the acquisition.
|
||||||
|
"""
|
||||||
|
|
||||||
def wait_for_complete():
|
def wait_for_complete():
|
||||||
start_time = time.time()
|
start_time = time.time()
|
||||||
timeout = 10
|
# NOTE: This adjust the time (s) that should be waited for completion of the scan.
|
||||||
for _ in range(timeout):
|
timeout = self._wait_for_on_complete
|
||||||
if self.jfj_client.wait_for_idle(timeout=1, request_timeout=10):
|
while time.time() - start_time < timeout:
|
||||||
|
if self.jfj_client.wait_for_idle(timeout=1, raise_on_timeout=False):
|
||||||
|
# TODO: Once available, add check for
|
||||||
|
statistics: MeasurementStatistics = (
|
||||||
|
self.jfj_client.api.statistics_data_collection_get(_request_timeout=5)
|
||||||
|
)
|
||||||
|
if statistics.images_collected < self._num_triggers:
|
||||||
|
raise EigerError(
|
||||||
|
f"Device {self.name} acquisition incomplete. "
|
||||||
|
f"Expected {self._num_triggers} triggers, "
|
||||||
|
f"but only {statistics.images_collected} were collected."
|
||||||
|
)
|
||||||
return
|
return
|
||||||
|
logger.info(
|
||||||
|
f"Waiting for device {self.name} to finish complete, time elapsed: "
|
||||||
|
f"{time.time() - start_time}."
|
||||||
|
)
|
||||||
statistics: MeasurementStatistics = self.jfj_client.api.statistics_data_collection_get(
|
statistics: MeasurementStatistics = self.jfj_client.api.statistics_data_collection_get(
|
||||||
_request_timeout=5
|
_request_timeout=5
|
||||||
)
|
)
|
||||||
|
broker_status = self.jfj_client.jfj_status
|
||||||
raise TimeoutError(
|
raise TimeoutError(
|
||||||
f"Timeout after waiting for detector {self.name} to complete for {time.time()-start_time:.2f}s, measurement statistics: {yaml.dump(statistics.to_dict(), indent=4)}"
|
f"Timeout after waiting for device {self.name} to complete for {time.time()-start_time:.2f}s \n \n"
|
||||||
|
f"Broker status: \n{yaml.dump(broker_status.to_dict(), indent=4)} \n \n"
|
||||||
|
f"Measurement statistics: \n{yaml.dump(statistics.to_dict(), indent=4)}"
|
||||||
)
|
)
|
||||||
|
|
||||||
status = self.task_handler.submit_task(wait_for_complete, run=True)
|
status = self.task_handler.submit_task(wait_for_complete, run=True)
|
||||||
@@ -312,7 +390,11 @@ class Eiger(PSIDeviceBase):
|
|||||||
|
|
||||||
def on_stop(self) -> None:
|
def on_stop(self) -> None:
|
||||||
"""Called when the device is stopped."""
|
"""Called when the device is stopped."""
|
||||||
self.jfj_client.stop(
|
self.jfj_client.stop(request_timeout=0.5)
|
||||||
request_timeout=0.5
|
|
||||||
) # Call should not block more than 0.5 seconds to stop all devices...
|
|
||||||
self.task_handler.shutdown()
|
self.task_handler.shutdown()
|
||||||
|
|
||||||
|
def on_destroy(self):
|
||||||
|
"""Called when the device is destroyed."""
|
||||||
|
self.jfj_preview_client.stop()
|
||||||
|
self.on_stop()
|
||||||
|
return super().on_destroy()
|
||||||
|
|||||||
@@ -21,18 +21,18 @@ if TYPE_CHECKING: # pragma no cover
|
|||||||
from bec_server.device_server.device_server import DeviceManagerDS
|
from bec_server.device_server.device_server import DeviceManagerDS
|
||||||
|
|
||||||
EIGER9M_READOUT_TIME_US = 500e-6 # 500 microseconds in s
|
EIGER9M_READOUT_TIME_US = 500e-6 # 500 microseconds in s
|
||||||
DETECTOR_NAME = "EIGER 8.5M (tmp)" # "EIGER 9M""
|
DETECTOR_NAME = "EIGER 9M" # "EIGER 9M""
|
||||||
|
|
||||||
|
|
||||||
# pylint:disable=invalid-name
|
# pylint:disable=invalid-name
|
||||||
class Eiger9M(Eiger):
|
class Eiger9M(Eiger):
|
||||||
"""
|
"""
|
||||||
Eiger 1.5M specific integration for the in-vaccum Eiger.
|
EIGER 9M specific integration for the in-vaccum Eiger.
|
||||||
|
|
||||||
The logic implemented here is coupled to the DelayGenerator integration,
|
The logic implemented here is coupled to the DelayGenerator integration,
|
||||||
repsonsible for the global triggering of all devices through a single Trigger logic.
|
repsonsible for the global triggering of all devices through a single Trigger logic.
|
||||||
Please check the eiger.py class for more details about the integration of relevant backend
|
Please check the eiger.py class for more details about the integration of relevant backend
|
||||||
services. The detector_name must be set to "EIGER 1.5M:
|
services. The detector_name must be set to "EIGER 9M":
|
||||||
"""
|
"""
|
||||||
|
|
||||||
USER_ACCESS = Eiger.USER_ACCESS + [] # Add more user_access methods here.
|
USER_ACCESS = Eiger.USER_ACCESS + [] # Add more user_access methods here.
|
||||||
|
|||||||
@@ -1,13 +1,15 @@
|
|||||||
"""Module with client interface for the Jungfrau Joch detector API"""
|
"""Module with a thin client wrapper around the Jungfrau Joch detector API"""
|
||||||
|
|
||||||
from __future__ import annotations
|
from __future__ import annotations
|
||||||
|
|
||||||
import enum
|
import enum
|
||||||
|
import threading
|
||||||
import time
|
import time
|
||||||
import traceback
|
import traceback
|
||||||
from typing import TYPE_CHECKING
|
from typing import TYPE_CHECKING
|
||||||
|
|
||||||
import requests
|
import requests
|
||||||
|
import yaml
|
||||||
from bec_lib.logger import bec_logger
|
from bec_lib.logger import bec_logger
|
||||||
from jfjoch_client.api.default_api import DefaultApi
|
from jfjoch_client.api.default_api import DefaultApi
|
||||||
from jfjoch_client.api_client import ApiClient
|
from jfjoch_client.api_client import ApiClient
|
||||||
@@ -18,7 +20,7 @@ from jfjoch_client.models.detector_settings import DetectorSettings
|
|||||||
|
|
||||||
logger = bec_logger.logger
|
logger = bec_logger.logger
|
||||||
|
|
||||||
if TYPE_CHECKING:
|
if TYPE_CHECKING: # pragma: no cover
|
||||||
from ophyd import Device
|
from ophyd import Device
|
||||||
|
|
||||||
|
|
||||||
@@ -29,7 +31,10 @@ class JungfrauJochClientError(Exception):
|
|||||||
|
|
||||||
|
|
||||||
class DetectorState(str, enum.Enum):
|
class DetectorState(str, enum.Enum):
|
||||||
"""Possible Detector states for Jungfrau Joch detector"""
|
"""
|
||||||
|
Enum states of the BrokerStatus state. The pydantic model validates in runtime,
|
||||||
|
thus we keep the possible states here for a convenient overview and access.
|
||||||
|
"""
|
||||||
|
|
||||||
INACTIVE = "Inactive"
|
INACTIVE = "Inactive"
|
||||||
IDLE = "Idle"
|
IDLE = "Idle"
|
||||||
@@ -40,13 +45,15 @@ class DetectorState(str, enum.Enum):
|
|||||||
|
|
||||||
|
|
||||||
class JungfrauJochClient:
|
class JungfrauJochClient:
|
||||||
"""Thin wrapper around the Jungfrau Joch API client.
|
"""
|
||||||
|
Jungfrau Joch API client wrapper. It provides a thin wrapper methods around the API client,
|
||||||
|
that allow to connect, initialise, wait for state changes, set settings, start and stop
|
||||||
|
acquisitions.
|
||||||
|
|
||||||
sudo systemctl restart jfjoch_broker
|
Args:
|
||||||
sudo systemctl status jfjoch_broker
|
host (str): Hostname of the Jungfrau Joch broker service.
|
||||||
|
Default is "http://sls-jfjoch-001:8080"
|
||||||
It looks as if the detector is not being stopped properly.
|
parent (Device, optional): Parent ophyd device, used for logging purposes.
|
||||||
One module remains running, how can we restart the detector?
|
|
||||||
"""
|
"""
|
||||||
|
|
||||||
def __init__(
|
def __init__(
|
||||||
@@ -59,50 +66,63 @@ class JungfrauJochClient:
|
|||||||
self._parent_name = parent.name if parent else self.__class__.__name__
|
self._parent_name = parent.name if parent else self.__class__.__name__
|
||||||
|
|
||||||
@property
|
@property
|
||||||
def jjf_state(self) -> BrokerStatus:
|
def jfj_status(self) -> BrokerStatus:
|
||||||
"""Get the status of JungfrauJoch"""
|
"""Broker status of JungfrauJoch."""
|
||||||
response = self.api.status_get()
|
response = self.api.status_get()
|
||||||
return BrokerStatus(**response.to_dict())
|
return BrokerStatus(**response.to_dict())
|
||||||
|
|
||||||
|
# pylint: disable=missing-function-docstring
|
||||||
@property
|
@property
|
||||||
def initialised(self) -> bool:
|
def initialised(self) -> bool:
|
||||||
"""Check if jfj is connected and ready to receive commands"""
|
|
||||||
return self._initialised
|
return self._initialised
|
||||||
|
|
||||||
@initialised.setter
|
@initialised.setter
|
||||||
def initialised(self, value: bool) -> None:
|
def initialised(self, value: bool) -> None:
|
||||||
"""Set the connected status"""
|
|
||||||
self._initialised = value
|
self._initialised = value
|
||||||
|
|
||||||
# TODO this is not correct, as it may be that the state in INACTIVE. Models are not in sync...
|
# pylint: disable=missing-function-docstring
|
||||||
# REMOVE all model enums as most of the validation takes place in the Pydantic models, i.e. BrokerStatus here..
|
|
||||||
@property
|
@property
|
||||||
def detector_state(self) -> DetectorState:
|
def detector_state(self) -> DetectorState:
|
||||||
"""Get the status of JungfrauJoch"""
|
return DetectorState(self.jfj_status.state)
|
||||||
return DetectorState(self.jjf_state.state)
|
|
||||||
|
|
||||||
def connect_and_initialise(self, timeout: int = 10, **kwargs) -> None:
|
def connect_and_initialise(self, timeout: int = 10) -> None:
|
||||||
"""Check if JungfrauJoch is connected and ready to receive commands"""
|
"""
|
||||||
|
Connect and initialise the JungfrauJoch detector. The detector must be in
|
||||||
|
IDLE state to become initialised. This is a blocking call, the timeout parameter
|
||||||
|
will be passed to the HTTP requests timeout method of the wait_for_idle method.
|
||||||
|
|
||||||
|
Args:
|
||||||
|
timeout (int): Timeout in seconds for the initialisation and waiting for IDLE state.
|
||||||
|
"""
|
||||||
status = self.detector_state
|
status = self.detector_state
|
||||||
|
# TODO: #135 Check if the detector has to be in INACTIVE state before initialisation
|
||||||
if status != DetectorState.IDLE:
|
if status != DetectorState.IDLE:
|
||||||
self.api.initialize_post() # This is a blocking call....
|
self.api.initialize_post()
|
||||||
self.wait_for_idle(timeout, request_timeout=timeout) # Blocking call
|
self.wait_for_idle(timeout)
|
||||||
self.initialised = True
|
self.initialised = True
|
||||||
|
|
||||||
def set_detector_settings(self, settings: dict | DetectorSettings, timeout: int = 10) -> None:
|
def set_detector_settings(self, settings: dict | DetectorSettings, timeout: int = 10) -> None:
|
||||||
"""Set the detector settings. JungfrauJoch must be in IDLE, Error or Inactive state.
|
"""
|
||||||
Note, the full settings have to be provided, otherwise the settings will be overwritten with default values.
|
Set the detector settings. The state of JungfrauJoch must be in IDLE,
|
||||||
|
Error or Inactive state. Please note: a full set of setttings has to be provided,
|
||||||
|
otherwise the settings will be overwritten with default values.
|
||||||
|
|
||||||
Args:
|
Args:
|
||||||
settings (dict): dictionary of settings
|
settings (dict): dictionary of settings
|
||||||
|
timeout (int): Timeout in seconds for the HTTP request to set the settings.
|
||||||
"""
|
"""
|
||||||
state = self.detector_state
|
state = self.detector_state
|
||||||
if state not in [DetectorState.IDLE, DetectorState.ERROR, DetectorState.INACTIVE]:
|
if state not in [DetectorState.IDLE, DetectorState.ERROR, DetectorState.INACTIVE]:
|
||||||
|
logger.info(
|
||||||
|
f"JungfrauJoch backend fo device {self._parent_name} is not in IDLE state,"
|
||||||
|
" waiting 1s before retrying..."
|
||||||
|
)
|
||||||
time.sleep(1) # Give the detector 1s to become IDLE, retry
|
time.sleep(1) # Give the detector 1s to become IDLE, retry
|
||||||
state = self.detector_state
|
state = self.detector_state
|
||||||
if state not in [DetectorState.IDLE, DetectorState.ERROR, DetectorState.INACTIVE]:
|
if state not in [DetectorState.IDLE, DetectorState.ERROR, DetectorState.INACTIVE]:
|
||||||
raise JungfrauJochClientError(
|
raise JungfrauJochClientError(
|
||||||
f"Error in {self._parent_name}. Detector must be in IDLE, ERROR or INACTIVE state to set settings. Current state: {state}"
|
f"Error on {self._parent_name}. Detector must be in IDLE, ERROR or INACTIVE"
|
||||||
|
" state to set settings. Current state: {state}"
|
||||||
)
|
)
|
||||||
|
|
||||||
if isinstance(settings, dict):
|
if isinstance(settings, dict):
|
||||||
@@ -110,28 +130,36 @@ class JungfrauJochClient:
|
|||||||
try:
|
try:
|
||||||
self.api.config_detector_put(detector_settings=settings, _request_timeout=timeout)
|
self.api.config_detector_put(detector_settings=settings, _request_timeout=timeout)
|
||||||
except requests.exceptions.Timeout:
|
except requests.exceptions.Timeout:
|
||||||
raise TimeoutError(f"Timeout while setting detector settings for {self._parent_name}")
|
raise TimeoutError(
|
||||||
|
f"Timeout on device {self._parent_name} while setting detector settings:\n "
|
||||||
|
f"{yaml.dump(settings, indent=4)}."
|
||||||
|
)
|
||||||
except Exception:
|
except Exception:
|
||||||
content = traceback.format_exc()
|
content = traceback.format_exc()
|
||||||
|
logger.error(
|
||||||
|
f"Error on device {self._parent_name} while setting detector settings:\n "
|
||||||
|
f"{yaml.dump(settings, indent=4)}. Error traceback: {content}"
|
||||||
|
)
|
||||||
raise JungfrauJochClientError(
|
raise JungfrauJochClientError(
|
||||||
f"Error while setting detector settings for {self._parent_name}: {content}"
|
f"Error on device {self._parent_name} while setting detector settings:\n "
|
||||||
|
f"{yaml.dump(settings, indent=4)}. Full traceback: {content}."
|
||||||
)
|
)
|
||||||
|
|
||||||
def start(self, settings: dict | DatasetSettings, request_timeout: float = 10) -> None:
|
def start(self, settings: dict | DatasetSettings, request_timeout: float = 10) -> None:
|
||||||
"""Start the mesaurement. DatasetSettings must be provided, and JungfrauJoch must be in IDLE state.
|
"""
|
||||||
The method call is blocking and JungfrauJoch will be ready to measure after the call resolves.
|
Start the acquisition with the provided dataset settings.
|
||||||
|
The detector must be in IDLE state. Settings must always provide a full set of
|
||||||
|
parameters, missing parameters will be set to default values.
|
||||||
|
|
||||||
Args:
|
Args:
|
||||||
settings (dict): dictionary of settings
|
settings (dict | DatasetSettings): Dataset settings to start the acquisition with.
|
||||||
|
request_timeout (float): Timeout in sec for the HTTP request to start the acquisition.
|
||||||
Please check the DataSettings class for the available settings. Minimum required settings are
|
|
||||||
beam_x_pxl, beam_y_pxl, detector_distance_mm, incident_energy_keV.
|
|
||||||
|
|
||||||
"""
|
"""
|
||||||
state = self.detector_state
|
state = self.detector_state
|
||||||
if state != DetectorState.IDLE:
|
if state != DetectorState.IDLE:
|
||||||
raise JungfrauJochClientError(
|
raise JungfrauJochClientError(
|
||||||
f"Error in {self._parent_name}. Detector must be in IDLE state to set settings. Current state: {state}"
|
f"Error on device {self._parent_name}. "
|
||||||
|
f"Detector must be in IDLE state to start acquisition. Current state: {state}"
|
||||||
)
|
)
|
||||||
|
|
||||||
if isinstance(settings, dict):
|
if isinstance(settings, dict):
|
||||||
@@ -141,46 +169,80 @@ class JungfrauJochClient:
|
|||||||
dataset_settings=settings, _request_timeout=request_timeout
|
dataset_settings=settings, _request_timeout=request_timeout
|
||||||
)
|
)
|
||||||
except requests.exceptions.Timeout:
|
except requests.exceptions.Timeout:
|
||||||
|
content = traceback.format_exc()
|
||||||
|
logger.error(
|
||||||
|
f"Timeout error after {request_timeout} seconds on device {self._parent_name} "
|
||||||
|
f"during 'start' call with dataset settings: {yaml.dump(settings, indent=4)}. \n"
|
||||||
|
f"Traceback: {content}"
|
||||||
|
)
|
||||||
raise TimeoutError(
|
raise TimeoutError(
|
||||||
f"TimeoutError in JungfrauJochClient for parent device {self._parent_name} for 'start' call"
|
f"Timeout error after {request_timeout} seconds on device {self._parent_name} "
|
||||||
|
f"during 'start' call with dataset settings: {yaml.dump(settings, indent=4)}."
|
||||||
)
|
)
|
||||||
except Exception:
|
except Exception:
|
||||||
content = traceback.format_exc()
|
content = traceback.format_exc()
|
||||||
|
logger.error(
|
||||||
|
f"Error on device {self._parent_name} during 'start' post with dataset settings: \n"
|
||||||
|
f"{yaml.dump(settings, indent=4)}. \nTraceback: {content}"
|
||||||
|
)
|
||||||
raise JungfrauJochClientError(
|
raise JungfrauJochClientError(
|
||||||
f"Error in JungfrauJochClient for parent device {self._parent_name} during 'start' call: {content}"
|
f"Error on device {self._parent_name} during 'start' post with dataset settings: \n"
|
||||||
|
f"{yaml.dump(settings, indent=4)}. \nTraceback: {content}."
|
||||||
)
|
)
|
||||||
|
|
||||||
def stop(self, request_timeout: float = 0.5) -> None:
|
def stop(self, request_timeout: float = 0.5) -> None:
|
||||||
"""Stop the acquisition, this only logs errors and is not raising."""
|
"""Stop the acquisition, this only logs errors and is not raising."""
|
||||||
try:
|
|
||||||
self.api.cancel_post_with_http_info(_request_timeout=request_timeout)
|
|
||||||
except requests.exceptions.Timeout:
|
|
||||||
content = traceback.format_exc()
|
|
||||||
logger.error(
|
|
||||||
f"Timeout in JungFrauJochClient for device {self._parent_name} during stop: {content}"
|
|
||||||
)
|
|
||||||
except Exception:
|
|
||||||
content = traceback.format_exc()
|
|
||||||
logger.error(
|
|
||||||
f"Error in JungFrauJochClient for device {self._parent_name} during stop: {content}"
|
|
||||||
)
|
|
||||||
|
|
||||||
def wait_for_idle(self, timeout: int = 10, request_timeout: float | None = None) -> bool:
|
def _stop_call(self):
|
||||||
"""Wait for JungfrauJoch to be in Idle state. Blocking call with timeout.
|
try:
|
||||||
|
self.api.cancel_post_with_http_info() # (_request_timeout=request_timeout)
|
||||||
|
except requests.exceptions.Timeout:
|
||||||
|
content = traceback.format_exc()
|
||||||
|
logger.error(
|
||||||
|
f"Timeout error after {request_timeout} seconds on device {self._parent_name} "
|
||||||
|
f"during stop: {content}"
|
||||||
|
)
|
||||||
|
except Exception:
|
||||||
|
content = traceback.format_exc()
|
||||||
|
logger.error(f"Error on device {self._parent_name} during stop: {content}")
|
||||||
|
|
||||||
|
thread = threading.Thread(
|
||||||
|
target=_stop_call, daemon=True, args=(self,), name="stop_jungfraujoch_thread"
|
||||||
|
)
|
||||||
|
thread.start()
|
||||||
|
|
||||||
|
def wait_for_idle(self, timeout: int = 10, raise_on_timeout: bool = True) -> bool:
|
||||||
|
"""
|
||||||
|
Method to wait until the detector is in IDLE state. This is a blocking call with a
|
||||||
|
timeout that can be specified. The additional parameter raise_on_timeout can be used to
|
||||||
|
raise an exception on timeout instead of returning boolean True/False.
|
||||||
|
|
||||||
Args:
|
Args:
|
||||||
timeout (int): timeout in seconds
|
timeout (int): timeout in seconds
|
||||||
|
raise_on_timeout (bool): If True, raises an exception on timeout. Default is True.
|
||||||
Returns:
|
Returns:
|
||||||
bool: True if the detector is in IDLE state, False if timeout occurred
|
bool: True if the detector is in IDLE state, False if timeout occurred
|
||||||
"""
|
"""
|
||||||
if request_timeout is None:
|
|
||||||
request_timeout = timeout
|
|
||||||
try:
|
try:
|
||||||
self.api.wait_till_done_post(timeout=timeout, _request_timeout=request_timeout)
|
self.api.wait_till_done_post(timeout=timeout, _request_timeout=timeout)
|
||||||
except requests.exceptions.Timeout:
|
except requests.exceptions.Timeout:
|
||||||
raise TimeoutError(f"HTTP request timeout in wait_for_idle for {self._parent_name}")
|
|
||||||
except Exception:
|
|
||||||
content = traceback.format_exc()
|
content = traceback.format_exc()
|
||||||
logger.debug(f"Waiting for device {self._parent_name} to become IDLE: {content}")
|
logger.info(
|
||||||
|
f"Timeout after {timeout} seconds on device {self._parent_name} in wait_for_idle: {content}"
|
||||||
|
)
|
||||||
|
if raise_on_timeout:
|
||||||
|
raise TimeoutError(
|
||||||
|
f"Timeout after {timeout} seconds on device {self._parent_name} in wait_for_idle."
|
||||||
|
)
|
||||||
|
return False
|
||||||
|
except Exception as exc:
|
||||||
|
content = traceback.format_exc()
|
||||||
|
logger.info(
|
||||||
|
f"Error on device {self._parent_name} in wait_for_idle. Full traceback: {content}"
|
||||||
|
)
|
||||||
|
if raise_on_timeout:
|
||||||
|
raise JungfrauJochClientError(
|
||||||
|
f"Error on device {self._parent_name} in wait_for_idle: {content}"
|
||||||
|
) from exc
|
||||||
return False
|
return False
|
||||||
return True
|
return True
|
||||||
|
|||||||
@@ -1,22 +1,136 @@
|
|||||||
"""Module for the Eiger preview ZMQ stream."""
|
"""
|
||||||
|
Module for the JungfrauJoch preview ZMQ stream for the Eiger detector at cSAXS.
|
||||||
|
The Preview client is implemented for the JungfrauJoch ZMQ PUB-SUB interface, and
|
||||||
|
should be independent of the EIGER detector type.
|
||||||
|
|
||||||
|
The client connects to the ZMQ PUB-SUB preview stream and calls a user provided callback
|
||||||
|
function with the decompressed messages received from the stream. The callback needs to be
|
||||||
|
able to deal with the different message types sent by the JungfrauJoch server ("start",
|
||||||
|
"image", "end") as described in the JungfrauJoch ZEROMQ preview stream documentation.
|
||||||
|
(https://jungfraujoch.readthedocs.io/en/latest/ZEROMQ_STREAM.html#preview-stream).
|
||||||
|
"""
|
||||||
|
|
||||||
from __future__ import annotations
|
from __future__ import annotations
|
||||||
|
|
||||||
import json
|
|
||||||
import threading
|
import threading
|
||||||
import time
|
import time
|
||||||
from typing import Callable
|
from typing import Callable
|
||||||
|
|
||||||
|
import cbor2
|
||||||
import numpy as np
|
import numpy as np
|
||||||
import zmq
|
import zmq
|
||||||
from bec_lib.logger import bec_logger
|
from bec_lib.logger import bec_logger
|
||||||
|
from dectris.compression import decompress
|
||||||
|
|
||||||
logger = bec_logger.logger
|
logger = bec_logger.logger
|
||||||
|
|
||||||
ZMQ_TOPIC_FILTER = b""
|
###############################
|
||||||
|
###### CBOR TAG DECODERS ######
|
||||||
|
###############################
|
||||||
|
# Dectris specific CBOR tags and decoders for Jungfrau data
|
||||||
|
# Reference:
|
||||||
|
# https://github.com/dectris/documentation/blob/main/stream_v2/examples/client.py
|
||||||
|
|
||||||
|
|
||||||
|
def decode_multi_dim_array(tag: cbor2.CBORTag, column_major: bool = False):
|
||||||
|
"""Decode a multi-dimensional array from a CBOR tag."""
|
||||||
|
dimensions, contents = tag.value
|
||||||
|
if isinstance(contents, list):
|
||||||
|
array = np.empty((len(contents),), dtype=object)
|
||||||
|
array[:] = contents
|
||||||
|
elif isinstance(contents, (np.ndarray, np.generic)):
|
||||||
|
array = contents
|
||||||
|
else:
|
||||||
|
raise cbor2.CBORDecodeValueError("expected array or typed array")
|
||||||
|
return array.reshape(dimensions, order="F" if column_major else "C")
|
||||||
|
|
||||||
|
|
||||||
|
def decode_typed_array(tag: cbor2.CBORTag, dtype: str):
|
||||||
|
"""Decode a typed array from a CBOR tag."""
|
||||||
|
if not isinstance(tag.value, bytes):
|
||||||
|
raise cbor2.CBORDecodeValueError("expected byte string in typed array")
|
||||||
|
return np.frombuffer(tag.value, dtype=dtype)
|
||||||
|
|
||||||
|
|
||||||
|
def decode_dectris_compression(tag: cbor2.CBORTag):
|
||||||
|
"""Decode a Dectris compressed array from a CBOR tag."""
|
||||||
|
algorithm, elem_size, encoded = tag.value
|
||||||
|
return decompress(encoded, algorithm, elem_size=elem_size)
|
||||||
|
|
||||||
|
|
||||||
|
#########################################
|
||||||
|
#### Dectris CBOR TAG Extensions ########
|
||||||
|
#########################################
|
||||||
|
|
||||||
|
# Mapping of various additional CBOR tags from Dectris to decoder functions
|
||||||
|
tag_decoders = {
|
||||||
|
40: lambda tag: decode_multi_dim_array(tag, column_major=False),
|
||||||
|
64: lambda tag: decode_typed_array(tag, dtype="u1"),
|
||||||
|
65: lambda tag: decode_typed_array(tag, dtype=">u2"),
|
||||||
|
66: lambda tag: decode_typed_array(tag, dtype=">u4"),
|
||||||
|
67: lambda tag: decode_typed_array(tag, dtype=">u8"),
|
||||||
|
68: lambda tag: decode_typed_array(tag, dtype="u1"),
|
||||||
|
69: lambda tag: decode_typed_array(tag, dtype="<u2"),
|
||||||
|
70: lambda tag: decode_typed_array(tag, dtype="<u4"),
|
||||||
|
71: lambda tag: decode_typed_array(tag, dtype="<u8"),
|
||||||
|
72: lambda tag: decode_typed_array(tag, dtype="i1"),
|
||||||
|
73: lambda tag: decode_typed_array(tag, dtype=">i2"),
|
||||||
|
74: lambda tag: decode_typed_array(tag, dtype=">i4"),
|
||||||
|
75: lambda tag: decode_typed_array(tag, dtype=">i8"),
|
||||||
|
77: lambda tag: decode_typed_array(tag, dtype="<i2"),
|
||||||
|
78: lambda tag: decode_typed_array(tag, dtype="<i4"),
|
||||||
|
79: lambda tag: decode_typed_array(tag, dtype="<i8"),
|
||||||
|
80: lambda tag: decode_typed_array(tag, dtype=">f2"),
|
||||||
|
81: lambda tag: decode_typed_array(tag, dtype=">f4"),
|
||||||
|
82: lambda tag: decode_typed_array(tag, dtype=">f8"),
|
||||||
|
83: lambda tag: decode_typed_array(tag, dtype=">f16"),
|
||||||
|
84: lambda tag: decode_typed_array(tag, dtype="<f2"),
|
||||||
|
85: lambda tag: decode_typed_array(tag, dtype="<f4"),
|
||||||
|
86: lambda tag: decode_typed_array(tag, dtype="<f8"),
|
||||||
|
87: lambda tag: decode_typed_array(tag, dtype="<f16"),
|
||||||
|
1040: lambda tag: decode_multi_dim_array(tag, column_major=True),
|
||||||
|
56500: lambda tag: decode_dectris_compression(tag), # pylint: disable=unnecessary-lambda
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
def tag_hook(decoder, tag: int):
|
||||||
|
"""
|
||||||
|
Tag hook for the cbor2.loads method. Both arguments "decoder" and "tag" mus be present.
|
||||||
|
We use the tag to choose the respective decoder from the tag_decoders registry if available.
|
||||||
|
"""
|
||||||
|
tag_decoder = tag_decoders.get(tag.tag)
|
||||||
|
return tag_decoder(tag) if tag_decoder else tag
|
||||||
|
|
||||||
|
|
||||||
|
######################
|
||||||
|
#### ZMQ Settings ####
|
||||||
|
######################
|
||||||
|
|
||||||
|
ZMQ_TOPIC_FILTER = b"" # Subscribe to all topics
|
||||||
|
ZMQ_CONFLATE_SETTING = 1 # Keep only the most recent message
|
||||||
|
ZMQ_RCVHWM_SETTING = 1 # Set high water mark to 1, this configures the max number of queue messages
|
||||||
|
|
||||||
|
|
||||||
|
#################################
|
||||||
|
#### Jungfrau Preview Client ####
|
||||||
|
#################################
|
||||||
|
|
||||||
|
|
||||||
class JungfrauJochPreview:
|
class JungfrauJochPreview:
|
||||||
|
"""
|
||||||
|
Preview client for the JungfrauJoch ZMQ preview stream. The client is started with
|
||||||
|
a URL to receive the data from the JungfrauJoch PUB-SUB preview interface, and a
|
||||||
|
callback function that is called with messages received from the preview stream.
|
||||||
|
The callback needs to be able to deal with the different message types sent
|
||||||
|
by the JungfrauJoch server ("start", "image", "end") as described in the
|
||||||
|
JungfrauJoch ZEROMQ preview stream documentation. Messages are dictionary dumps.
|
||||||
|
(https://jungfraujoch.readthedocs.io/en/latest/ZEROMQ_STREAM.html#preview-stream).
|
||||||
|
|
||||||
|
Args:
|
||||||
|
url (str): ZMQ PUB-SUB preview stream URL.
|
||||||
|
cb (Callable): Callback function called with messages received from the stream.
|
||||||
|
"""
|
||||||
|
|
||||||
USER_ACCESS = ["start", "stop"]
|
USER_ACCESS = ["start", "stop"]
|
||||||
|
|
||||||
def __init__(self, url: str, cb: Callable):
|
def __init__(self, url: str, cb: Callable):
|
||||||
@@ -27,16 +141,18 @@ class JungfrauJochPreview:
|
|||||||
self._on_update_callback = cb
|
self._on_update_callback = cb
|
||||||
|
|
||||||
def connect(self):
|
def connect(self):
|
||||||
"""Connect to the JungfrauJoch PUB-SUB streaming interface
|
"""
|
||||||
|
Connect to the JungfrauJoch PUB-SUB streaming interface. If the connection is refused
|
||||||
JungfrauJoch may reject connection for a few seconds when it restarts,
|
it will reattempt a second time after a one second delay.
|
||||||
so if it fails, wait a bit and try to connect again.
|
|
||||||
"""
|
"""
|
||||||
# pylint: disable=no-member
|
# pylint: disable=no-member
|
||||||
|
|
||||||
context = zmq.Context()
|
context = zmq.Context()
|
||||||
self._socket = context.socket(zmq.SUB)
|
self._socket = context.socket(zmq.SUB)
|
||||||
|
self._socket.setsockopt(zmq.CONFLATE, ZMQ_CONFLATE_SETTING)
|
||||||
self._socket.setsockopt(zmq.SUBSCRIBE, ZMQ_TOPIC_FILTER)
|
self._socket.setsockopt(zmq.SUBSCRIBE, ZMQ_TOPIC_FILTER)
|
||||||
|
self._socket.setsockopt(zmq.RCVHWM, ZMQ_RCVHWM_SETTING)
|
||||||
|
|
||||||
try:
|
try:
|
||||||
self._socket.connect(self.url)
|
self._socket.connect(self.url)
|
||||||
except ConnectionRefusedError:
|
except ConnectionRefusedError:
|
||||||
@@ -44,17 +160,26 @@ class JungfrauJochPreview:
|
|||||||
self._socket.connect(self.url)
|
self._socket.connect(self.url)
|
||||||
|
|
||||||
def start(self):
|
def start(self):
|
||||||
|
"""Start the ZMQ update loop in a background thread."""
|
||||||
self._zmq_thread = threading.Thread(
|
self._zmq_thread = threading.Thread(
|
||||||
target=self._zmq_update_loop, daemon=True, name="JungfrauJoch_live_preview"
|
target=self._zmq_update_loop, daemon=True, name="JungfrauJoch_live_preview"
|
||||||
)
|
)
|
||||||
self._zmq_thread.start()
|
self._zmq_thread.start()
|
||||||
|
|
||||||
def stop(self):
|
def stop(self):
|
||||||
|
"""Stop the ZMQ update loop and wait for the thread to finish."""
|
||||||
self._shutdown_event.set()
|
self._shutdown_event.set()
|
||||||
if self._zmq_thread:
|
if self._zmq_thread:
|
||||||
self._zmq_thread.join()
|
self._zmq_thread.join(timeout=1.0)
|
||||||
|
|
||||||
def _zmq_update_loop(self):
|
def _zmq_update_loop(self, poll_interval: float = 0.2):
|
||||||
|
"""
|
||||||
|
ZMQ update loop running in a background thread. The polling is throttled by
|
||||||
|
the poll_interval parameter.
|
||||||
|
|
||||||
|
Args:
|
||||||
|
poll_interval (float): Time in seconds to wait between polling attempts.
|
||||||
|
"""
|
||||||
while not self._shutdown_event.is_set():
|
while not self._shutdown_event.is_set():
|
||||||
if self._socket is None:
|
if self._socket is None:
|
||||||
self.connect()
|
self.connect()
|
||||||
@@ -64,18 +189,21 @@ class JungfrauJochPreview:
|
|||||||
# Happens when ZMQ partially delivers the multipart message
|
# Happens when ZMQ partially delivers the multipart message
|
||||||
pass
|
pass
|
||||||
except zmq.error.Again:
|
except zmq.error.Again:
|
||||||
# Happens when receive queue is empty
|
logger.debug(
|
||||||
time.sleep(0.1)
|
f"ZMQ Again exception, receive queue is empty for JFJ preview at {self.url}."
|
||||||
|
)
|
||||||
|
finally:
|
||||||
|
# We throttle the polling to avoid heavy load on the device server
|
||||||
|
time.sleep(poll_interval)
|
||||||
|
|
||||||
def _poll(self):
|
def _poll(self):
|
||||||
"""
|
"""
|
||||||
Poll the ZMQ socket for new data. It will throttle the data update and
|
Poll the ZMQ socket for new data. We are currently subscribing and unsubscribing
|
||||||
only subscribe to the topic for a single update. This is not very nice
|
for each poll loop to avoid receiving too many messages. Throttling of the update
|
||||||
but it seems like there is currently no option to set the update rate on
|
loop is handled in the _zmq_update_loop method.
|
||||||
the backend.
|
|
||||||
"""
|
"""
|
||||||
|
|
||||||
if self._shutdown_event.wait(0.2):
|
if self._shutdown_event.is_set():
|
||||||
return
|
return
|
||||||
|
|
||||||
try:
|
try:
|
||||||
@@ -90,7 +218,19 @@ class JungfrauJochPreview:
|
|||||||
# Unsubscribe from the topic
|
# Unsubscribe from the topic
|
||||||
self._socket.setsockopt(zmq.UNSUBSCRIBE, ZMQ_TOPIC_FILTER)
|
self._socket.setsockopt(zmq.UNSUBSCRIBE, ZMQ_TOPIC_FILTER)
|
||||||
|
|
||||||
def _parse_data(self, data):
|
def _parse_data(self, bytes_list: list[bytes]):
|
||||||
# TODO decode and parse the data
|
"""
|
||||||
# self._on_update_callback(data)
|
Parse the received ZMQ data from the JungfrauJoch preview stream.
|
||||||
pass
|
We will call the _on_update_callback with the decompressed messages as a dictionary.
|
||||||
|
|
||||||
|
The callback needs to be able to deal with the different message types sent
|
||||||
|
by the JungfrauJoch server ("start", "image", "end") as described in the
|
||||||
|
JungfrauJoch ZEROMQ preview stream documentation. Messages are dictionary dumps.
|
||||||
|
(https://jungfraujoch.readthedocs.io/en/latest/ZEROMQ_STREAM.html#preview-stream).
|
||||||
|
|
||||||
|
Args:
|
||||||
|
bytes_list (list[bytes]): List of byte messages received from ZMQ recv_multipart.
|
||||||
|
"""
|
||||||
|
for byte_msg in bytes_list:
|
||||||
|
msg = cbor2.loads(byte_msg, tag_hook=tag_hook)
|
||||||
|
self._on_update_callback(msg)
|
||||||
|
|||||||
@@ -4,6 +4,7 @@ This module contains the base class for Galil controllers as well as the signals
|
|||||||
|
|
||||||
import functools
|
import functools
|
||||||
import time
|
import time
|
||||||
|
from typing import Any
|
||||||
|
|
||||||
from bec_lib import bec_logger
|
from bec_lib import bec_logger
|
||||||
from ophyd.utils import ReadOnlyError
|
from ophyd.utils import ReadOnlyError
|
||||||
@@ -347,7 +348,7 @@ class GalilSignalBase(SocketSignal):
|
|||||||
def __init__(self, signal_name, **kwargs):
|
def __init__(self, signal_name, **kwargs):
|
||||||
self.signal_name = signal_name
|
self.signal_name = signal_name
|
||||||
super().__init__(**kwargs)
|
super().__init__(**kwargs)
|
||||||
self.controller = self.parent.controller
|
self.controller = self.root.controller if hasattr(self.root, "controller") else None
|
||||||
|
|
||||||
|
|
||||||
class GalilSignalRO(GalilSignalBase):
|
class GalilSignalRO(GalilSignalBase):
|
||||||
|
|||||||
@@ -6,24 +6,26 @@ Link to the Galil RIO vendor page:
|
|||||||
https://www.galil.com/plcs/remote-io/rio-471xx
|
https://www.galil.com/plcs/remote-io/rio-471xx
|
||||||
|
|
||||||
This module provides the GalilRIOController for communication with the RIO controller
|
This module provides the GalilRIOController for communication with the RIO controller
|
||||||
over TCP/IP. It also provides a device integration that interfaces to these
|
over TCP/IP. It also provides a device integration that interfaces to its
|
||||||
8 analog channels.
|
8 analog channels, and 16 digital output channels. Some PLCs may have 24 digital output channels,
|
||||||
|
which can be easily supported by changing the _NUM_DIGITAL_OUTPUT_CHANNELS variable.
|
||||||
"""
|
"""
|
||||||
|
|
||||||
from __future__ import annotations
|
from __future__ import annotations
|
||||||
|
|
||||||
import time
|
from typing import TYPE_CHECKING, Literal
|
||||||
from typing import TYPE_CHECKING
|
|
||||||
|
|
||||||
from bec_lib.logger import bec_logger
|
from bec_lib.logger import bec_logger
|
||||||
from ophyd import Component as Cpt
|
from ophyd import DynamicDeviceComponent as DDC
|
||||||
|
from ophyd import Kind
|
||||||
|
from ophyd.utils import ReadOnlyError
|
||||||
from ophyd_devices import PSIDeviceBase
|
from ophyd_devices import PSIDeviceBase
|
||||||
from ophyd_devices.utils.controller import Controller, threadlocked
|
from ophyd_devices.utils.controller import Controller, threadlocked
|
||||||
from ophyd_devices.utils.socket import SocketIO
|
from ophyd_devices.utils.socket import SocketIO
|
||||||
|
|
||||||
from csaxs_bec.devices.omny.galil.galil_ophyd import (
|
from csaxs_bec.devices.omny.galil.galil_ophyd import (
|
||||||
GalilCommunicationError,
|
GalilCommunicationError,
|
||||||
GalilSignalRO,
|
GalilSignalBase,
|
||||||
retry_once,
|
retry_once,
|
||||||
)
|
)
|
||||||
|
|
||||||
@@ -35,15 +37,11 @@ logger = bec_logger.logger
|
|||||||
|
|
||||||
|
|
||||||
class GalilRIOController(Controller):
|
class GalilRIOController(Controller):
|
||||||
"""
|
"""Controller Class for Galil RIO controller communication."""
|
||||||
Controller Class for Galil RIO controller communication.
|
|
||||||
|
|
||||||
Multiple controllers are in use at the cSAXS beamline:
|
|
||||||
- 129.129.98.64 (port 23)
|
|
||||||
"""
|
|
||||||
|
|
||||||
@threadlocked
|
@threadlocked
|
||||||
def socket_put(self, val: str) -> None:
|
def socket_put(self, val: str) -> None:
|
||||||
|
"""Socker put method."""
|
||||||
self.sock.put(f"{val}\r".encode())
|
self.sock.put(f"{val}\r".encode())
|
||||||
|
|
||||||
@retry_once
|
@retry_once
|
||||||
@@ -64,21 +62,95 @@ class GalilRIOController(Controller):
|
|||||||
)
|
)
|
||||||
|
|
||||||
|
|
||||||
class GalilRIOSignalRO(GalilSignalRO):
|
class GalilRIOAnalogSignalRO(GalilSignalBase):
|
||||||
"""
|
"""
|
||||||
Read-only Signal for reading a single analog input channel from the Galil RIO controller.
|
Signal for reading analog input channels of the Galil RIO controller. This signal is read-only, so
|
||||||
It always read all 8 analog channels at once, and updates the reabacks of all channels.
|
the set method raises a ReadOnlyError. The get method retrieves the values of all analog
|
||||||
New readbacks are only fetched from the controller if the last readback is older than
|
channels in a single socket command. The readback values of all channels are updated based
|
||||||
_READ_TIMEOUT seconds, otherwise the last cached readback is returned to reduce network traffic.
|
on the response, and subscriptions are run for all channels. Readings are cached as implemented
|
||||||
|
in the SocketSignal class, so that multiple reads of the same channel within an update cycle do
|
||||||
|
not result in multiple socket calls.
|
||||||
|
|
||||||
Args:
|
Args:
|
||||||
signal_name (str): Name of the signal.
|
signal_name (str): The name of the signal, e.g. "ch0", "ch1", ..., "ch7"
|
||||||
channel (int): Analog channel number (0-7).
|
channel (int): The channel number corresponding to the signal, e.g. 0 for "ch0", 1 for "ch1", ...
|
||||||
parent (GalilRIO): Parent GalilRIO device.
|
parent (GalilRIO): The parent device instance that this signal belongs to.
|
||||||
"""
|
"""
|
||||||
|
|
||||||
_NUM_ANALOG_CHANNELS = 8
|
_NUM_ANALOG_CHANNELS = 8
|
||||||
_READ_TIMEOUT = 0.1 # seconds
|
|
||||||
|
def __init__(self, signal_name: str, channel: int, parent: GalilRIO, **kwargs):
|
||||||
|
super().__init__(signal_name=signal_name, parent=parent, **kwargs)
|
||||||
|
self._channel = channel
|
||||||
|
self._metadata["connected"] = False
|
||||||
|
self._metadata["write_access"] = False
|
||||||
|
|
||||||
|
def _socket_set(self, val):
|
||||||
|
"""Read-only signal, so set method raises an error."""
|
||||||
|
raise ReadOnlyError(f"Signal {self.name} is read-only.")
|
||||||
|
|
||||||
|
def _socket_get(self) -> float:
|
||||||
|
"""Get command for the readback signal"""
|
||||||
|
cmd = "MG@" + ", @".join([f"AN[{ii}]" for ii in range(self._NUM_ANALOG_CHANNELS)])
|
||||||
|
ret = self.controller.socket_put_and_receive(cmd)
|
||||||
|
values = [float(val) for val in ret.strip().split(" ")]
|
||||||
|
# Run updates for all channels. This also updates the _readback and metadata timestamp
|
||||||
|
# value of this channel.
|
||||||
|
self._update_all_channels(values)
|
||||||
|
return self._readback
|
||||||
|
|
||||||
|
# pylint: disable=protected-access
|
||||||
|
def _update_all_channels(self, values: list[float]) -> None:
|
||||||
|
"""
|
||||||
|
Method to receive a list of readback values for channels 0 to 7. Updates for each channel idx
|
||||||
|
are applied to the corresponding GalilRIOAnalogSignalRO signal with matching attr_name "ch{idx}".
|
||||||
|
|
||||||
|
We also update the _last_readback attribute of each of the signals, to avoid multiple socket calls,
|
||||||
|
but rather use the cached value of the combined reading for all channels.
|
||||||
|
|
||||||
|
Args:
|
||||||
|
values (list[float]): List of new readback values for all channels, where the
|
||||||
|
index corresponds to the channel number (0-7).
|
||||||
|
"""
|
||||||
|
updates: dict[str, tuple[float, float]] = {} # attr_name -> (new_val, old_val)
|
||||||
|
# Update all readbacks first
|
||||||
|
for walk in self.parent.walk_signals():
|
||||||
|
if isinstance(walk.item, GalilRIOAnalogSignalRO):
|
||||||
|
idx = int(walk.item.attr_name[-1])
|
||||||
|
if 0 <= idx < len(values):
|
||||||
|
old_val = walk.item._readback
|
||||||
|
new_val = values[idx]
|
||||||
|
walk.item._metadata["timestamp"] = self._last_readback
|
||||||
|
walk.item._last_readback = self._last_readback
|
||||||
|
walk.item._readback = new_val
|
||||||
|
if (
|
||||||
|
idx != self._channel
|
||||||
|
): # Only run subscriptions on other channels, not on itself
|
||||||
|
# as this is handled by the SocketSignal and we want to avoid running multiple
|
||||||
|
# subscriptions for the same channel update
|
||||||
|
updates[walk.item.attr_name] = (new_val, old_val)
|
||||||
|
else:
|
||||||
|
logger.warning(
|
||||||
|
f"Received {len(values)} values but found channel index {idx} in signal {walk.item.name}. Skipping update for this signal."
|
||||||
|
)
|
||||||
|
|
||||||
|
# Run subscriptions after all readbacks have been updated
|
||||||
|
# on all channels except the one that triggered the update
|
||||||
|
for walk in self.parent.walk_signals():
|
||||||
|
if walk.item.attr_name in updates:
|
||||||
|
new_val, old_val = updates[walk.item.attr_name]
|
||||||
|
walk.item._run_subs(
|
||||||
|
sub_type=walk.item.SUB_VALUE,
|
||||||
|
old_value=old_val,
|
||||||
|
value=new_val,
|
||||||
|
timestamp=self._last_readback,
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
class GalilRIODigitalOutSignal(GalilSignalBase):
|
||||||
|
"""Signal for controlling digital outputs of the Galil RIO controller."""
|
||||||
|
|
||||||
|
_NUM_DIGITAL_OUTPUT_CHANNELS = 16
|
||||||
|
|
||||||
def __init__(self, signal_name: str, channel: int, parent: GalilRIO, **kwargs):
|
def __init__(self, signal_name: str, channel: int, parent: GalilRIO, **kwargs):
|
||||||
super().__init__(signal_name, parent=parent, **kwargs)
|
super().__init__(signal_name, parent=parent, **kwargs)
|
||||||
@@ -87,81 +159,83 @@ class GalilRIOSignalRO(GalilSignalRO):
|
|||||||
|
|
||||||
def _socket_get(self) -> float:
|
def _socket_get(self) -> float:
|
||||||
"""Get command for the readback signal"""
|
"""Get command for the readback signal"""
|
||||||
cmd = "MG@" + ",@".join([f"AN[{ii}]" for ii in range(self._NUM_ANALOG_CHANNELS)])
|
cmd = f"MG@OUT[{self._channel}]"
|
||||||
ret = self.controller.socket_put_and_receive(cmd)
|
ret = self.controller.socket_put_and_receive(cmd)
|
||||||
values = [float(val) for val in ret.strip().split(" ")]
|
self._readback = float(ret.strip())
|
||||||
# This updates all channels' readbacks, including self._readback
|
|
||||||
self._update_all_channels(values)
|
|
||||||
return self._readback
|
return self._readback
|
||||||
|
|
||||||
def get(self):
|
def _socket_set(self, val: Literal[0, 1]) -> None:
|
||||||
"""Get current analog channel values from the Galil RIO controller."""
|
"""Set command for the digital output signal. Value should be 0 or 1."""
|
||||||
# If the last readback has happend more than _READ_TIMEOUT seconds ago, read all channels again
|
|
||||||
if time.monotonic() - self.parent.last_readback > self._READ_TIMEOUT:
|
|
||||||
self._readback = self._socket_get()
|
|
||||||
return self._readback
|
|
||||||
|
|
||||||
# pylint: disable=protected-access
|
if val not in (0, 1):
|
||||||
def _update_all_channels(self, values: list[float]) -> None:
|
raise ValueError("Digital output value must be 0 or 1.")
|
||||||
"""
|
cmd = f"SB{self._channel}" if val == 1 else f"CB{self._channel}"
|
||||||
Update all analog channel readbacks based on the provided list of values.
|
self.controller.socket_put_confirmed(cmd)
|
||||||
List of values must be in order from an_ch0 to an_ch7.
|
|
||||||
|
|
||||||
We first have to update the _last_readback timestamp of the GalilRIO parent device.
|
|
||||||
Then we update all readbacks of all an_ch channels, before we run any subscriptions.
|
|
||||||
This ensures that all readbacks are updated before any subscriptions are run, which
|
|
||||||
may themselves read other channels.
|
|
||||||
|
|
||||||
Args:
|
def _create_analog_channels(num_channels: int) -> dict[str, tuple]:
|
||||||
values (list[float]): List of 8 float values corresponding to the analog channels.
|
"""
|
||||||
They must be in order from an_ch0 to an_ch7.
|
Helper method to create a dictionary of analog channel definitions for the DynamicDeviceComponent.
|
||||||
"""
|
|
||||||
timestamp = time.time()
|
|
||||||
# Update parent's last readback before running subscriptions!!
|
|
||||||
self.parent._last_readback = time.monotonic()
|
|
||||||
updates: dict[str, tuple[float, float]] = {} # attr_name -> (new_val, old_val)
|
|
||||||
# Update all readbacks first
|
|
||||||
for walk in self.parent.walk_signals():
|
|
||||||
if walk.item.attr_name.startswith("an_ch"):
|
|
||||||
idx = int(walk.item.attr_name[-1])
|
|
||||||
if 0 <= idx < len(values):
|
|
||||||
old_val = walk.item._readback
|
|
||||||
new_val = values[idx]
|
|
||||||
walk.item._metadata["timestamp"] = timestamp
|
|
||||||
walk.item._readback = new_val
|
|
||||||
updates[walk.item.attr_name] = (new_val, old_val)
|
|
||||||
|
|
||||||
# Run subscriptions after all readbacks have been updated
|
Args:
|
||||||
for walk in self.parent.walk_signals():
|
num_channels (int): The number of analog channels to create.
|
||||||
if walk.item.attr_name in updates:
|
"""
|
||||||
new_val, old_val = updates[walk.item.attr_name]
|
an_channels = {}
|
||||||
walk.item._run_subs(
|
for i in range(0, num_channels):
|
||||||
sub_type=walk.item.SUB_VALUE,
|
an_channels[f"ch{i}"] = (
|
||||||
old_value=old_val,
|
GalilRIOAnalogSignalRO,
|
||||||
value=new_val,
|
f"ch{i}",
|
||||||
timestamp=timestamp,
|
{"kind": Kind.normal, "notify_bec": True, "channel": i, "doc": f"Analog channel {i}."},
|
||||||
)
|
)
|
||||||
|
return an_channels
|
||||||
|
|
||||||
|
|
||||||
|
def _create_digital_output_channels(num_channels: int) -> dict[str, tuple]:
|
||||||
|
"""
|
||||||
|
Helper method to create a dictionary of digital output channel definitions for the DynamicDeviceComponent.
|
||||||
|
|
||||||
|
Args:
|
||||||
|
num_channels (int): The number of digital output channels to create.
|
||||||
|
"""
|
||||||
|
di_out_channels = {}
|
||||||
|
for i in range(0, num_channels):
|
||||||
|
di_out_channels[f"ch{i}"] = (
|
||||||
|
GalilRIODigitalOutSignal,
|
||||||
|
f"ch{i}",
|
||||||
|
{
|
||||||
|
"kind": Kind.config,
|
||||||
|
"notify_bec": True,
|
||||||
|
"channel": i,
|
||||||
|
"doc": f"Digital output channel {i}.",
|
||||||
|
},
|
||||||
|
)
|
||||||
|
return di_out_channels
|
||||||
|
|
||||||
|
|
||||||
class GalilRIO(PSIDeviceBase):
|
class GalilRIO(PSIDeviceBase):
|
||||||
"""
|
"""
|
||||||
Galil RIO controller integration with 8 analog input channels. To implement the device,
|
Galil RIO controller integration with 16 digital output channels and 8 analog input channels.
|
||||||
please provide the appropriate host and port (default port is 23).
|
The default port for the controller is 23.
|
||||||
|
|
||||||
Args:
|
Args:
|
||||||
host (str): Hostname or IP address of the Galil RIO controller.
|
host (str): Hostname or IP address of the Galil RIO controller.
|
||||||
|
port (int, optional): Port number for the TCP/IP connection. Defaults to 23.
|
||||||
|
socket_cls (type[SocketIO], optional): Socket class to use for communication. Defaults to SocketIO.
|
||||||
|
scan_info (ScanInfo, optional): ScanInfo object for the device.
|
||||||
|
device_manager (DeviceManagerDS): The device manager instance that manages this device.
|
||||||
|
**kwargs: Additional keyword arguments passed to the PSIDeviceBase constructor.
|
||||||
"""
|
"""
|
||||||
|
|
||||||
SUB_CONNECTION_CHANGE = "connection_change"
|
SUB_CONNECTION_CHANGE = "connection_change"
|
||||||
|
|
||||||
an_ch0 = Cpt(GalilRIOSignalRO, signal_name="an_ch0", channel=0, doc="Analog input channel 0")
|
#############################
|
||||||
an_ch1 = Cpt(GalilRIOSignalRO, signal_name="an_ch1", channel=1, doc="Analog input channel 1")
|
### Analog input channels ###
|
||||||
an_ch2 = Cpt(GalilRIOSignalRO, signal_name="an_ch2", channel=2, doc="Analog input channel 2")
|
#############################
|
||||||
an_ch3 = Cpt(GalilRIOSignalRO, signal_name="an_ch3", channel=3, doc="Analog input channel 3")
|
|
||||||
an_ch4 = Cpt(GalilRIOSignalRO, signal_name="an_ch4", channel=4, doc="Analog input channel 4")
|
analog_in = DDC(_create_analog_channels(GalilRIOAnalogSignalRO._NUM_ANALOG_CHANNELS))
|
||||||
an_ch5 = Cpt(GalilRIOSignalRO, signal_name="an_ch5", channel=5, doc="Analog input channel 5")
|
digital_out = DDC(
|
||||||
an_ch6 = Cpt(GalilRIOSignalRO, signal_name="an_ch6", channel=6, doc="Analog input channel 6")
|
_create_digital_output_channels(GalilRIODigitalOutSignal._NUM_DIGITAL_OUTPUT_CHANNELS)
|
||||||
an_ch7 = Cpt(GalilRIOSignalRO, signal_name="an_ch7", channel=7, doc="Analog input channel 7")
|
)
|
||||||
|
|
||||||
def __init__(
|
def __init__(
|
||||||
self,
|
self,
|
||||||
@@ -177,19 +251,18 @@ class GalilRIO(PSIDeviceBase):
|
|||||||
if port is None:
|
if port is None:
|
||||||
port = 23 # Default port for Galil RIO controller
|
port = 23 # Default port for Galil RIO controller
|
||||||
self.controller = GalilRIOController(
|
self.controller = GalilRIOController(
|
||||||
socket_cls=socket_cls, socket_host=host, socket_port=port, device_manager=device_manager
|
name=f"GalilRIOController_{name}",
|
||||||
|
socket_cls=socket_cls,
|
||||||
|
socket_host=host,
|
||||||
|
socket_port=port,
|
||||||
|
device_manager=device_manager,
|
||||||
)
|
)
|
||||||
self._last_readback: float = time.monotonic()
|
self._readback_metadata: dict[str, float] = {"last_readback": 0.0}
|
||||||
super().__init__(name=name, device_manager=device_manager, scan_info=scan_info, **kwargs)
|
super().__init__(name=name, device_manager=device_manager, scan_info=scan_info, **kwargs)
|
||||||
self.controller.subscribe(
|
self.controller.subscribe(
|
||||||
self._update_connection_state, event_type=self.SUB_CONNECTION_CHANGE
|
self._update_connection_state, event_type=self.SUB_CONNECTION_CHANGE
|
||||||
)
|
)
|
||||||
|
|
||||||
@property
|
|
||||||
def last_readback(self) -> float:
|
|
||||||
"""Return the time of the last readback from the controller."""
|
|
||||||
return self._last_readback
|
|
||||||
|
|
||||||
# pylint: disable=arguments-differ
|
# pylint: disable=arguments-differ
|
||||||
def wait_for_connection(self, timeout: float = 30.0) -> None:
|
def wait_for_connection(self, timeout: float = 30.0) -> None:
|
||||||
"""Wait for the RIO controller to be connected within timeout period."""
|
"""Wait for the RIO controller to be connected within timeout period."""
|
||||||
@@ -207,7 +280,7 @@ class GalilRIO(PSIDeviceBase):
|
|||||||
|
|
||||||
|
|
||||||
if __name__ == "__main__":
|
if __name__ == "__main__":
|
||||||
HOST_NAME = "129.129.98.64"
|
HOST_NAME = "129.129.122.14"
|
||||||
from bec_server.device_server.tests.utils import DMMock
|
from bec_server.device_server.tests.utils import DMMock
|
||||||
|
|
||||||
dm = DMMock()
|
dm = DMMock()
|
||||||
|
|||||||
@@ -25,6 +25,34 @@ logger = bec_logger.logger
|
|||||||
|
|
||||||
|
|
||||||
class LamniGalilController(GalilController):
|
class LamniGalilController(GalilController):
|
||||||
|
|
||||||
|
|
||||||
|
# ============================================================
|
||||||
|
# Error status
|
||||||
|
# ============================================================
|
||||||
|
|
||||||
|
caperr_bits = {
|
||||||
|
0x01: "Cap1 outside expected left-stop range (early check)",
|
||||||
|
0x02: "Cap2 outside expected left-stop range (early check)",
|
||||||
|
0x04: "Cap1 too low during pressure-off check (near right boundary)",
|
||||||
|
0x08: "Cap2 too low during pressure-off check (near right boundary)",
|
||||||
|
0x10: "Cap1 exceeded allowed left-stop boundary during movement",
|
||||||
|
0x20: "Cap2 exceeded allowed left-stop boundary during movement (disabled in code)",
|
||||||
|
0x40: "Cap1 did not respond to test movement",
|
||||||
|
0x80: "Cap2 did not respond to test movement"
|
||||||
|
}
|
||||||
|
|
||||||
|
allaxrer_table = {
|
||||||
|
1: "Not all axes referenced after reference search",
|
||||||
|
2: "Pressure-loss emergency stop (pressure 14/15 active while motor C off)",
|
||||||
|
3: "Unexpected pressure OFF while soft-limits not yet set",
|
||||||
|
4: "Pressure valve mismatch (OUT13=0 but IN13=1)",
|
||||||
|
5: "Capacitive sensor boundary violations (caperr > 0)",
|
||||||
|
6: "Emergency Stop triggered (IN[5]=0)",
|
||||||
|
7: "Following error detected on one or more axes"
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
USER_ACCESS = [
|
USER_ACCESS = [
|
||||||
"describe",
|
"describe",
|
||||||
"show_running_threads",
|
"show_running_threads",
|
||||||
@@ -37,6 +65,8 @@ class LamniGalilController(GalilController):
|
|||||||
"get_motor_limit_switch",
|
"get_motor_limit_switch",
|
||||||
"is_motor_on",
|
"is_motor_on",
|
||||||
"all_axes_referenced",
|
"all_axes_referenced",
|
||||||
|
"lamni_lights_off",
|
||||||
|
"lamni_lights_on"
|
||||||
]
|
]
|
||||||
|
|
||||||
def show_status_other(self):
|
def show_status_other(self):
|
||||||
@@ -60,6 +90,47 @@ class LamniGalilController(GalilController):
|
|||||||
print("There is air pressure at the outer rotation radial.")
|
print("There is air pressure at the outer rotation radial.")
|
||||||
swver = float(self.socket_put_and_receive("MGswver"))
|
swver = float(self.socket_put_and_receive("MGswver"))
|
||||||
print(f"Lgalil LAMNI firmware version {swver:2.0f}.")
|
print(f"Lgalil LAMNI firmware version {swver:2.0f}.")
|
||||||
|
allaxref = int(float(self.socket_put_and_receive("MGallaxref")))
|
||||||
|
print(f"Error statuts:")
|
||||||
|
if allaxref == 1:
|
||||||
|
print(f"Allaxref = 1, all OK.")
|
||||||
|
else:
|
||||||
|
print(f"Allaxref = {allaxref}. Not all axes are referenced or error introduced preventing motion.")
|
||||||
|
allaxrer = int(float(self.socket_put_and_receive("MGallaxrer")))
|
||||||
|
print("\nallaxrer =", allaxrer)
|
||||||
|
print(self.decode_allaxrer(allaxrer))
|
||||||
|
caperr = int(float(self.socket_put_and_receive("MGcaperr")))
|
||||||
|
print("\nDecoding caperr =", caperr)
|
||||||
|
self.visualize_caperr(caperr)
|
||||||
|
|
||||||
|
def decode_allaxrer(self, code: int) -> str:
|
||||||
|
"""Return human-readable meaning of allaxrer code."""
|
||||||
|
return self.allaxrer_table.get(code, "Unknown allaxrer code")
|
||||||
|
|
||||||
|
def visualize_caperr(self, mask: int):
|
||||||
|
"""Pretty-print a bitmask visualization for caperr."""
|
||||||
|
print("\n=== CAPERR BITMASK VISUALIZER ===")
|
||||||
|
print(f"Raw value: {mask} (0x{mask:02X})")
|
||||||
|
print("----------------------------------\n")
|
||||||
|
|
||||||
|
print("Bit | Hex | Active | Meaning")
|
||||||
|
print("----------------------------------")
|
||||||
|
|
||||||
|
for bit, meaning in self.caperr_bits.items():
|
||||||
|
active = "YES" if mask & bit else "no"
|
||||||
|
print(f"{bit:3d} | 0x{bit:02X} | {active:6} | {meaning}")
|
||||||
|
|
||||||
|
print("\nActive flags:")
|
||||||
|
active_flags = [meaning for bit, meaning in self.caperr_bits.items() if mask & bit]
|
||||||
|
|
||||||
|
if active_flags:
|
||||||
|
for f in active_flags:
|
||||||
|
print(" ✓", f)
|
||||||
|
else:
|
||||||
|
print(" (none)")
|
||||||
|
|
||||||
|
print("\n==================================\n")
|
||||||
|
|
||||||
|
|
||||||
def lamni_lights_off(self):
|
def lamni_lights_off(self):
|
||||||
self.socket_put_confirmed("SB1")
|
self.socket_put_confirmed("SB1")
|
||||||
@@ -93,7 +164,7 @@ class LamniGalilReadbackSignal(GalilSignalRO):
|
|||||||
val = super().read()
|
val = super().read()
|
||||||
if self.parent.axis_Id_numeric == 2:
|
if self.parent.axis_Id_numeric == 2:
|
||||||
try:
|
try:
|
||||||
rt = self.parent.device_manager.devices[self.parent.rtx]
|
rt = self.parent.device_manager.devices[self.parent.rt]
|
||||||
if rt.enabled:
|
if rt.enabled:
|
||||||
rt.obj.controller.set_rotation_angle(val[self.parent.name]["value"])
|
rt.obj.controller.set_rotation_angle(val[self.parent.name]["value"])
|
||||||
except KeyError:
|
except KeyError:
|
||||||
@@ -147,7 +218,7 @@ class LamniGalilMotor(Device, PositionerBase):
|
|||||||
raise BECConfigError(
|
raise BECConfigError(
|
||||||
"device_mapping has been specified but the device_manager cannot be accessed."
|
"device_mapping has been specified but the device_manager cannot be accessed."
|
||||||
)
|
)
|
||||||
self.rt = self.device_mapping.get("rt")
|
self.rt = self.device_mapping.get("rt", "rtx")
|
||||||
|
|
||||||
super().__init__(
|
super().__init__(
|
||||||
prefix,
|
prefix,
|
||||||
|
|||||||
@@ -11,6 +11,7 @@ from ophyd.status import wait as status_wait
|
|||||||
from ophyd.utils import LimitError, ReadOnlyError
|
from ophyd.utils import LimitError, ReadOnlyError
|
||||||
from ophyd_devices.utils.controller import Controller, threadlocked
|
from ophyd_devices.utils.controller import Controller, threadlocked
|
||||||
from ophyd_devices.utils.socket import SocketIO, SocketSignal, raise_if_disconnected
|
from ophyd_devices.utils.socket import SocketIO, SocketSignal, raise_if_disconnected
|
||||||
|
from prettytable import PrettyTable
|
||||||
|
|
||||||
from csaxs_bec.devices.omny.rt.rt_ophyd import RtCommunicationError, RtError
|
from csaxs_bec.devices.omny.rt.rt_ophyd import RtCommunicationError, RtError
|
||||||
|
|
||||||
@@ -51,6 +52,7 @@ class RtLamniController(Controller):
|
|||||||
_axes_per_controller = 3
|
_axes_per_controller = 3
|
||||||
USER_ACCESS = [
|
USER_ACCESS = [
|
||||||
"socket_put_and_receive",
|
"socket_put_and_receive",
|
||||||
|
"socket_put",
|
||||||
"set_rotation_angle",
|
"set_rotation_angle",
|
||||||
"feedback_disable",
|
"feedback_disable",
|
||||||
"feedback_enable_without_reset",
|
"feedback_enable_without_reset",
|
||||||
@@ -62,6 +64,11 @@ class RtLamniController(Controller):
|
|||||||
"_set_axis_velocity_maximum_speed",
|
"_set_axis_velocity_maximum_speed",
|
||||||
"_position_sampling_single_read",
|
"_position_sampling_single_read",
|
||||||
"_position_sampling_single_reset_and_start_sampling",
|
"_position_sampling_single_reset_and_start_sampling",
|
||||||
|
"show_signal_strength_interferometer",
|
||||||
|
"show_interferometer_positions",
|
||||||
|
"show_analog_signals",
|
||||||
|
"show_feedback_status",
|
||||||
|
|
||||||
]
|
]
|
||||||
|
|
||||||
def __init__(
|
def __init__(
|
||||||
@@ -208,8 +215,9 @@ class RtLamniController(Controller):
|
|||||||
|
|
||||||
@threadlocked
|
@threadlocked
|
||||||
def start_scan(self):
|
def start_scan(self):
|
||||||
interferometer_feedback_not_running = int((self.socket_put_and_receive("J2")).split(",")[0])
|
# interferometer_feedback_not_running = int((self.socket_put_and_receive("J2")).split(",")[0])
|
||||||
if interferometer_feedback_not_running == 1:
|
# if interferometer_feedback_not_running == 1:
|
||||||
|
if not self.feedback_is_running():
|
||||||
logger.error(
|
logger.error(
|
||||||
"Cannot start scan because feedback loop is not running or there is an interferometer error."
|
"Cannot start scan because feedback loop is not running or there is an interferometer error."
|
||||||
)
|
)
|
||||||
@@ -270,6 +278,44 @@ class RtLamniController(Controller):
|
|||||||
"average_lamni_angle": {"value": self.average_lamni_angle / (int(return_table[0]) + 1)},
|
"average_lamni_angle": {"value": self.average_lamni_angle / (int(return_table[0]) + 1)},
|
||||||
}
|
}
|
||||||
return signals
|
return signals
|
||||||
|
|
||||||
|
def feedback_is_running(self) -> bool:
|
||||||
|
status = int(float((self.socket_put_and_receive("J2")).split(",")[0]))
|
||||||
|
return status == 0 # 0 means running, 1 means error/disabled
|
||||||
|
|
||||||
|
def show_feedback_status(self):
|
||||||
|
if self.feedback_is_running():
|
||||||
|
print("Loop is running, no error on interferometer.")
|
||||||
|
else:
|
||||||
|
print("Loop is not running, either it is turned off or an interferometer error occurred.")
|
||||||
|
|
||||||
|
|
||||||
|
def show_analog_signals(self) -> dict:
|
||||||
|
self.socket_put("As") # start sampling
|
||||||
|
time.sleep(0.01)
|
||||||
|
return_table = (self.socket_put_and_receive("Ar")).split(",")
|
||||||
|
|
||||||
|
number_of_samples = int(float(return_table[0]))
|
||||||
|
signals = {
|
||||||
|
"number_of_samples": number_of_samples,
|
||||||
|
"piezo_0": float(return_table[1]),
|
||||||
|
"piezo_1": float(return_table[2]),
|
||||||
|
"cap_0": float(return_table[3]),
|
||||||
|
"cap_1": float(return_table[4]),
|
||||||
|
"cap_2": float(return_table[5]),
|
||||||
|
"cap_3": float(return_table[6]),
|
||||||
|
"cap_4": float(return_table[7]),
|
||||||
|
}
|
||||||
|
|
||||||
|
t = PrettyTable()
|
||||||
|
t.title = f"LamNI Analog Signals ({number_of_samples} samples)"
|
||||||
|
t.field_names = ["Signal", "Value"]
|
||||||
|
for key, val in signals.items():
|
||||||
|
if key != "number_of_samples":
|
||||||
|
t.add_row([key, f"{val:.4f}"])
|
||||||
|
print(t)
|
||||||
|
|
||||||
|
return
|
||||||
|
|
||||||
def read_positions_from_sampler(self):
|
def read_positions_from_sampler(self):
|
||||||
# this was for reading after the scan completed
|
# this was for reading after the scan completed
|
||||||
@@ -347,6 +393,48 @@ class RtLamniController(Controller):
|
|||||||
)
|
)
|
||||||
return bool(return_table[0])
|
return bool(return_table[0])
|
||||||
|
|
||||||
|
def show_signal_strength_interferometer(self):
|
||||||
|
# trigger SSI averaging before reading
|
||||||
|
self.socket_put("J3")
|
||||||
|
time.sleep(0.05)
|
||||||
|
return_table = (self.socket_put_and_receive("J2")).split(",")
|
||||||
|
ssi_0 = float(return_table[1])
|
||||||
|
ssi_1 = float(return_table[2])
|
||||||
|
|
||||||
|
return_table_angle = (self.socket_put_and_receive("J7")).split(",")
|
||||||
|
angle_running = bool(int(float(return_table_angle[0])))
|
||||||
|
angle_position = float(return_table_angle[1])
|
||||||
|
angle_signal = float(return_table_angle[2])
|
||||||
|
|
||||||
|
t = PrettyTable()
|
||||||
|
t.title = "Interferometer signal strength"
|
||||||
|
t.field_names = ["Axis", "Description", "Value", "Running"]
|
||||||
|
t.add_row([0, "ST FZP horizontal", ssi_0, "-"])
|
||||||
|
t.add_row([1, "ST FZP vertical", ssi_1, "-"])
|
||||||
|
t.add_row([2, "Angle interferometer", angle_signal, angle_running])
|
||||||
|
print(t)
|
||||||
|
|
||||||
|
if angle_running:
|
||||||
|
print(f"Angle interferometer position: {angle_position:.4f} um")
|
||||||
|
else:
|
||||||
|
print("Warning: angle interferometer is not running.")
|
||||||
|
|
||||||
|
def show_interferometer_positions(self) -> dict:
|
||||||
|
return_table = (self.socket_put_and_receive("J4")).split(",")
|
||||||
|
loop_status = bool(int(float(return_table[0])))
|
||||||
|
pos_y = float(return_table[1])
|
||||||
|
pos_x = float(return_table[2])
|
||||||
|
|
||||||
|
t = PrettyTable()
|
||||||
|
t.title = "LamNI Interferometer Positions"
|
||||||
|
t.field_names = ["Axis", "Description", "Position (um)"]
|
||||||
|
t.add_row([0, "X", f"{pos_x:.4f}"])
|
||||||
|
t.add_row([1, "Y", f"{pos_y:.4f}"])
|
||||||
|
print(t)
|
||||||
|
print(f"Feedback loop running: {loop_status}")
|
||||||
|
|
||||||
|
return {"x": pos_x, "y": pos_y, "loop_running": loop_status}
|
||||||
|
|
||||||
def feedback_enable_with_reset(self):
|
def feedback_enable_with_reset(self):
|
||||||
if not self.feedback_status_angle_lamni():
|
if not self.feedback_status_angle_lamni():
|
||||||
self.feedback_disable_and_even_reset_lamni_angle_interferometer()
|
self.feedback_disable_and_even_reset_lamni_angle_interferometer()
|
||||||
|
|||||||
@@ -25,6 +25,8 @@ dependencies = [
|
|||||||
"bec_widgets",
|
"bec_widgets",
|
||||||
"zmq",
|
"zmq",
|
||||||
"opencv-python",
|
"opencv-python",
|
||||||
|
"dectris-compression", # for JFJ preview stream decompression
|
||||||
|
"cbor2",
|
||||||
]
|
]
|
||||||
|
|
||||||
[project.optional-dependencies]
|
[project.optional-dependencies]
|
||||||
|
|||||||
@@ -1,14 +0,0 @@
|
|||||||
"""
|
|
||||||
Conftest runs for all tests in this directory and subdirectories. Thereby, we know for
|
|
||||||
certain that the SocketSignal.READBACK_TIMEOUT is set to 0 for all tests, which prevents
|
|
||||||
hanging tests when a readback is attempted on a non-connected socket.
|
|
||||||
"""
|
|
||||||
|
|
||||||
# conftest.py
|
|
||||||
import pytest
|
|
||||||
from ophyd_devices.utils.socket import SocketSignal
|
|
||||||
|
|
||||||
|
|
||||||
@pytest.fixture(autouse=True)
|
|
||||||
def patch_socket_timeout(monkeypatch):
|
|
||||||
monkeypatch.setattr(SocketSignal, "READBACK_TIMEOUT", 0.0)
|
|
||||||
@@ -5,6 +5,7 @@ from time import time
|
|||||||
from typing import TYPE_CHECKING, Generator
|
from typing import TYPE_CHECKING, Generator
|
||||||
from unittest import mock
|
from unittest import mock
|
||||||
|
|
||||||
|
import numpy as np
|
||||||
import pytest
|
import pytest
|
||||||
from bec_lib.messages import FileMessage, ScanStatusMessage
|
from bec_lib.messages import FileMessage, ScanStatusMessage
|
||||||
from jfjoch_client.models.broker_status import BrokerStatus
|
from jfjoch_client.models.broker_status import BrokerStatus
|
||||||
@@ -78,7 +79,7 @@ def detector_list(request) -> Generator[DetectorList, None, None]:
|
|||||||
),
|
),
|
||||||
DetectorListElement(
|
DetectorListElement(
|
||||||
id=2,
|
id=2,
|
||||||
description="EIGER 8.5M (tmp)",
|
description="EIGER 9M",
|
||||||
serial_number="123456",
|
serial_number="123456",
|
||||||
base_ipv4_addr="192.168.0.1",
|
base_ipv4_addr="192.168.0.1",
|
||||||
udp_interface_count=1,
|
udp_interface_count=1,
|
||||||
@@ -103,7 +104,11 @@ def eiger_1_5m(mock_scan_info) -> Generator[Eiger1_5M, None, None]:
|
|||||||
name = "eiger_1_5m"
|
name = "eiger_1_5m"
|
||||||
dev = Eiger1_5M(name=name, beam_center=(256, 256), detector_distance=100.0)
|
dev = Eiger1_5M(name=name, beam_center=(256, 256), detector_distance=100.0)
|
||||||
dev.scan_info.msg = mock_scan_info
|
dev.scan_info.msg = mock_scan_info
|
||||||
yield dev
|
try:
|
||||||
|
yield dev
|
||||||
|
finally:
|
||||||
|
if dev._destroyed is False:
|
||||||
|
dev.destroy()
|
||||||
|
|
||||||
|
|
||||||
@pytest.fixture(scope="function")
|
@pytest.fixture(scope="function")
|
||||||
@@ -113,7 +118,19 @@ def eiger_9m(mock_scan_info) -> Generator[Eiger9M, None, None]:
|
|||||||
name = "eiger_9m"
|
name = "eiger_9m"
|
||||||
dev = Eiger9M(name=name)
|
dev = Eiger9M(name=name)
|
||||||
dev.scan_info.msg = mock_scan_info
|
dev.scan_info.msg = mock_scan_info
|
||||||
yield dev
|
try:
|
||||||
|
yield dev
|
||||||
|
finally:
|
||||||
|
if dev._destroyed is False:
|
||||||
|
dev.destroy()
|
||||||
|
|
||||||
|
|
||||||
|
def test_eiger_wait_for_connection(eiger_1_5m, eiger_9m):
|
||||||
|
"""Test the wait_for_connection metho is calling status_get on the JFJ API client."""
|
||||||
|
for eiger in (eiger_1_5m, eiger_9m):
|
||||||
|
with mock.patch.object(eiger.jfj_client.api, "status_get") as mock_status_get:
|
||||||
|
eiger.wait_for_connection(timeout=1)
|
||||||
|
mock_status_get.assert_called_once_with(_request_timeout=1)
|
||||||
|
|
||||||
|
|
||||||
@pytest.mark.parametrize("detector_state", ["Idle", "Inactive"])
|
@pytest.mark.parametrize("detector_state", ["Idle", "Inactive"])
|
||||||
@@ -141,7 +158,7 @@ def test_eiger_1_5m_on_connected(eiger_1_5m, detector_list, detector_state):
|
|||||||
else:
|
else:
|
||||||
eiger.on_connected()
|
eiger.on_connected()
|
||||||
assert mock_set_det.call_args == mock.call(
|
assert mock_set_det.call_args == mock.call(
|
||||||
DetectorSettings(frame_time_us=500, timing=DetectorTiming.TRIGGER), timeout=10
|
DetectorSettings(frame_time_us=500, timing=DetectorTiming.TRIGGER), timeout=5
|
||||||
)
|
)
|
||||||
assert mock_file_writer.call_args == mock.call(
|
assert mock_file_writer.call_args == mock.call(
|
||||||
file_writer_settings=FileWriterSettings(
|
file_writer_settings=FileWriterSettings(
|
||||||
@@ -179,7 +196,7 @@ def test_eiger_9m_on_connected(eiger_9m, detector_list, detector_state):
|
|||||||
else:
|
else:
|
||||||
eiger.on_connected()
|
eiger.on_connected()
|
||||||
assert mock_set_det.call_args == mock.call(
|
assert mock_set_det.call_args == mock.call(
|
||||||
DetectorSettings(frame_time_us=500, timing=DetectorTiming.TRIGGER), timeout=10
|
DetectorSettings(frame_time_us=500, timing=DetectorTiming.TRIGGER), timeout=5
|
||||||
)
|
)
|
||||||
assert mock_file_writer.call_args == mock.call(
|
assert mock_file_writer.call_args == mock.call(
|
||||||
file_writer_settings=FileWriterSettings(
|
file_writer_settings=FileWriterSettings(
|
||||||
@@ -216,11 +233,39 @@ def test_eiger_on_stop(eiger_1_5m):
|
|||||||
stop_event.wait(timeout=5) # Thread should be killed from task_handler
|
stop_event.wait(timeout=5) # Thread should be killed from task_handler
|
||||||
|
|
||||||
|
|
||||||
|
def test_eiger_on_destroy(eiger_1_5m):
|
||||||
|
"""Test the on_destroy logic of the Eiger detector. This is equivalent for 9M and 1_5M."""
|
||||||
|
eiger = eiger_1_5m
|
||||||
|
start_event = threading.Event()
|
||||||
|
stop_event = threading.Event()
|
||||||
|
|
||||||
|
def tmp_task():
|
||||||
|
start_event.set()
|
||||||
|
try:
|
||||||
|
while True:
|
||||||
|
time.sleep(0.1)
|
||||||
|
finally:
|
||||||
|
stop_event.set()
|
||||||
|
|
||||||
|
eiger.task_handler.submit_task(tmp_task)
|
||||||
|
start_event.wait(timeout=5)
|
||||||
|
|
||||||
|
with (
|
||||||
|
mock.patch.object(eiger.jfj_preview_client, "stop") as mock_jfj_preview_client_stop,
|
||||||
|
mock.patch.object(eiger.jfj_client, "stop") as mock_jfj_client_stop,
|
||||||
|
):
|
||||||
|
eiger.on_destroy()
|
||||||
|
mock_jfj_preview_client_stop.assert_called_once()
|
||||||
|
mock_jfj_client_stop.assert_called_once()
|
||||||
|
stop_event.wait(timeout=5)
|
||||||
|
|
||||||
|
|
||||||
@pytest.mark.timeout(25)
|
@pytest.mark.timeout(25)
|
||||||
@pytest.mark.parametrize("raise_timeout", [True, False])
|
@pytest.mark.parametrize("raise_timeout", [True, False])
|
||||||
def test_eiger_on_complete(eiger_1_5m, raise_timeout):
|
def test_eiger_on_complete(eiger_1_5m, raise_timeout):
|
||||||
"""Test the on_complete logic of the Eiger detector. This is equivalent for 9M and 1_5M."""
|
"""Test the on_complete logic of the Eiger detector. This is equivalent for 9M and 1_5M."""
|
||||||
eiger = eiger_1_5m
|
eiger = eiger_1_5m
|
||||||
|
eiger._wait_for_on_complete = 1 # reduce wait time for testing
|
||||||
|
|
||||||
callback_completed_event = threading.Event()
|
callback_completed_event = threading.Event()
|
||||||
|
|
||||||
@@ -230,7 +275,7 @@ def test_eiger_on_complete(eiger_1_5m, raise_timeout):
|
|||||||
|
|
||||||
unblock_wait_for_idle = threading.Event()
|
unblock_wait_for_idle = threading.Event()
|
||||||
|
|
||||||
def mock_wait_for_idle(timeout: int, request_timeout: float):
|
def mock_wait_for_idle(timeout: float, raise_on_timeout: bool) -> bool:
|
||||||
if unblock_wait_for_idle.wait(timeout):
|
if unblock_wait_for_idle.wait(timeout):
|
||||||
if raise_timeout:
|
if raise_timeout:
|
||||||
return False
|
return False
|
||||||
@@ -238,11 +283,18 @@ def test_eiger_on_complete(eiger_1_5m, raise_timeout):
|
|||||||
return False
|
return False
|
||||||
|
|
||||||
with (
|
with (
|
||||||
|
mock.patch.object(
|
||||||
|
eiger.jfj_client.api, "status_get", return_value=BrokerStatus(state="Idle")
|
||||||
|
),
|
||||||
mock.patch.object(eiger.jfj_client, "wait_for_idle", side_effect=mock_wait_for_idle),
|
mock.patch.object(eiger.jfj_client, "wait_for_idle", side_effect=mock_wait_for_idle),
|
||||||
mock.patch.object(
|
mock.patch.object(
|
||||||
eiger.jfj_client.api,
|
eiger.jfj_client.api,
|
||||||
"statistics_data_collection_get",
|
"statistics_data_collection_get",
|
||||||
return_value=MeasurementStatistics(run_number=1),
|
return_value=MeasurementStatistics(
|
||||||
|
run_number=1,
|
||||||
|
images_collected=eiger.scan_info.msg.num_points
|
||||||
|
* eiger.scan_info.msg.scan_parameters["frames_per_trigger"],
|
||||||
|
),
|
||||||
),
|
),
|
||||||
):
|
):
|
||||||
status = eiger.complete()
|
status = eiger.complete()
|
||||||
@@ -284,7 +336,7 @@ def test_eiger_file_event_callback(eiger_1_5m, tmp_path):
|
|||||||
assert file_msg.hinted_h5_entries == {"data": "entry/data/data"}
|
assert file_msg.hinted_h5_entries == {"data": "entry/data/data"}
|
||||||
|
|
||||||
|
|
||||||
def test_eiger_on_sage(eiger_1_5m):
|
def test_eiger_on_stage(eiger_1_5m):
|
||||||
"""Test the on_stage and on_unstage logic of the Eiger detector. This is equivalent for 9M and 1_5M."""
|
"""Test the on_stage and on_unstage logic of the Eiger detector. This is equivalent for 9M and 1_5M."""
|
||||||
eiger = eiger_1_5m
|
eiger = eiger_1_5m
|
||||||
scan_msg = eiger.scan_info.msg
|
scan_msg = eiger.scan_info.msg
|
||||||
@@ -316,3 +368,35 @@ def test_eiger_on_sage(eiger_1_5m):
|
|||||||
)
|
)
|
||||||
assert mock_start.call_args == mock.call(settings=data_settings)
|
assert mock_start.call_args == mock.call(settings=data_settings)
|
||||||
assert eiger.staged is Staged.yes
|
assert eiger.staged is Staged.yes
|
||||||
|
|
||||||
|
|
||||||
|
def test_eiger_set_det_distance_test_beam_center(eiger_1_5m):
|
||||||
|
"""Test the set_detector_distance and set_beam_center methods. Equivalent for 9M and 1_5M."""
|
||||||
|
eiger = eiger_1_5m
|
||||||
|
old_distance = eiger.detector_distance
|
||||||
|
new_distance = old_distance + 100
|
||||||
|
old_beam_center = eiger.beam_center
|
||||||
|
new_beam_center = (old_beam_center[0] + 20, old_beam_center[1] + 50)
|
||||||
|
eiger.set_detector_distance(new_distance)
|
||||||
|
assert eiger.detector_distance == new_distance
|
||||||
|
eiger.set_beam_center(x=new_beam_center[0], y=new_beam_center[1])
|
||||||
|
assert eiger.beam_center == new_beam_center
|
||||||
|
with pytest.raises(ValueError):
|
||||||
|
eiger.set_beam_center(x=-10, y=100) # Cannot set negative beam center
|
||||||
|
with pytest.raises(ValueError):
|
||||||
|
eiger.detector_distance = -50 # Cannot set negative detector distance
|
||||||
|
|
||||||
|
|
||||||
|
def test_eiger_preview_callback(eiger_1_5m):
|
||||||
|
"""Preview callback test for the Eiger detector. This is equivalent for 9M and 1_5M."""
|
||||||
|
eiger = eiger_1_5m
|
||||||
|
# NOTE: I don't find models for the CBOR messages used by JFJ, currently using a dummay dict.
|
||||||
|
# Please adjust once the proper model is found.
|
||||||
|
for msg_type in ["start", "end", "image", "calibration", "metadata"]:
|
||||||
|
msg = {"type": msg_type, "data": {"default": np.array([[1, 2], [3, 4]])}}
|
||||||
|
with mock.patch.object(eiger.preview_image, "put") as mock_preview_put:
|
||||||
|
eiger._preview_callback(msg)
|
||||||
|
if msg_type == "image":
|
||||||
|
mock_preview_put.assert_called_once_with(msg["data"]["default"])
|
||||||
|
else:
|
||||||
|
mock_preview_put.assert_not_called()
|
||||||
|
|||||||
@@ -2,6 +2,7 @@ from unittest import mock
|
|||||||
|
|
||||||
import pytest
|
import pytest
|
||||||
from ophyd_devices.tests.utils import SocketMock
|
from ophyd_devices.tests.utils import SocketMock
|
||||||
|
from ophyd_devices.utils.socket import SocketSignal
|
||||||
|
|
||||||
from csaxs_bec.devices.omny.galil.fupr_ophyd import FuprGalilController, FuprGalilMotor
|
from csaxs_bec.devices.omny.galil.fupr_ophyd import FuprGalilController, FuprGalilMotor
|
||||||
|
|
||||||
@@ -17,6 +18,11 @@ def fsamroy(dm_with_devices):
|
|||||||
socket_cls=SocketMock,
|
socket_cls=SocketMock,
|
||||||
device_manager=dm_with_devices,
|
device_manager=dm_with_devices,
|
||||||
)
|
)
|
||||||
|
for walk in fsamroy_motor.walk_signals():
|
||||||
|
if isinstance(walk.item, SocketSignal):
|
||||||
|
walk.item._readback_timeout = (
|
||||||
|
0.0 # Set the readback timeout to 0 to avoid waiting during tests
|
||||||
|
)
|
||||||
fsamroy_motor.controller.on()
|
fsamroy_motor.controller.on()
|
||||||
assert isinstance(fsamroy_motor.controller, FuprGalilController)
|
assert isinstance(fsamroy_motor.controller, FuprGalilController)
|
||||||
yield fsamroy_motor
|
yield fsamroy_motor
|
||||||
|
|||||||
@@ -9,7 +9,11 @@ from ophyd_devices.tests.utils import SocketMock
|
|||||||
from csaxs_bec.devices.npoint.npoint import NPointAxis, NPointController
|
from csaxs_bec.devices.npoint.npoint import NPointAxis, NPointController
|
||||||
from csaxs_bec.devices.omny.galil.fgalil_ophyd import FlomniGalilController, FlomniGalilMotor
|
from csaxs_bec.devices.omny.galil.fgalil_ophyd import FlomniGalilController, FlomniGalilMotor
|
||||||
from csaxs_bec.devices.omny.galil.fupr_ophyd import FuprGalilController, FuprGalilMotor
|
from csaxs_bec.devices.omny.galil.fupr_ophyd import FuprGalilController, FuprGalilMotor
|
||||||
from csaxs_bec.devices.omny.galil.galil_rio import GalilRIO, GalilRIOController, GalilRIOSignalRO
|
from csaxs_bec.devices.omny.galil.galil_rio import (
|
||||||
|
GalilRIO,
|
||||||
|
GalilRIOAnalogSignalRO,
|
||||||
|
GalilRIOController,
|
||||||
|
)
|
||||||
from csaxs_bec.devices.omny.galil.lgalil_ophyd import LamniGalilController, LamniGalilMotor
|
from csaxs_bec.devices.omny.galil.lgalil_ophyd import LamniGalilController, LamniGalilMotor
|
||||||
from csaxs_bec.devices.omny.galil.ogalil_ophyd import OMNYGalilController, OMNYGalilMotor
|
from csaxs_bec.devices.omny.galil.ogalil_ophyd import OMNYGalilController, OMNYGalilMotor
|
||||||
from csaxs_bec.devices.omny.galil.sgalil_ophyd import GalilController, SGalilMotor
|
from csaxs_bec.devices.omny.galil.sgalil_ophyd import GalilController, SGalilMotor
|
||||||
@@ -272,26 +276,27 @@ def test_galil_rio_signal_read(galil_rio):
|
|||||||
## Test read of all channels
|
## Test read of all channels
|
||||||
###########
|
###########
|
||||||
|
|
||||||
assert galil_rio.an_ch0._READ_TIMEOUT == 0.1 # Default read timeout of 100ms
|
assert galil_rio.analog_in.ch0._readback_timeout == 0.1 # Default read timeout of 100ms
|
||||||
# Mock the socket to return specific values
|
# Mock the socket to return specific values
|
||||||
galil_rio.controller.sock.buffer_recv = [b" 1.234 2.345 3.456 4.567 5.678 6.789 7.890 8.901"]
|
analog_bufffer = b" 1.234 2.345 3.456 4.567 5.678 6.789 7.890 8.901\r\n"
|
||||||
galil_rio._last_readback = 0 # Force read from controller
|
galil_rio.controller.sock.buffer_recv = [] # Clear any existing buffer
|
||||||
|
galil_rio.controller.sock.buffer_recv.append(analog_bufffer)
|
||||||
read_values = galil_rio.read()
|
read_values = galil_rio.read()
|
||||||
assert len(read_values) == 8 # 8 channels
|
assert len(read_values) == 8 # 8 channels
|
||||||
|
|
||||||
expected_values = {
|
expected_values = {
|
||||||
galil_rio.an_ch0.name: {"value": 1.234},
|
galil_rio.analog_in.ch0.name: {"value": 1.234},
|
||||||
galil_rio.an_ch1.name: {"value": 2.345},
|
galil_rio.analog_in.ch1.name: {"value": 2.345},
|
||||||
galil_rio.an_ch2.name: {"value": 3.456},
|
galil_rio.analog_in.ch2.name: {"value": 3.456},
|
||||||
galil_rio.an_ch3.name: {"value": 4.567},
|
galil_rio.analog_in.ch3.name: {"value": 4.567},
|
||||||
galil_rio.an_ch4.name: {"value": 5.678},
|
galil_rio.analog_in.ch4.name: {"value": 5.678},
|
||||||
galil_rio.an_ch5.name: {"value": 6.789},
|
galil_rio.analog_in.ch5.name: {"value": 6.789},
|
||||||
galil_rio.an_ch6.name: {"value": 7.890},
|
galil_rio.analog_in.ch6.name: {"value": 7.890},
|
||||||
galil_rio.an_ch7.name: {"value": 8.901},
|
galil_rio.analog_in.ch7.name: {"value": 8.901},
|
||||||
}
|
}
|
||||||
# All timestamps should be the same
|
# All timestamps should be the same
|
||||||
assert all(
|
assert all(
|
||||||
ret["timestamp"] == read_values[galil_rio.an_ch0.name]["timestamp"]
|
ret["timestamp"] == read_values[galil_rio.analog_in.ch0.name]["timestamp"]
|
||||||
for signal_name, ret in read_values.items()
|
for signal_name, ret in read_values.items()
|
||||||
)
|
)
|
||||||
# Check values
|
# Check values
|
||||||
@@ -301,7 +306,7 @@ def test_galil_rio_signal_read(galil_rio):
|
|||||||
|
|
||||||
# Check communication command to socker
|
# Check communication command to socker
|
||||||
assert galil_rio.controller.sock.buffer_put == [
|
assert galil_rio.controller.sock.buffer_put == [
|
||||||
b"MG@AN[0],@AN[1],@AN[2],@AN[3],@AN[4],@AN[5],@AN[6],@AN[7]\r"
|
b"MG@AN[0], @AN[1], @AN[2], @AN[3], @AN[4], @AN[5], @AN[6], @AN[7]\r"
|
||||||
]
|
]
|
||||||
|
|
||||||
###########
|
###########
|
||||||
@@ -313,11 +318,11 @@ def test_galil_rio_signal_read(galil_rio):
|
|||||||
|
|
||||||
def value_callback(value, old_value, **kwargs):
|
def value_callback(value, old_value, **kwargs):
|
||||||
obj = kwargs.get("obj")
|
obj = kwargs.get("obj")
|
||||||
galil = obj.parent
|
galil = obj.parent.parent
|
||||||
readback = galil.read()
|
readback = galil.read()
|
||||||
value_callback_buffer.append(readback)
|
value_callback_buffer.append(readback)
|
||||||
|
|
||||||
galil_rio.an_ch0.subscribe(value_callback, run=False)
|
galil_rio.analog_in.ch0.subscribe(value_callback, run=False)
|
||||||
galil_rio.controller.sock.buffer_recv = [b" 2.5 2.6 2.7 2.8 2.9 3.0 3.1 3.2"]
|
galil_rio.controller.sock.buffer_recv = [b" 2.5 2.6 2.7 2.8 2.9 3.0 3.1 3.2"]
|
||||||
expected_values = [2.5, 2.6, 2.7, 2.8, 2.9, 3.0, 3.1, 3.2]
|
expected_values = [2.5, 2.6, 2.7, 2.8, 2.9, 3.0, 3.1, 3.2]
|
||||||
|
|
||||||
@@ -327,13 +332,15 @@ def test_galil_rio_signal_read(galil_rio):
|
|||||||
|
|
||||||
# Should have used the cached value
|
# Should have used the cached value
|
||||||
for walk in galil_rio.walk_signals():
|
for walk in galil_rio.walk_signals():
|
||||||
walk.item._READ_TIMEOUT = 10 # Make sure cached read is used
|
walk.item._readback_timeout = 10 # Make sure cached read is used
|
||||||
ret = galil_rio.an_ch0.read()
|
ret = galil_rio.analog_in.ch0.read()
|
||||||
|
|
||||||
# Should not trigger callback since value did not change
|
# Should not trigger callback since value did not change
|
||||||
assert np.isclose(ret[galil_rio.an_ch0.name]["value"], 1.234)
|
assert np.isclose(ret[galil_rio.analog_in.ch0.name]["value"], 1.234)
|
||||||
# Same timestamp as for another channel as this is cached read
|
# Same timestamp as for another channel as this is cached read
|
||||||
assert np.isclose(ret[galil_rio.an_ch0.name]["timestamp"], galil_rio.an_ch7.timestamp)
|
assert np.isclose(
|
||||||
|
ret[galil_rio.analog_in.ch0.name]["timestamp"], galil_rio.analog_in.ch7.timestamp
|
||||||
|
)
|
||||||
assert len(value_callback_buffer) == 0
|
assert len(value_callback_buffer) == 0
|
||||||
|
|
||||||
##################
|
##################
|
||||||
@@ -341,10 +348,10 @@ def test_galil_rio_signal_read(galil_rio):
|
|||||||
##################
|
##################
|
||||||
|
|
||||||
# Now force a read from the controller
|
# Now force a read from the controller
|
||||||
galil_rio._last_readback = 0 # Force read from controller
|
galil_rio.analog_in.ch0._last_readback = 0 # Force read from controller
|
||||||
ret = galil_rio.an_ch0.read()
|
ret = galil_rio.analog_in.ch0.read()
|
||||||
|
|
||||||
assert np.isclose(ret[galil_rio.an_ch0.name]["value"], 2.5)
|
assert np.isclose(ret[galil_rio.analog_in.ch0.name]["value"], 2.5)
|
||||||
|
|
||||||
# Check callback invocation, but only 1 callback even with galil_rio.read() call in callback
|
# Check callback invocation, but only 1 callback even with galil_rio.read() call in callback
|
||||||
assert len(value_callback_buffer) == 1
|
assert len(value_callback_buffer) == 1
|
||||||
@@ -352,7 +359,45 @@ def test_galil_rio_signal_read(galil_rio):
|
|||||||
assert np.isclose(values, expected_values).all()
|
assert np.isclose(values, expected_values).all()
|
||||||
assert all(
|
assert all(
|
||||||
[
|
[
|
||||||
value["timestamp"] == value_callback_buffer[0][galil_rio.an_ch0.name]["timestamp"]
|
value["timestamp"]
|
||||||
|
== value_callback_buffer[0][galil_rio.analog_in.ch0.name]["timestamp"]
|
||||||
for value in value_callback_buffer[0].values()
|
for value in value_callback_buffer[0].values()
|
||||||
]
|
]
|
||||||
)
|
)
|
||||||
|
|
||||||
|
|
||||||
|
def test_galil_rio_digital_out_signal(galil_rio):
|
||||||
|
"""
|
||||||
|
Test that the Galil RIO digital output signal can be set correctly.
|
||||||
|
"""
|
||||||
|
## Test Read from digital output channels
|
||||||
|
buffer_receive = []
|
||||||
|
excepted_put_buffer = []
|
||||||
|
for ii in range(galil_rio.digital_out.ch0._NUM_DIGITAL_OUTPUT_CHANNELS):
|
||||||
|
cmd = f"MG@OUT[{ii}]\r".encode()
|
||||||
|
excepted_put_buffer.append(cmd)
|
||||||
|
recv = " 1.000".encode()
|
||||||
|
buffer_receive.append(recv)
|
||||||
|
|
||||||
|
galil_rio.controller.sock.buffer_recv = buffer_receive # Mock response for readback
|
||||||
|
|
||||||
|
digital_read = galil_rio.read_configuration() # Read to populate readback values
|
||||||
|
|
||||||
|
for walk in galil_rio.digital_out.walk_signals():
|
||||||
|
assert np.isclose(digital_read[walk.item.name]["value"], 1.0)
|
||||||
|
|
||||||
|
assert galil_rio.controller.sock.buffer_put == excepted_put_buffer
|
||||||
|
|
||||||
|
# Test writing to digital output channels
|
||||||
|
galil_rio.controller.sock.buffer_put = [] # Clear buffer put
|
||||||
|
galil_rio.controller.sock.buffer_recv = [b":"] # Mock response for readback
|
||||||
|
|
||||||
|
# Set digital output channel 0 to high
|
||||||
|
galil_rio.digital_out.ch0.put(1)
|
||||||
|
assert galil_rio.controller.sock.buffer_put == [b"SB0\r"]
|
||||||
|
|
||||||
|
# Set digital output channel 0 to low
|
||||||
|
galil_rio.controller.sock.buffer_put = [] # Clear buffer put
|
||||||
|
galil_rio.controller.sock.buffer_recv = [b":"] # Mock response for readback
|
||||||
|
galil_rio.digital_out.ch0.put(0)
|
||||||
|
assert galil_rio.controller.sock.buffer_put == [b"CB0\r"]
|
||||||
|
|||||||
Reference in New Issue
Block a user