more fixes

This commit is contained in:
2025-11-14 15:32:30 +01:00
parent cc1dedcbca
commit 545d4d80ff
10 changed files with 345 additions and 138 deletions
+178 -46
View File
@@ -11,18 +11,77 @@ import time
from ..elements.adjustable import AdjustableFS
class RunFilenameGenerator:
    """Generate and parse sequential run filenames in a directory.

    Filenames have the form
    ``<prefix><zero-padded run number><separator><name>.<suffix>``,
    e.g. ``run0003_scan.json`` for the defaults.
    """

    def __init__(self, path, prefix="run", Ndigits=4, separator="_", suffix="json"):
        self.separator = separator
        self.prefix = prefix
        self.Ndigits = Ndigits
        # Stored as a Path so the glob-based lookups below work.
        self.path = Path(path)
        self.suffix = suffix

    def _format_runno(self, runno):
        """Zero-pad *runno* to the configured number of digits."""
        return f"{runno:0{self.Ndigits}d}"

    def get_existing_runnumbers(self):
        """Return the run numbers of all matching run files in *path*."""
        pattern = (
            self.prefix + self.Ndigits * "[0-9]" + self.separator + "*." + self.suffix
        )
        files = [tf for tf in self.path.glob(pattern) if tf.is_file()]
        return [
            int(tf.name.split(self.prefix)[1].split(self.separator)[0])
            for tf in files
        ]

    def get_run_info_file(self, runno):
        """Return the unique file for run number *runno*.

        Raises
        ------
        FileNotFoundError
            If no file with that run number exists.
        Exception
            If more than one file matches the run number.
        """
        pattern = (
            self.prefix
            + self._format_runno(runno)
            + self.separator
            + "*."
            + self.suffix
        )
        fl = [tf for tf in self.path.glob(pattern) if tf.is_file()]
        if len(fl) > 1:
            raise Exception(
                f"Found multiple files in {self.path} with run number {runno}"
            )
        if not fl:
            # Previously this fell through to fl[0] and raised a bare
            # IndexError; raise a descriptive error instead.
            raise FileNotFoundError(
                f"No file found in {self.path} with run number {runno}"
            )
        return fl[0]

    def get_nextrun_number(self):
        """Return the next free run number (0 if no runs exist yet)."""
        runnos = self.get_existing_runnumbers()
        return max(runnos) + 1 if runnos else 0

    def get_nextrun_filename(self, name):
        """Return the filename for the next run, tagged with *name*."""
        # Reuse get_nextrun_number() instead of duplicating its logic.
        runno = self.get_nextrun_number()
        return (
            self.prefix
            + self._format_runno(runno)
            + self.separator
            + name
            + "."
            + self.suffix
        )
class EpicsDaq:
def __init__(
self,
elog=None,
name=None,
pgroup=None,
channel_list=None,
default_filepath=None,
):
self.name = name
self.default_file_path = AdjustableFS(
f"/sf/bernina/config/eco/reference_values/{name}_default_file_path.json",
default_value="~/data/",
name="default_file_path",
self.pgroup = pgroup
self.alternative_file_path = AdjustableFS(
f"/sf/bernina/config/eco/reference_values/{name}_alternative_file_path.json",
default_value=False,
name="alternative_file_path",
)
self._elog = elog
self.channels = {}
@@ -32,11 +91,15 @@ class EpicsDaq:
@property
def _default_file_path(self):
return self.default_file_path()
if self.alternative_file_path() == False:
file_path = f"/sf/bernina/data/{self.pgroup()}/res/run_data/epics_daq/data/"
else:
file_path = self.alternative_file_path()
return file_path
@_default_file_path.setter
def _default_file_path(self, val):
self.default_file_path(val)
self.alternative_file_path(val)
def update_channels(self):
channels = self.channel_list.get_current_value()
@@ -44,7 +107,7 @@ class EpicsDaq:
if not (channel in self.channels.keys()):
self.channels[channel] = PV(channel, auto_monitor=True)
def h5(self, fina=None, channel_list=None, N_pulses=None, queue_size=100):
def h5(self, fina=None, channel_list=None, Npulses=None, queue_size=100):
if channel_list is None:
channel_list = self.channel_list
if not channel_list.get_current_value() == list(self.channels.keys()):
@@ -58,65 +121,134 @@ class EpicsDaq:
else:
return
data = self.get_data(channel_list=None, Npulses=None, queue_size=100)
f = h5py.File(name=fina, mode="w")
for k in data.keys():
dat = f.create_group(name=k)
dat.create_dataset(name="data", data=data[k]["values"])
dat.create_dataset(name="timestamps", data=data[k]["timestamps"])
dat.create_dataset(
name="pulse_id", data=np.arange(Npulses) + round(time.time() * 100)
)
return data
def get_data(self, channel_list=None, Npulses=None, queue_size=100, **kwargs):
if channel_list is None:
channel_list = self.channel_list
if not channel_list.get_current_value() == list(self.channels.keys()):
self.update_channels()
data = {}
counters = {}
channels = self.channels
for k, channel in channels.items():
channelval = channel.value
if type(channelval) == np.ndarray:
shape = (N_pulses,) + channelval.shape
shape = (Npulses,) + channelval.shape
dtype = channelval.dtype
else:
shape = (N_pulses,)
shape = (Npulses,)
dtype = type(channelval)
data[k] = np.ndarray(shape, dtype=dtype)
data[k] = {
"values": np.ndarray(
shape,
dtype=dtype,
),
"timestamps": np.ndarray(
(Npulses,),
dtype=float,
),
}
counters[k] = 0
def cb_getdata(ch=None, k="", *args, **kwargs):
data[k][counters[k]] = kwargs["value"]
data[k]["values"][counters[k]] = kwargs["value"]
data[k]["timestamps"][counters[k]] = kwargs["timestamp"]
counters[k] = counters[k] + 1
if counters[k] == N_pulses:
if counters[k] == Npulses:
ch.clear_callbacks()
for k, channel in channels.items():
channel.add_callback(callback=cb_getdata, ch=channel, k=k)
while True:
sleep(0.005)
if np.mean(list(counters.values())) == N_pulses:
if np.mean(list(counters.values())) == Npulses:
break
f = h5py.File(name=fina, mode="w")
for k in channels.keys():
dat = f.create_group(name=k)
dat.create_dataset(name="data", data=data[k])
dat.create_dataset(
name="pulse_id", data=np.arange(N_pulses) + round(time.time() * 100)
)
return data
def acquire(self, file_name=None, Npulses=100, default_path=True):
file_name += ".h5"
if default_path:
file_name = self._default_file_path + file_name
data_dir = Path(os.path.dirname(file_name))
# def acquire(self, Npulses=100, default_path=True, scan=None):
# file_name = scan._description
# file_name += ".h5"
# if default_path:
# file_name = self._default_file_path + file_name
# data_dir = Path(os.path.dirname(file_name))
if not data_dir.exists():
print(
f"Path {data_dir.absolute().as_posix()} does not exist, will try to create it..."
)
data_dir.mkdir(parents=True)
print(f"Tried to create {data_dir.absolute().as_posix()}")
data_dir.chmod(0o775)
print(f"Tried to change permissions to 775")
# if not data_dir.exists():
# print(
# f"Path {data_dir.absolute().as_posix()} does not exist, will try to create it..."
# )
# data_dir.mkdir(parents=True)
# print(f"Tried to create {data_dir.absolute().as_posix()}")
# data_dir.chmod(0o775)
# print(f"Tried to change permissions to 775")
#
# def acquire():
# self.h5(fina=file_name, Npulses=Npulses)
# return Acquisition(
# acquire=acquire,
# acquisition_kwargs={"file_names": [file_name], "Npulses": Npulses},
# hold=False,
# )
def acquire(self, scan=None, Npulses=None, **kwargs):
acq_pars = {}
if scan:
scan_wr = weakref.ref(scan)
acq_pars = {
"scan_info": {
"scan_name": scan.description(),
"scan_values": scan.values_current_step,
"scan_readbacks": scan.readbacks_current_step,
"name": [adj.name for adj in scan.adjustables],
"expected_total_number_of_steps": scan.number_of_steps(),
"scan_step_info": {
"step_number": scan.next_step + 1,
},
},
}
acquisition = Acquisition(
acquire=None,
acquisition_kwargs={"Npulses": Npulses},
)
def acquire():
self.h5(fina=file_name, N_pulses=Npulses)
t_tmp = time.time()
det_val = self.get_data(Npulses)
scan_wr().detector_values.append(det_val)
t_stop = time.time()
scan_wr().timestamp_intervals.append(StepTime(t_tmp, t_stop))
return Acquisition(
acquire=acquire,
acquisition_kwargs={"file_names": [file_name], "Npulses": Npulses},
hold=False,
)
acquisition.set_acquire_foo(acquire, hold=False)
return acquisition
def create_arrays(self, scan, **kwargs):
scan.monitor_scan_arrays = {}
for monname, mon in scan.monitors.items():
scan.monitor_scan_arrays[monname] = ArrayTimestamps(
data=mon.data["values"],
timestamps=mon.data["timestamps"],
timestamp_intervals=scan.timestamp_intervals,
parameter=parameter_from_scan(scan),
name=monname,
)
def wait_done(self):
self.check_running()
@@ -150,7 +282,7 @@ class Epicstools:
for channel in self.channel_list:
self.channels.append(PV(channel, auto_monitor=True))
def h5(self, fina=None, channel_list=None, N_pulses=None, queue_size=100):
def h5(self, fina=None, channel_list=None, Npulses=None, queue_size=100):
channel_list = self.channel_list
if os.path.isfile(fina):
@@ -167,10 +299,10 @@ class Epicstools:
for channel in channels:
channelval = channel.value
if type(channelval) == np.ndarray:
shape = (N_pulses,) + channelval.shape
shape = (Npulses,) + channelval.shape
dtype = channelval.dtype
else:
shape = (N_pulses,)
shape = (Npulses,)
dtype = type(channelval)
data.append(np.ndarray(shape, dtype=dtype))
counters.append(0)
@@ -178,14 +310,14 @@ class Epicstools:
def cb_getdata(ch=None, m=0, *args, **kwargs):
data[m][counters[m]] = kwargs["value"]
counters[m] = counters[m] + 1
if counters[m] == N_pulses:
if counters[m] == Npulses:
ch.clear_callbacks()
for m, channel in enumerate(channels):
channel.add_callback(callback=cb_getdata, ch=channel, m=m)
while True:
sleep(0.005)
if np.mean(counters) == N_pulses:
if np.mean(counters) == Npulses:
break
f = h5py.File(name=fina, mode="w")
@@ -193,7 +325,7 @@ class Epicstools:
dat = f.create_group(name=channel)
dat.create_dataset(name="data", data=data[n])
dat.create_dataset(
name="pulse_id", data=np.arange(N_pulses) + round(time.time() * 100)
name="pulse_id", data=np.arange(Npulses) + round(time.time() * 100)
)
return data
@@ -213,7 +345,7 @@ class Epicstools:
print(f"Tried to change permissions to 775")
def acquire():
self.h5(fina=file_name, N_pulses=Npulses)
self.h5(fina=file_name, Npulses=Npulses)
return Acquisition(
acquire=acquire,
+112 -60
View File
@@ -1136,19 +1136,19 @@ namespace.append_obj(
# this is the large inline camera
namespace.append_obj(
"MicroscopeMotorRecord",
pvname_camera="SARES20-CAMS142-M3", # GIC
pvname_camera="SARES20-CAMS142-C1", # GIC
pvname_zoom="SARES20-MF1:MOT_14",
lazy=True,
name="samplecam_inline_top",
name="jetcam_top",
module_name="eco.microscopes",
)
namespace.append_obj(
"CameraBasler",
# pvname_camera="SARES20-CAMS142-M3", #THC
"SARES20-CAMS142-C1", # GIC
"SARES20-CAMS142-M1", # GIC
lazy=True,
name="samplecam_inline_bottom",
name="jetcam_back",
module_name="eco.microscopes",
)
@@ -1389,18 +1389,18 @@ from eco.devices_general.motors import ThorlabsPiezoRecord
class Incoupling(Assembly):
def __init__(self, delaystage_pump=None, name=None):
super().__init__(name=name)
self._append(
SmaractRecord, "SARES20-MCS2:MOT_13", name="thz_par2_x", is_setting=True
)
self._append(
SmaractRecord, "SARES20-MCS2:MOT_16", name="thz_par2_z", is_setting=True
)
self._append(
SmaractRecord, "SARES20-MCS2:MOT_14", name="thz_par2_ry", is_setting=True
)
self._append(
SmaractRecord, "SARES20-MCS2:MOT_15", name="thz_par2_rx", is_setting=True
)
# self._append(
# SmaractRecord, "SARES20-MCS2:MOT_13", name="thz_par2_x", is_setting=True
# )
# self._append(
# SmaractRecord, "SARES20-MCS2:MOT_16", name="thz_par2_z", is_setting=True
# )
# self._append(
# SmaractRecord, "SARES20-MCS2:MOT_14", name="thz_par2_ry", is_setting=True
# )
# self._append(
# SmaractRecord, "SARES20-MCS2:MOT_15", name="thz_par2_rx", is_setting=True
# )
self._append(
SmaractRecord, "SARES20-MCS2:MOT_11", name="thz_par1_z", is_setting=True
)
@@ -1509,40 +1509,40 @@ class Incoupling(Assembly):
is_setting=False,
)
self._append(
AdjustableVirtual,
[self.thz_par1_z, self.thz_par2_z],
lambda z1, z2: z2,
lambda z: [
self.thz_par1_z.get_current_value()
+ (z - self.thz_par2_z.get_current_value()),
z,
],
name="thz_focus",
is_setting=False,
is_display=False,
)
# self._append(
# AdjustableVirtual,
# [self.thz_par1_z, self.thz_par2_z],
# lambda z1, z2: z2,
# lambda z: [
# self.thz_par1_z.get_current_value()
# + (z - self.thz_par2_z.get_current_value()),
# z,
# ],
# name="thz_focus",
# is_setting=False,
# is_display=False,
# )
self._append(
delaystage_pump,
name="delaystage_pump",
is_setting=False,
is_display=False,
)
# self._append(
# delaystage_pump,
# name="delaystage_pump",
# is_setting=False,
# is_display=False,
# )
self._append(
AdjustableVirtual,
[self.delaystage_pump, self.thz_par2_x],
lambda d, x: x,
lambda x: [
self.delaystage_pump.get_current_value()
+ (x - self.thz_par2_x.get_current_value()) / 2,
x,
],
name="thz_par2_x_delaycomp",
is_setting=False,
is_display=False,
)
# self._append(
# AdjustableVirtual,
# [self.delaystage_pump, self.thz_par2_x],
# lambda d, x: x,
# lambda x: [
# self.delaystage_pump.get_current_value()
# + (x - self.thz_par2_x.get_current_value()) / 2,
# x,
# ],
# name="thz_par2_x_delaycomp",
# is_setting=False,
# is_display=False,
# )
# def thz_pol_set(self, val):
# return 1.0 * val, 1.0 / 2 * val
@@ -2439,8 +2439,12 @@ namespace.append_obj(IlluminatorsLasers, name="sample_illumination", lazy=True)
class LiquidJetSpectroscopy(Assembly):
def __init__(self, pgroup_adj=None, config_JF_adj=None, name=None):
def __init__(
self, pgroup_adj=None, config_JF_adj=None, name=None, v_g=None, e2v=None
):
super().__init__(name=name)
self._v_g = v_g
self._e2v = e2v
self._append(
MotorRecord,
"SARES20-MF1:MOT_12",
@@ -2450,18 +2454,24 @@ class LiquidJetSpectroscopy(Assembly):
)
self._append(
MotorRecord,
"SARES20-MF1:MOT_10",
"SARES20-XPS1:MOT_JET_Y",
name="y",
backlash_definition=True,
is_setting=True,
)
self._append(
MotorRecord,
"SARES20-MF1:MOT_11",
"SARES20-MF1:MOT_13",
name="z",
backlash_definition=True,
is_setting=True,
)
self._append(
MpodChannel,
pvbase="SARES21-PS7071",
channel_number=4,
name="light",
)
# self._append(
# MotorRecord,y=True,
# "SARES20-MF1:MOT_3",
@@ -2477,20 +2487,62 @@ class LiquidJetSpectroscopy(Assembly):
#
self._append(
Jungfrau,
"JF04T01V01",
name="det_em",
"JF03T01V02",
name="det_jf",
pgroup_adj=pgroup_adj,
config_adj=config_JF_adj,
)
self._append(
MpodChannel,
pvbase="SARES21-PS7071",
module_string="HV_EHS_3",
channel_number=1,
name="apd",
)
self._append(
AdjustableFS,
"/photonics/home/gac-bernina/eco/configuration/apd_voltage_calibration",
name="apd_voltage_calibration",
is_display=False,
is_setting=True,
)
# Convert energy → voltage through calibration
def ene2volt(energy):
try:
E, V = np.asarray(self.apd_voltage_calibration()).T
return np.interp(energy, E, V)
except:
return np.nan
# Getter: read the APD voltage and return it as the virtual value
def get_voltage(apd_voltage):
return apd_voltage
# Setter: compute voltage from energy and set it
def set_voltage(target_energy):
voltage = ene2volt(target_energy)
self.apd.voltage.set_target_value(voltage)
return voltage
# Create virtual adjustable:
self._append(
AdjustableVirtual,
[self.apd.voltage], # real adjustable(s)
get_voltage, # getter
set_voltage, # setter
reset_current_value_to=False,
name="ene2volt",
)
# namespace.append_obj(
# LiquidJetSpectroscopy,
# pgroup_adj=config_bernina.pgroup,
# config_JF_adj=config_JFs,
# name="liquidjet",
# lazy=True,
# )
namespace.append_obj(
LiquidJetSpectroscopy,
pgroup_adj=config_bernina.pgroup,
config_JF_adj=config_JFs,
name="jet",
lazy=True,
)
from eco.detector import Jungfrau
+3 -2
View File
@@ -48,7 +48,7 @@ class Pipeline(Assembly):
precision=0,
check_interval=None,
name="_config",
is_setting=False,
is_setting=True,
is_display=False,
)
@@ -56,7 +56,8 @@ class Pipeline(Assembly):
AdjustableObject,
self._config,
name="config",
is_setting=True,
is_setting=False,
# recursive=False,
is_display="recursive",
)
self._append(
+11 -12
View File
@@ -6,9 +6,9 @@ from functools import partial
class AdjustableObject(Assembly):
def __init__(self, adjustable_dict, is_setting_children = True, name=None):
def __init__(self, adjustable_dict, is_setting_children=False, name=None):
super().__init__(name=name)
self._base_dict = adjustable_dict
self._append(adjustable_dict, name="_base_dict", is_setting=False)
self.init_object(is_setting_children=is_setting_children)
def set_field(self, fieldname, value):
@@ -30,7 +30,7 @@ class AdjustableObject(Assembly):
self._base_dict.set_target_value(tmp)
self.__init__(self._base_dict, name=self.name)
def init_object(self, is_setting_children=True):
def init_object(self, is_setting_children=False):
# super().__init__(name=self.name)
for k, v in self._base_dict.get_current_value().items():
tadj = AdjustableGetSet(
@@ -51,29 +51,30 @@ class AdjustableObject(Assembly):
)
else:
self._append(
tadj, call_obj=False, is_setting=is_setting_children, is_display=True, name=ln
tadj,
call_obj=False,
is_setting=is_setting_children,
is_display=True,
name=ln,
)
class DetectorObject(Assembly):
def __init__(self, detector_dict, name=None):
super().__init__(name=name)
self._base_dict = detector_dict
self.init_object()
def get_field(self, fieldname):
d = self._base_dict.get_current_value()
if fieldname not in d.keys():
raise Exception(f"{fieldname} is not in dictionary")
return d[fieldname]
def init_object(self):
# super().__init__(name=self.name)
for k, v in self._base_dict.get_current_value().items():
tdet = DetectorGet(
partial(self.get_field, k), name=k
)
tdet = DetectorGet(partial(self.get_field, k), name=k)
if k in self.__dict__.keys():
ln = f"{k}_"
else:
@@ -91,5 +92,3 @@ class DetectorObject(Assembly):
self._append(
tdet, call_obj=False, is_setting=False, is_display=True, name=ln
)
+13 -1
View File
@@ -159,6 +159,7 @@ class Assembly:
is_setting=False,
is_display=True,
is_status=True,
recursive=None,
call_obj=True,
overwrite=False,
**kwargs,
@@ -189,12 +190,23 @@ class Assembly:
# if is_status == "auto":
# is_status = isinstance(self.__dict__[name], Detector)
if is_setting:
if isinstance(is_setting, str):
recursive = is_setting.lower() == "recursive"
elif not (recursive is None):
recursive = recursive
else:
recursive = True
self.status_collection.append(
self.__dict__[name], selection="settings", recursive=True
self.__dict__[name], selection="settings", recursive=recursive
)
# self.status_collection.append(
# self.__dict__[name], selection="settings", recursive=True
# )
if is_display:
if isinstance(is_display, str):
recursive = is_display.lower() == "recursive"
elif not (recursive is None):
recursive = recursive
else:
recursive = False
self.status_collection.append(
+1 -2
View File
@@ -274,7 +274,6 @@ class Memory:
for trec in self.categories["recall"]:
rec.update(mem[trec])
print(rec)
if force:
select = [True] * len(rec.items())
else:
@@ -290,7 +289,7 @@ class Memory:
changes = []
for sel, (key, val) in zip(select, rec.items()):
if sel:
to = name2obj(self.obj_parent, key)
to = name2obj(self.obj_parent(), key)
if set_changes_only:
if to.get_current_value() == val:
continue
+4 -4
View File
@@ -1112,15 +1112,15 @@ class LaserBernina(Assembly):
self._append(
SmaractRecord,
"SLAAR21-LMTS-SMAR1:MOT_0",
name="delaystage_frog",
"SLAAR21-LMTS-SMAR1:MOT_3",
name="delaystage_thz_lno",
is_setting=True,
)
self._append(
DelayTime,
self.delaystage_frog,
name="delay_frog",
self.delaystage_thz_lno,
name="delay_thz_lno",
is_setting=True,
)
+2 -2
View File
@@ -71,10 +71,10 @@ class Gsheet_API:
spreadsheet = self.gc.create(
title=f"run_table_{exp_id}", folder_id="1F7DgF0HW1O71nETpfrTvQ35lRZCs5GvH"
)
spreadsheet.add_worksheet("Custom table")
spreadsheet.add_worksheet("Custom table", 10,10)
spreadsheet.add_worksheet("runtable", 10, 10)
spreadsheet.add_worksheet("positions", 10, 10)
spreadsheet.add_worksheet("Available keys")
spreadsheet.add_worksheet("Available keys", 10,10)
ws = spreadsheet.get_worksheet(0)
spreadsheet.del_worksheet(ws)
+20 -8
View File
@@ -163,7 +163,7 @@ class SolidTargetDetectorPBPS(Assembly):
# calc=None,
# calc_calib={},
pipeline_computation=None,
bias_pvextensions = {'all':":Lnk9-BP0-V-HV-ALL-SP"},
bias_pvextensions={"all": ":Lnk9-BP0-V-HV-ALL-SP"},
):
super().__init__(name=name)
self.pvname = pvname
@@ -234,10 +234,10 @@ class SolidTargetDetectorPBPS(Assembly):
)
if bias_pvextensions:
if 'all' in bias_pvextensions.keys():
if "all" in bias_pvextensions.keys():
self._append(
AdjustablePv,
self.pvname+bias_pvextensions['all'],
self.pvname + bias_pvextensions["all"],
name="bias_all",
is_setting=True,
)
@@ -318,7 +318,13 @@ class SolidTargetDetectorPBPS(Assembly):
is_display=False,
)
if pipeline_computation:
self._append(Pipeline, pipeline_computation,name='pipeline_comp', is_setting=True, is_display=False)
self._append(
Pipeline,
pipeline_computation,
name="pipeline_comp",
is_setting=True,
is_display=False,
)
def get_calibration_values(self, seconds=5, return_data=False):
self.x_diodes.set_target_value(0).wait()
@@ -338,7 +344,7 @@ class SolidTargetDetectorPBPS(Assembly):
print(f"Got {nsamples} samples in {seconds} s.")
norm_diodes = [1 / tm / 4 for tm in mean]
if return_data:
return data,norm_diodes
return data, norm_diodes
return norm_diodes
def set_calibration_values(self, norm_diodes):
@@ -1016,7 +1022,7 @@ class SolidTargetDetectorPBPS_assembly(Assembly):
std = [np.std(td) for td in data]
norm_diodes = [1 / tm / 4 for tm in mean]
if return_data:
return data,norm_diodes
return data, norm_diodes
return norm_diodes
def set_calibration_values(self, norm_diodes):
@@ -1156,7 +1162,7 @@ class SolidTargetDetectorBerninaUSD(Assembly):
name=None,
# calc=None,
# calc_calib={},
pipeline_computation = None,
pipeline_computation=None,
):
super().__init__(name=name)
@@ -1268,7 +1274,13 @@ class SolidTargetDetectorBerninaUSD(Assembly):
# is_display=False,
# )
if pipeline_computation:
self._append(Pipeline, pipeline_computation,name='pipeline_comp', is_setting=True, is_display=False)
self._append(
Pipeline,
pipeline_computation,
name="pipeline_comp",
is_setting=True,
is_display=False,
)
def get_calibration_values(self, seconds=5):
self.x_diodes.set_target_value(0).wait()
+1 -1
View File
@@ -23,7 +23,7 @@ from eco.xoptics.dcm_pathlength_compensation import MonoTimecompensation
@get_from_archive
@spec_convenience
@update_changes
@tweak_option
# @tweak_option
class DoubleCrystalMono(Assembly):
def __init__(
self,