before janod

This commit is contained in:
2025-02-18 09:39:31 +01:00
parent 63c4a48317
commit 66cf353a36
16 changed files with 1047 additions and 626 deletions
+22 -14
View File
@@ -258,10 +258,12 @@ class Daq(Assembly):
"detectors"
]
def get_JFs_running(self):
return requests.get(f"{self.broker_address}/get_running_detectors").json()[
"detectors"
]
def get_JFs_running(self, return_full_response=False):
res = requests.get(f"{self.broker_address}/get_running_detectors").json()
if return_full_response:
return res
else:
return res["running_detectors"]
def power_on_JF(self, JF_channel):
par = {"detector_name": JF_channel}
@@ -305,22 +307,28 @@ class Daq(Assembly):
json={"pgroup": pgroup, "run_number": run_number, "files": file_names},
)
def check_counters(self, channels_to_check = ['channels_BSCAM', 'channels_JF'], timeout=3):
def check_counters(
self, channels_to_check=["channels_BSCAM", "channels_JF"], timeout=3
):
if not set(self.channels.keys()).intersection(set(channels_to_check)):
return
print("FYI, selected channels are")
for nam,chs in self.channels.items():
for nam, chs in self.channels.items():
if nam in channels_to_check:
print(f'{nam} : {chs.get_current_value()}')
try:
o = inputimeout.inputimeout(prompt=f'Press Ctrl-c to abort, Return to continue, or wait {timeout} seconds',timeout=timeout)
print(f"{nam} : {chs.get_current_value()}")
try:
o = inputimeout.inputimeout(
prompt=f"Press Ctrl-c to abort, Return to continue, or wait {timeout} seconds",
timeout=timeout,
)
except inputimeout.TimeoutOccurred:
print('... timed out, continuing with selection.')
print("... timed out, continuing with selection.")
except KeyboardInterrupt:
raise Exception('User-requested cancelling!')
else:
if o=='c':
raise Exception('User-requested cancelling!')
raise Exception("User-requested cancelling!")
else:
if o == "c":
raise Exception("User-requested cancelling!")
# def get_dap_settings(detector_name):
# dap_parameters = {}
+36 -16
View File
@@ -119,7 +119,7 @@ namespace.append_obj(
)
namespace.append_obj(
"DataApi",
"DataHub",
name="archiver",
module_name="eco.dbase.archiver",
pv_pulse_id="SARES20-CVME-01-EVR0:RX-PULSEID",
@@ -727,7 +727,7 @@ namespace.append_obj(
namespace.append_obj(
"Pprm_dsd",
pvname="SARES20-DSDPPRM",
pvname_camera="SARES20-DSDPPRM",
pvname_camera="SARES20-PROF146-M1",
module_name="eco.xdiagnostics.profile_monitors",
name="prof_dsd",
lazy=True,
@@ -1122,16 +1122,6 @@ namespace.append_obj(
# config_adj=config_JFs,
# lazy=True,)
# namespace.append_obj(
# "Jungfrau",
# "JF03T01V02",
# name="det_i0",
# pgroup_adj=config_bernina.pgroup,
# module_name="eco.detector.jungfrau",
# config_adj=config_JFs,
# lazy=True,
# )
# namespace.append_obj(
# "Jungfrau",
# "JF04T01V01",
@@ -1142,6 +1132,16 @@ namespace.append_obj(
# lazy=True,
# )
namespace.append_obj(
"Jungfrau",
"JF03T01V02",
name="det_i0",
pgroup_adj=config_bernina.pgroup,
module_name="eco.detector.jungfrau",
config_adj=config_JFs,
lazy=True,
)
# namespace.append_obj(
# "Jungfrau",
# "JF14T01V01",
@@ -1286,11 +1286,12 @@ namespace.append_obj(
module_name="eco.utilities.runtable",
exp_id=config_bernina.pgroup._value,
exp_path=f"/sf/bernina/data/{config_bernina.pgroup._value}/res/run_table/",
devices="bernina",
devices="eco.bernina",
keydf_fname="/sf/bernina/config/src/python/gspread/gspread_keys.pkl",
cred_fname="/sf/bernina/config/src/python/gspread/pandas_push",
gsheet_key_path="/sf/bernina/config/eco/reference_values/run_table_gsheet_keys",
lazy=True,
parse=True, # <-- set this to False to avoid parsing and only add the status information to the runtable
)
@@ -1621,7 +1622,12 @@ class Monitor:
import traceback
def append_scan_monitors(scan, daq=daq, **kwargs):
def append_scan_monitors(
scan,
daq=daq,
custom_monitors={},
**kwargs,
):
scan.monitors = {}
for adj in scan.adjustables:
try:
@@ -1645,6 +1651,15 @@ def append_scan_monitors(scan, daq=daq, **kwargs):
print(f"Could not add CA readback monitor for {tname}")
traceback.print_exc()
for tname, tobj in custom_monitors.items():
try:
if type(tobj) is str:
tmonpv = tobj
scan.monitors[tname] = Monitor(tmonpv)
print(f"Added custom monitor for {tname}")
except Exception:
print(f"Could not add custom monitor for {tname}")
traceback.print_exc()
try:
tname = daq.pulse_id.alias.get_full_name()
scan.monitors[tname] = Monitor(daq.pulse_id.pvname)
@@ -1796,7 +1811,7 @@ def _create_metadata_structure_start_scan(
scan._elog_id = elog_ids[1]
metadata.update({"elog_message_id": scan._elog_id})
metadata.update(
{"elog_post_link": scan._elog[1]._log._url + str(scan._elog_id)}
{"elog_post_link": scan._elog.elogs[1]._log._url + str(scan._elog_id)}
)
except:
print("Elog posting failed with:")
@@ -2047,7 +2062,12 @@ namespace.append_obj(
name="clic",
module_name="eco.loptics.bernina_laser",
)
namespace.append_obj(
"MidIR",
lazy=True,
name="midir",
module_name="eco.loptics.bernina_laser",
)
from ..elements.assembly import Assembly
from ..devices_general.motors import SmaractStreamdevice
+152 -9
View File
@@ -7,6 +7,149 @@ from matplotlib import pyplot as plt
import numpy as np
from .. import ecocnf
from ..elements.assembly import Assembly
from datahub import DataBuffer, Table, Stdout
class DataHub(Assembly):
def __init__(self, pv_pulse_id=None, name=None, add_to_cnf=False):
super().__init__(name=name)
if pv_pulse_id:
self._append(DetectorPvDataStream, pv_pulse_id, name="pulse_id")
if add_to_cnf:
ecocnf.archiver = self
self._databuffer = None
@property
def databuffer(self):
if self._databuffer is None:
self._databuffer = DataBuffer(backend="sf-databuffer")
return self._databuffer
def get_data(self, channels, start, end, range_type=None):
table = Table()
self.databuffer.add_listener(table)
self.databuffer.request(dict(channels=channels, start=start, end=end))
op = table.as_dataframe()
self.databuffer.remove_listeners()
return op
def get_data_time_range(
self,
channels=[],
start=None,
end=None,
plot=False,
force_type=None,
labels=None,
convert_timezone=False,
**kwargs,
):
if not end:
end = datetime.datetime.now()
if isinstance(start, datetime.timedelta):
start = end + start
elif isinstance(start, dict):
start = datetime.timedelta(**start)
start = end + start
elif isinstance(start, Number):
start = datetime.timedelta(seconds=start)
start = end + start
else:
start = datetime.timedelta(**kwargs)
start = end + start
if force_type:
archive_types = ["CA", "BS"]
if force_type in archive_types:
if force_type == "CA":
channels_req = [f"sf-archiverappliance/{tch}" for tch in channels]
elif force_type == "BS":
channels_req = [f"sf-databuffer/{tch}" for tch in channels]
else:
raise Exception(f"force_type must be one of {archive_types}")
else:
channels_req = channels
if type(start) is str:
start = dateutil.parser.parse(start)
if type(end) is str:
end = dateutil.parser.parse(end)
start = datetime2str(local2utc(start))
end = datetime2str(local2utc(end))
data = self.get_data(channels_req, start=start, end=end, range_type="time")
if convert_timezone:
data.index = data.index.tz_convert("Europe/Zurich")
if plot:
ah = plt.gca()
if not labels:
labels = channels
for chan, label in zip(channels, labels):
sel = ~data[chan].isnull()
if any(sel):
x = data.index[sel]
y = data[chan][sel]
ah.step(x, y, ".-", label=label, where="post")
plt.xticks(rotation=30)
plt.legend()
plt.tight_layout()
plt.xlabel(data.index.name)
ah.figure.tight_layout()
return data
def get_data_pulse_id_range(
self,
channels=[],
start=None,
end=None,
plot=False,
force_type=None,
convert_timezone=False,
labels=None,
):
if not end:
if hasattr(self, "pulse_id"):
end = int(self.pulse_id.get_current_value())
else:
raise Exception("no end pulse id provided")
start = start + end
if force_type:
archive_types = ["CA", "BS"]
if force_type in archive_types:
if force_type == "CA":
channels_req = [f"sf-archiverappliance/{tch}" for tch in channels]
elif force_type == "BS":
channels_req = [f"sf-databuffer/{tch}" for tch in channels]
else:
raise Exception(f"force_type must be one of {archive_types}")
else:
channels_req = channels
data = self.get_data(channels_req, start=start, end=end, range_type="pulseId")
if convert_timezone:
data.index = data.index.tz_convert("Europe/Zurich")
if plot:
ah = plt.gca()
if not labels:
labels = channels
for chan, label in zip(channels, labels):
sel = ~np.isnan(data[chan])
x = data.index[sel]
y = data[chan][sel]
ah.step(x, y, ".-", label=label, where="post")
plt.xticks(rotation=30)
plt.legend()
plt.tight_layout()
plt.xlabel(data.index.name)
ah.figure.tight_layout()
return data
def search(self, searchstring):
"""A search in database using simpler unix glob expressions (e.g. '*ARES*')"""
return search(translate(searchstring))
class DataApi(Assembly):
@@ -61,7 +204,7 @@ class DataApi(Assembly):
start = datetime2str(local2utc(start))
end = datetime2str(local2utc(end))
data = get_data(channels_req, start=start, end=end, range_type="time")
if convert_timezone:
data.index = data.index.tz_convert("Europe/Zurich")
@@ -89,8 +232,8 @@ class DataApi(Assembly):
start=None,
end=None,
plot=False,
force_type=None,
convert_timezone = True,
force_type=None,
convert_timezone=True,
labels=None,
):
if not end:
@@ -134,16 +277,16 @@ class DataApi(Assembly):
def search(self, searchstring):
"""A search in database using simpler unix glob expressions (e.g. '*ARES*')"""
return search(translate(searchstring))
def datetime2str(datetime_date):
return datetime_date.isoformat()
def local2utc(datetime_date):
return datetime_date.replace(
tzinfo=None,
).astimezone(
tz=datetime.timezone.utc,
)
).astimezone(
tz=datetime.timezone.utc,
)
+124 -65
View File
@@ -1,7 +1,7 @@
from cam_server import CamClient, PipelineClient
from matplotlib.backend_bases import MouseButton
from eco.devices_general.utilities import Changer
from eco.epics.detector import DetectorPvData
from eco.epics.detector import DetectorPvData, DetectorPvEnum
from ..aliases import Alias, append_object_to_object
from ..elements.adjustable import AdjustableVirtual, AdjustableGetSet, value_property
@@ -16,6 +16,7 @@ from pathlib import Path
import time
import matplotlib.pyplot as plt
import numpy as np
sys.path.append("/sf/bernina/config/src/python/sf_databuffer/")
import bufferutils
@@ -36,6 +37,7 @@ def get_pipelineclient():
PIPELINE_CLIENT = PipelineClient()
return PIPELINE_CLIENT
@value_property
class CamserverConfig2(Assembly):
def __init__(self, cam_id, camserver_alias=None, name=None, camserver_group=None):
@@ -44,19 +46,40 @@ class CamserverConfig2(Assembly):
self.camserver_alias = camserver_alias
self.camserver_group = camserver_group
self._cross = None
self._append(AdjustableGetSet,
self._get_config,
self._set_config,
cache_get_seconds =.05,
precision=0,
check_interval=None,
name='_config',
is_setting=False,
is_display=False)
self._append(AdjustableObject, self._config, name='config',is_setting=True, is_display='recursive')
self._append(DetectorGet, self._get_info, cache_get_seconds =.05, name='_info', is_setting=False, is_display=False)
self._append(DetectorObject, self._info, name='info', is_display='recursive', is_setting=False)
self._append(
AdjustableGetSet,
self._get_config,
self._set_config,
cache_get_seconds=0.05,
precision=0,
check_interval=None,
name="_config",
is_setting=False,
is_display=False,
)
self._append(
AdjustableObject,
self._config,
name="config",
is_setting=True,
is_display="recursive",
)
self._append(
DetectorGet,
self._get_info,
cache_get_seconds=0.05,
name="_info",
is_setting=False,
is_display=False,
)
self._append(
DetectorObject,
self._info,
name="info",
is_display="recursive",
is_setting=False,
)
@property
def pc(self):
@@ -67,7 +90,7 @@ class CamserverConfig2(Assembly):
return get_camclient()
def _get_config(self):
return self.cc.get_camera_config(self.cam_id)
return self.cc.get_camera_config(self.cam_id)
def _set_config(self, value, hold=False):
return Changer(
@@ -81,7 +104,7 @@ class CamserverConfig2(Assembly):
"camera_geometry": self.cc.get_camera_geometry(self.cam_id),
"pipelines": self._get_pipelines(),
}
return fields
return fields
### convenience functions ###
def get_camera_image(self):
@@ -149,7 +172,8 @@ class CamserverConfig2(Assembly):
subprocess.Popen(line, shell=True)
def gui(self):
self._run_cmd(f'csm')
self._run_cmd(f"csm")
@value_property
class CamserverConfig(Assembly):
@@ -237,7 +261,7 @@ class CamserverConfig(Assembly):
x_um_per_px = 1
y_um_per_px = 1
calib["reference_marker"] = [x - 1, y - 1, x + 1, y + 1]
# calib["reference_marker"] = [x - 1, y - 1, x + 1, y + 1]
calib["reference_marker_width"] = 2 * x_um_per_px
calib["reference_marker_height"] = 2 * y_um_per_px
self.set_config_fields(fields={"camera_calibration": calib})
@@ -279,25 +303,33 @@ class CamserverConfig(Assembly):
class CameraBasler(Assembly):
def __init__(self, pvname, camserver_alias=None, name=None, camserver_group=None):
def __init__(
self,
pvname,
camserver_alias=None,
name=None,
camserver_group=None,
connect_camserver=True,
):
super().__init__(name=name)
self.pvname = pvname
if not camserver_alias:
camserver_alias = self.alias.get_full_name() + f" ({pvname})"
else:
camserver_alias = camserver_alias + f" ({pvname})"
self._append(
CamserverConfig2,
self.pvname,
camserver_alias=camserver_alias,
camserver_group=camserver_group,
name="config_cs",
is_display=False,
)
if connect_camserver:
self._append(
CamserverConfig2,
self.pvname,
camserver_alias=camserver_alias,
camserver_group=camserver_group,
name="config_cs",
is_display=False,
)
self.config_cs.set_alias()
if camserver_group is not None:
self.config_cs.set_group()
self.config_cs.set_alias()
if camserver_group is not None:
self.config_cs.set_group()
self._append(
AdjustablePvEnum,
self.pvname + ":INIT",
@@ -305,12 +337,19 @@ class CameraBasler(Assembly):
is_setting=True,
is_display=False,
)
self._append(
DetectorPvEnum,
self.pvname + ":BUSY_INIT",
name="is_initializing",
is_setting=True,
is_display=True,
)
self._append(
AdjustablePvEnum,
self.pvname + ":CAMERASTATUS",
name="running",
name="cam_status",
is_setting=True,
is_display=False,
is_display=True,
)
self._append(
AdjustablePv,
@@ -403,7 +442,7 @@ class CameraBasler(Assembly):
is_setting=True,
is_display=False,
)
self._append(
DetectorPvData,
self.pvname + ":DEVICEFREQUENCY",
@@ -453,7 +492,7 @@ class CameraBasler(Assembly):
self._exposure_time.get_current_value,
lambda value: self._set_params((self._exposure_time, value)),
name="exposure_time",
unit = 'ms',
unit="ms",
is_setting=True,
is_display=True,
)
@@ -485,24 +524,32 @@ class CameraBasler(Assembly):
)
def _set_params(self, *args):
self.running(1)
self.cam_status(1)
for ob, val in args:
ob(val)
self._set_parameters(1)
self.running(2)
self.cam_status(2)
def get_camera_images(self, n):
imgs=[]
while(len(np.unique(imgs, axis=0))<n):
imgs = []
while len(np.unique(imgs, axis=0)) < n:
imgs.append(self.config_cs.get_camera_image())
return np.unique(imgs, axis=0)
def set_cross(self, x=None, y=None, x_um_per_px=None, y_um_per_px=None, n_images=10):
def set_cross(
self, x=None, y=None, x_um_per_px=None, y_um_per_px=None, n_images=10
):
"""Set x and y position of the reference marker on a camera; the px/um calibration is conserved if no new value is given"""
def prompt(x,y,x_um_per_px,y_um_per_px):
x=int(x)
y=int(y)
answer = input(f"Set the new cross position [{x}, {y}] with calibration [{x_um_per_px:.3}, {y_um_per_px:.3}] ([y]/n)?") or "y"
def prompt(x, y, x_um_per_px, y_um_per_px):
x = int(x)
y = int(y)
answer = (
input(
f"Set the new cross position [{x}, {y}] with calibration [{x_um_per_px:.3}, {y_um_per_px:.3}] ([y]/n)?"
)
or "y"
)
if answer == "y":
calib.reference_marker([x - 1, y - 1, x + 1, y + 1])
calib.reference_marker_width(2 * x_um_per_px)
@@ -516,51 +563,51 @@ class CameraBasler(Assembly):
print("Current calibration:")
print(calib)
try:
w = calib.reference_marker_width()
h = calib.reference_marker_height()
w = calib.reference_marker_width()
h = calib.reference_marker_height()
rm = calib.reference_marker()
if not x_um_per_px:
x_um_per_px = w / abs(rm[2] - rm[0])
if not y_um_per_px:
y_um_per_px = h / abs(rm[3] - rm[1])
except:
rm=[0,0,0,0]
rm = [0, 0, 0, 0]
x_um_per_px = 1
y_um_per_px = 1
if x is None or y is None:
x = (rm[2] + rm[0])/2
y = (rm[3] + rm[1])/2
x = (rm[2] + rm[0]) / 2
y = (rm[3] + rm[1]) / 2
img = np.mean(self.get_camera_images(n_images), axis=0)
run = True
def on_click(event):
if event.button is MouseButton.LEFT:
x = event.xdata
y = event.ydata
cross_plot.set_data(x,y)
cross_plot.set_data(x, y)
plt.draw()
print(f'cross at x: {x:.4} and y: {y:.4}')
self.config_cs._cross = [x,y]
print(f"cross at x: {x:.4} and y: {y:.4}")
self.config_cs._cross = [x, y]
else:
plt.disconnect(bid)
plt.close(self.config_cs.cam_id)
fig = plt.figure(num=self.config_cs.cam_id)
plt.title(f"Set cross: left mouse click, Finish: right click")
plt.imshow(img)
cross_plot = plt.plot(x,y, '+r', markersize=10)[0]
bid = fig.canvas.mpl_connect('button_press_event', on_click)
cross_plot = plt.plot(x, y, "+r", markersize=10)[0]
bid = fig.canvas.mpl_connect("button_press_event", on_click)
plt.show(block=True)
x, y = self.config_cs._cross
print(x,y)
prompt(x,y,x_um_per_px,y_um_per_px)
print(x, y)
prompt(x, y, x_um_per_px, y_um_per_px)
def gui(self):
self._run_cmd(
f'caqtdm -macro "NAME={self.pvname},CAMNAME={self.pvname}" /sf/controls/config/qt/Camera/CameraExpert.ui'
)
# NB: please note this should be moved to microscopes which are using cameras plus zooms,
class QioptiqMicroscope(CameraBasler):
def __init__(self, pvname_camera, pvname_zoom=None, pvname_focus=None, name=None):
@@ -664,14 +711,26 @@ class CameraPCO(Assembly):
# NB: please note this should be moved to microscopes which are using cameras plus zooms,
class FeturaMicroscope(CameraPCO):
def __init__(self, pvname_camera, pvname_base_zoom=None, name=None, camserver_alias=None):
def __init__(
self, pvname_camera, pvname_base_zoom=None, name=None, camserver_alias=None
):
super().__init__(pvname_camera, name=name, camserver_alias=camserver_alias)
if pvname_base_zoom:
self._append(AdjustablePv, pvsetname=pvname_base_zoom+":POS_SP", pvreadbackname=pvname_base_zoom+":POS_RB", name="_zoom_motor", is_setting=True, is_display=False)
self._append(
AdjustablePv,
pvsetname=pvname_base_zoom + ":POS_SP",
pvreadbackname=pvname_base_zoom + ":POS_RB",
name="_zoom_motor",
is_setting=True,
is_display=False,
)
def getv(v):
return v/10.
return v / 10.0
def setv(v):
return v*10.
self._append(AdjustableVirtual, [self._zoom_motor], getv, setv, name="zoom", unit="%")
return v * 10.0
self._append(
AdjustableVirtual, [self._zoom_motor], getv, setv, name="zoom", unit="%"
)
+3
View File
@@ -2,6 +2,8 @@
from functools import partial
from epics.motor import Motor as _Motor
from epics import PV
from eco.acquisition.scan_data import run_status_convenience
from .utilities import Changer
from ..aliases import Alias
from ..elements.adjustable import (
@@ -888,6 +890,7 @@ class ThorlabsPiezoRecord(Assembly):
@update_changes
@get_from_archive
@value_property
# @run_status_convenience
class MotorRecord(Assembly):
def __init__(
self,
+2 -1
View File
@@ -10,6 +10,7 @@ import colorama
import numpy as np
import eco
from eco.acquisition.scan_data import run_status_convenience
from eco.aliases import Alias
from eco.devices_general.utilities import Changer
@@ -120,7 +121,6 @@ def spec_convenience(Adj):
]
else:
messages = [f"{tname} is at {value}."]
self.mvr(value)
elog.post(*messages, tags=tags)
def mv_elog(self, value, premessage=None, tags=[]):
@@ -554,6 +554,7 @@ class AdjustableFS:
@spec_convenience
@tweak_option
@value_property
@run_status_convenience
class AdjustableVirtual:
def __init__(
self,
+50 -7
View File
@@ -1,3 +1,4 @@
import itertools
from pathlib import Path
from datetime import datetime
from .adjustable import AdjustableFS
@@ -53,6 +54,48 @@ class Memory:
)
self._presets = AdjustableFS(self.dir / Path("presets.json"), default_value={})
def memories(self, indices=None, search_key=None):
self.setup_path()
mem = self._memories()
memkeys = list(mem.keys())
if indices is None:
indices = range(len(mem))
mems = []
for index in indices:
tkey = memkeys[index]
tmem = mem[tkey]
cats = list(itertools.chain.from_iterable(tmem["categories"].values()))
tmem_all = self.get_memory(key=tkey)
if search_key is not None:
tmem_sel = {
tk: {ttk: ttv for ttk, ttv in tv.items() if search_key in ttk}
for tk, tv in tmem_all.items()
if tk in cats
}
else:
tmem_sel = tmem_all
tmem.update(tmem_sel)
mems.append(tmem)
return mems
def plot_parameter(self, parameter_name, group_name="settings"):
mem = self.memories(search_key=parameter_name)
date = []
value = []
message = []
for tmem in mem:
try:
tdate = datetime.fromisoformat(tmem["date"])
tval = tmem[group_name][parameter_name]
tmess = tmem["message"]
date.append(tdate)
value.append(tval)
message.append(tmess)
except:
pass
return date, value, message
def __str__(self):
self.setup_path()
mem = self._memories()
@@ -118,13 +161,13 @@ class Memory:
message = input(
"Please enter a message associated to this memory entry:\n>>> "
)
mem[key] = {
"message": message,
"categories": self.categories,
"date": key,
}
if preset_varname:
mem[key].update({"presetname": preset_varname})
mem[key] = {
"message": message,
"categories": self.categories,
"date": key,
}
if preset_varname:
mem[key].update({"presetname": preset_varname})
tmp = AdjustableFS(self.dir / Path(key + ".json"))
tmp(stat_now)
self._memories(mem)
@@ -1637,7 +1637,7 @@ class LowtemperatureSurfaceDiffraction(Assembly):
self._append(
DetectorPvDataStream,
"SLAAR21-LTIM01-EVR0:CALCZ",
name="interferrometer_value"
name="interferrometer_value",
)
self._append(
@@ -1647,10 +1647,10 @@ class LowtemperatureSurfaceDiffraction(Assembly):
name="illumination",
)
def beam_block_in(self, target=10):
def beam_block_in(self, target=7):
self.beam_block.set_target_value(target)
def beam_block_out(self, target=8):
def beam_block_out(self, target=0):
self.beam_block.set_target_value(target)
def interferrometer_in(self, target=13.35):
+326 -29
View File
@@ -17,8 +17,10 @@ from ..elements.adjustable import (
value_property,
tweak_option,
)
from ..devices_general.cameras_swissfel import CameraBasler
from cam_server import PipelineClient
from eco.devices_general.utilities import Changer
from eco.devices_general.pipelines_swissfel import Pipeline
from ..epics.adjustable import AdjustablePv, AdjustablePvEnum
from ..epics.detector import DetectorPvData, DetectorPvString
from eco.detector.detectors_psi import DetectorBsStream
@@ -39,10 +41,279 @@ ureg = UnitRegistry()
class IncouplingCleanBernina(Assembly):
def __init__(self, name=None):
super().__init__(name=name)
self._append(SmaractRecord, "SARES23-LIC:MOT_16", name="tilt")
self._append(SmaractRecord, "SARES23-LIC:MOT_13", name="rotation")
self._append(SmaractRecord, "SARES23-LIC:MOT_15", name="transl_vertical")
self._append(MotorRecord, "SARES20-MF2:MOT_5", name="transl_horizontal")
self._append(
SmaractRecord,
"SARES23-LIC:MOT_16",
name="tilt",
is_setting=True,
is_display=True,
)
self._append(
SmaractRecord,
"SARES23-LIC:MOT_13",
name="rotation",
is_setting=True,
is_display=True,
)
self._append(
SmaractRecord,
"SARES23-LIC:MOT_15",
name="transl_vertical",
is_setting=True,
is_display=True,
)
self._append(
MotorRecord,
"SARES20-MF2:MOT_5",
name="transl_horizontal",
is_setting=True,
is_display=True,
)
class MidIR(Assembly):
def __init__(
self,
name=None,
pipeline_projection="Bernina_mid_IR_CEP_projection",
pipeline_analysis="Bernina_mid_IR_CEP_analysis",
pipeline_pv_writing="Bernina_mid_IR_CEP_populate_pvs",
spectrometer_pvname="SLAAR21-LCAM-CS841",
):
super().__init__(name=name)
# self._append(
# SmaractRecord,
# "SARES23-USR:MOT_1",
# name="par_ry",
# is_setting=True,
# is_display=True,
# )
# self._append(
# SmaractRecord,
# "SARES23-USR:MOT_6",
# name="par_rx",
# is_setting=True,
# is_display=True,
# )
# self._append(
# SmaractRecord,
# "SARES23-USR:MOT_4",
# name="par_z",
# is_setting=True,
# is_display=True,
# )
# self._append(
# SmaractRecord,
# "SARES23-USR:MOT_5",
# name="par_x",
# is_setting=True,
# is_display=True,
# )
# self._append(
# MotorRecord,
# "SARES20-MF1:MOT_15",
# name="par_y",
# is_setting=True,
# is_display=True,
# )
# self._append(
# MotorRecord,
# "SARES23-USR:MOT_7",
# name="mirr_z",
# is_setting=True,
# is_display=True,
# )
self._append(
MotorRecord,
"SLAAR21-LMTS-SMAR1:MOT_2",
name="wedge_prism",
is_setting=True,
is_display=True,
)
# self._append(
# MotorRecord,
# "SARES23-USR:MOT_2",
# name="power_check",
# is_setting=True,
# is_display=True,
# )
self._append(
MotorRecord,
"SLAAR21-LMOT-M521:MOTOR_1",
name="delaystage_cep",
is_setting=True,
)
self._append(
DelayTime,
self.delaystage_cep,
name="delay_cep",
is_setting=True,
)
self._append(
CameraBasler,
"SLAAR21-LCAM-CS841",
name="camera_spectrometer",
camserver_alias="MIR_CEP",
is_setting=True,
)
self._append(
AdjustablePv,
pvsetname="SLAAR21-SPATTT:AT",
name="feedback_setpoint",
accuracy=10,
is_setting=True,
)
self._append(
AdjustablePv,
pvsetname="SFBEB01-LGEN-MIR_CEP:FB_ON_GLOBAL",
name="feedback_enabled",
accuracy=10,
is_setting=True,
)
self._append(
DetectorBsStream,
"SARES20-CEP01:fit_amplitude",
cachannel="SLAAR21-MIRCEP:AMPLITUDE",
name="fit_amplitude",
is_setting=False,
is_display=True,
)
self._append(
DetectorBsStream,
"SARES20-CEP01:fit_phase",
cachannel="SLAAR21-MIRCEP:PHASE",
name="fit_phase",
is_setting=False,
is_display=True,
)
self._append(
DetectorBsStream,
"SARES20-CEP01:fit_x0",
cachannel="SLAAR21-MIRCEP:X0",
name="fit_arrival_time",
is_setting=False,
is_display=True,
)
self._append(
DetectorBsStream,
"SARES20-CEP01:fit_fwhm",
cachannel="SLAAR21-MIRCEP:FWHM",
name="fit_fwhm",
is_setting=False,
is_display=True,
)
self._append(
DetectorBsStream,
"SARES20-CEP01:fit_frequency",
cachannel="SLAAR21-MIRCEP:FREQUENCY",
name="fit_frequency",
is_setting=False,
is_display=True,
)
self._append(
DetectorBsStream,
"SARES20-CEP01:spectrometer_background",
cachannel=None,
name="spectrometer_background",
is_setting=False,
is_display=True,
)
self._append(
DetectorBsStream,
"SARES20-CEP01:spectrometer_signal",
cachannel=None,
name="spectrometer_signal",
is_setting=False,
is_display=True,
)
self._append(
DetectorBsStream,
"SARES20-CEP01:spectrometer_ratio",
cachannel=None,
name="spectrometer_ratio",
is_setting=False,
is_display=True,
)
self._append(
DetectorBsStream,
"SARES20-CEP01:spectrometer_correlation",
cachannel=None,
name="spectrometer_correlation",
is_setting=False,
is_display=True,
)
self._append(
DetectorBsStream,
"SARES20-CEP01:spectrometer_time",
cachannel=None,
name="spectrometer_time",
is_setting=False,
is_display=True,
)
self._append(
DetectorBsStream,
"SARES20-CEP01:fft_frequency",
cachannel=None,
name="fft_frequency",
is_setting=False,
is_display=True,
)
self._append(
DetectorBsStream,
"SARES20-CEP01:fft_amplitude_abs",
cachannel=None,
name="fft_amplitude_abs",
is_setting=False,
is_display=True,
)
self._append(
DetectorBsStream,
"SARES20-CEP01:fft_amplitude_real",
cachannel=None,
name="fft_amplitude_real",
is_setting=False,
is_display=True,
)
self._append(
DetectorBsStream,
"SARES20-CEP01:fft_amplitude_imag",
cachannel=None,
name="fft_amplitude_imag",
is_setting=False,
is_display=True,
)
self.proc_client = PipelineClient()
try:
self.pipeline_projection = pipeline_projection
self._append(
Pipeline,
self.pipeline_projection,
name="pipeline_projection",
is_setting=False,
)
except Exception as e:
print(f"Mid-IR projection pipeline initialization failed with: \n{e}")
try:
self.pipeline_analysis = pipeline_analysis
self._append(
Pipeline,
self.pipeline_analysis,
name="pipeline_analysis",
is_setting=False,
)
except Exception as e:
print(f"Mid-IR analysis pipeline initialization failed with: \n{e}")
try:
self.pipeline_pv_writing = pipeline_pv_writing
self._append(
Pipeline,
self.pipeline_pv_writing,
name="pipeline_pv_writing",
is_setting=False,
)
except Exception as e:
print(f"Timetool pv writing pipeline initialization failed with: \n{e}")
class Spectrometer(Assembly):
@@ -614,6 +885,12 @@ class LaserBernina(Assembly):
self._append(
MotorRecord, self.pvname + "-M534:MOT", name="wp_att", is_setting=True
)
self._append(
MotorRecord,
self.pvname + "-M548:MOT",
name="switch_35to100fs",
is_setting=True,
)
try:
self.motor_configuration_thorlabs = {
"wp_uv": {
@@ -651,14 +928,14 @@ class LaserBernina(Assembly):
filters = np.array(
[
[1, 330],
[0.872863247863248, 15],
[0.692521367521367, 60],
[0.549038461538462, 105],
[0.432051282051282, 150],
[0.333653846153846, 195],
[0.251188643, 240],
[0.111538461538462, 285],
[1, 0],
[0.872863247863248, 45],
[0.692521367521367, 90],
[0.549038461538462, 135],
[0.432051282051282, 180],
[0.333653846153846, 225],
[0.251188643, 270],
[0.111538461538462, 315],
]
)
@@ -789,22 +1066,42 @@ class LaserBernina(Assembly):
name="delay_frog",
is_setting=True,
)
# self._append(
# SmaractRecord,
# "SLAAR21-LMTS-SMAR1:MOT_3",
# name="delaystage_ir",
# is_setting=True,
# )
# self._append(
# DelayTime,
# self.delaystage_frog,
# name="delay_ir",
# is_setting=True,
# )
# self._append(
# AdjustableVirtual, [self.wp_ir], wp2uJ, uJ2wp, name="pulse_energy_ir"
# )
# self._append(
# MotorRecord,
# "SLAAR21-LMOT-M552:MOT",
# name="delaystage_compensation",
# is_setting=True,
# )
self._append(
DelayTime,
self.delaystage_pump,
name="delay_compensation",
is_setting=True,
)
self._append(
MotorRecord,
"SLAAR21-LMOT-M555:MOT",
name="delaystage_m2",
is_setting=True,
)
self._append(
DelayTime,
self.delaystage_pump,
name="delay_m2",
is_setting=True,
)
self._append(
MotorRecord,
"SLAAR21-LMOT-M521:MOTOR_1",
name="delaystage_eos",
is_setting=True,
)
self._append(
DelayTime,
self.delaystage_eos,
name="delay_eos",
is_setting=True,
)
class DelayTime(AdjustableVirtual):
+1
View File
@@ -34,6 +34,7 @@ class TtProcessor:
ix = m.data.pulse_id
prof = m.data.data[self.channel_proj].value
if prof is None:
continue
+62 -14
View File
@@ -1,5 +1,4 @@
from eco.detector.detectors_psi import DetectorBsStream
from eco.epics.detector import DetectorPvDataStream
from eco.devices_general.pipelines_swissfel import Pipeline
from eco.devices_general.spectrometers import SpectrometerAndor
from eco.microscopes.microscopes import FeturaPlusZoom
@@ -258,7 +257,7 @@ class TimetoolBerninaUSD(Assembly):
print(f"Andor spectrometer initialization failed with: \n{e}")
def get_calibration_values(
self, seconds=5, scan_range=.8e-12, plot=False, pipeline=True, to_elog=False
self, seconds=5, scan_range=0.8e-12, plot=False, pipeline=True, to_elog=False
):
t0 = self.delay()
x = np.linspace(t0 - scan_range / 2, t0 + scan_range / 2, 20)
@@ -270,25 +269,25 @@ class TimetoolBerninaUSD(Assembly):
print(f"Moving to {pos*1e15} fs")
self.delay.set_target_value(pos).wait()
if pipeline:
#needed due to delay of data arrival
# needed due to delay of data arrival
sleep(5)
ys = self.edge_position_px.acquire(seconds=seconds).wait()
y.append(ys)
ymean.append(np.mean(ys))
yerr.append(np.std(ys)/np.sqrt(len(ys)))
yerr.append(np.std(ys) / np.sqrt(len(ys)))
except Exception as e:
print(e)
print(f"Moving back to inital value of {t0}")
self.delay.set_target_value(t0)
p = np.polyfit(ymean, x, 2, w=1/np.array(yerr))
p = np.polyfit(ymean, x, 2, w=1 / np.array(yerr))
fpath = ""
if plot:
binmin = np.min([np.min(step) for step in y])
binmax = np.max([np.max(step) for step in y])
bins = np.arange(binmin, binmax, 1)
bins_center = bins[:-1]+0.5
hists = np.array([np.histogram(step, bins=bins)[0]for step in y]).T
bins_center = bins[:-1] + 0.5
hists = np.array([np.histogram(step, bins=bins)[0] for step in y]).T
plt.close("tt_calib")
fig = plt.figure("tt_calib")
plt.pcolor(x, bins_center, hists)
@@ -305,8 +304,8 @@ class TimetoolBerninaUSD(Assembly):
if to_elog:
try:
msg = f"<h1>Timetool calibration results:</h1>\n"
msg+= f"Polynomial fit c0*edge_pos(px)^2 + c1*edge_pos(px) + c2:\n {p} \n\n"
msg+= self.target_stages.__repr__()
msg += f"Polynomial fit c0*edge_pos(px)^2 + c1*edge_pos(px) + c2:\n {p} \n\n"
msg += self.target_stages.__repr__()
elog = self._get_elog()
elog.post(msg.replace("\n", "<br>"), fpath)
except Exception as e:
@@ -321,7 +320,7 @@ class TimetoolBerninaUSD(Assembly):
old_calib = self.pipeline_edgefinding.config.calibration()
self.pipeline_edgefinding.config.calibration.set_target_value(p).wait()
msg = f"Updated timetool processing pipeline calibration:\n"
msg+= f"old values: {old_calib} \nnew values: {p}"
msg += f"old values: {old_calib} \nnew values: {p}"
else:
self.calibration.const_E.set_target_value(p[0]).wait()
self.calibration.const_F.set_target_value(p[1]).wait()
@@ -334,7 +333,10 @@ class TimetoolBerninaUSD(Assembly):
except Exception as e:
print(f"Elog posting failed with:\n {e}")
def calibrate(self, seconds=5, scan_range=1e-12, plot=True, pipeline=True, to_elog=True):
def calibrate(
self, seconds=5, scan_range=1e-12, plot=True, pipeline=True, to_elog=True
):
feedback = self.feedback_enabled()
t0 = self.delay()
if abs(t0) > 50e-15:
ans = ""
@@ -347,10 +349,20 @@ class TimetoolBerninaUSD(Assembly):
continue
if ans == "n":
return
if feedback:
self.feedback_enabled(0)
print("Feedback turned off")
p, ys = self.get_calibration_values(
seconds=seconds, scan_range=scan_range, plot=plot, to_elog=to_elog, pipeline=pipeline
seconds=seconds,
scan_range=scan_range,
plot=plot,
to_elog=to_elog,
pipeline=pipeline,
)
self.set_calibration_values(p, pipeline=pipeline, to_elog=to_elog)
if feedback:
self.feedback_enabled(1)
print("Feedback turned on")
def get_online_data(self):
self.online_monitor = TtProcessor()
@@ -363,6 +375,31 @@ class TimetoolBerninaUSD(Assembly):
print(f"... done, starting online plot.")
self.online_monitor.plot_animation()
def start_camera_restarter(self):
    """Watchdog loop that restarts the timetool camera when its data rate drops.

    Polls the projection pipeline transmit rate (tx) about once per second.
    When the rate falls below 10 Hz, the spectrometer camera config is
    stopped (so the camserver brings it back up) and the restart is
    reported to the elog. Runs forever; meant to be launched in its own
    thread/terminal session.
    """
    print(f"Starting camera restarter ...")
    from time import sleep

    while True:
        sleep(1)
        try:
            # tx string looks like "<number>Hz..."; keep the numeric part.
            tx = float(
                self.pipeline_projection.info.statistics.tx().split("Hz")[0]
            )
        except Exception as e:
            print(f"Could not read projection pipeline tx")
            print(e)
            # BUG FIX: without this `continue`, `tx` is unbound on the first
            # failed read (NameError at the `if tx < 10` check below) or
            # stale on later iterations, triggering restarts on old data.
            continue
        if tx < 10:
            try:
                self.camera_spectrometer.config_cs.stop()
            except Exception as e:
                print(e)
            try:
                self._get_elog().post(
                    f"<h1>tx was {tx}: Automatically restarted timetool camera</h1>"
                )
                print(f"tx was {tx}Hz: Automatically restarted timetool camera")
            except Exception as e:
                print(e)
            # Give the camera time to come back before polling again.
            sleep(120)
def get_proc_config(self):
return self.proc_client.get_pipeline_config(self.proc_pipeline)
@@ -392,16 +429,27 @@ class TimetoolBerninaUSD(Assembly):
fs_pv = PV(self.edge_position_fs.pvname)
px_pv = PV(self.edge_position_px.pvname)
mx_pv = PV(self.edge_amplitude.pvname)
with source(channels=[self.edge_position_fs.bs_channel, self.edge_position_px.bs_channel, self.edge_amplitude.bs_channel]) as s:
with source(
channels=[
self.edge_position_fs.bs_channel,
self.edge_position_px.bs_channel,
self.edge_amplitude.bs_channel,
]
) as s:
while True:
d = s.receive()
fs, px, mx = [[d.data.data[self.edge_position_fs.bs_channel].value], d.data.data[self.edge_position_px.bs_channel].value, d.data.data[self.edge_amplitude.bs_channel].value]
fs, px, mx = [
[d.data.data[self.edge_position_fs.bs_channel].value],
d.data.data[self.edge_position_px.bs_channel].value,
d.data.data[self.edge_amplitude.bs_channel].value,
]
if not fs:
continue
fs_pv.put(fs)
px_pv.put(px)
mx_pv.put(mx)
class DelayTime(AdjustableVirtual):
def __init__(
self, stage, direction=1, passes=2, reset_current_value_to=True, name=None
+109 -46
View File
@@ -81,7 +81,7 @@ class Crystals(Assembly):
self._append(
AdjustableFS,
f"/photonics/home/gac-bernina/eco/configuration/crystals/{name}_list",
name="crystal_list",
name="_crystal_list",
default_value={},
is_setting=True,
)
@@ -119,7 +119,7 @@ class Crystals(Assembly):
is_setting=False,
is_display="recursive",
)
for key, meta in self.crystal_list().items():
for key, meta in self._crystal_list().items():
if self.diffractometer.name in meta:
self._append(
DiffGeometryYou,
@@ -156,7 +156,7 @@ class Crystals(Assembly):
)
in_name = np.array(
[s in name for s in specials]
+ [s == name for s in list(self.crystal_list().keys())]
+ [s == name for s in list(self._crystal_list().keys())]
)
if name == "":
print(f"Name cannot be empty.")
@@ -166,7 +166,7 @@ class Crystals(Assembly):
)
elif np.any(in_name[len(specials) :]):
print(
f"Name {np.array(list(self.crystal_list().keys()))[in_name[len(specials):]]} already exists."
f"Name {np.array(list(self._crystal_list().keys()))[in_name[len(specials):]]} already exists."
)
else:
break
@@ -179,17 +179,17 @@ class Crystals(Assembly):
is_setting=True,
is_display=False,
)
crystals = self.crystal_list()
crystals = self._crystal_list()
crystals[name] = [str(datetime.now()), self.diffractometer.name]
self.crystal_list.mv(crystals)
self._crystal_list.mv(crystals)
self.__dict__[name].new_ub()
def delete_crystal(self, name=None):
"""
Delete crystal with a given name, deletes also the files.
"""
crystal_names = list(self._crystal_list().keys())
if name == None:
crystal_names = list(self.crystal_list().keys())
input_message = "Select the crystal to delete:\nq) quit\n"
for index, crystal in enumerate(crystal_names):
input_message += f"{index:2}) {crystal:15}\n"
@@ -206,18 +206,21 @@ class Crystals(Assembly):
continue
print(f"Selected crystal: {crystal_names[idx]}")
name = crystal_names[idx]
elif not name in crystal_names:
print(f"Crystal {name} has not been defined.")
return
sure = "n"
sure = input(
f"are you sure you want to permanently remove the crystal {name} and its UB matrix and memories (y/n)? "
)
if sure == "y":
crystals = self.crystal_list()
crystals = self._crystal_list()
meta = crystals[name]
if self.diffractometer.name in meta:
self.deactivate_crystal(name=name)
removed = crystals.pop(name)
del removed
self.crystal_list.mv(crystals)
self._crystal_list.mv(crystals)
attrs = [
"unit_cell",
"u_matrix",
@@ -233,21 +236,25 @@ class Crystals(Assembly):
os.remove(
f"/photonics/home/gac-bernina/eco/configuration/crystals/{name}_{a}"
)
print(f"Deleted crystal {name}.")
else:
print(f"Aborted deletion of crystal {name}.")
def activate_crystal(self, name=None):
crystals = self.crystal_list()
crystals = self._crystal_list()
inactive_crystals = [
k for k in crystals.keys() if not self.diffractometer.name in crystals[k]
]
active_crystals = [
k for k in crystals.keys() if self.diffractometer.name in crystals[k]
]
if name == None:
inactive_crystals = [
k
for k in crystals.keys()
if not self.diffractometer.name in crystals[k]
]
input_message = "Select the crystal to activate:\nq) quit\n"
for index, crystal in enumerate(inactive_crystals):
input_message += f"{index:2}) {crystal:15}\n"
idx = ""
input_message += "Your choice: "
while idx not in range(len(crystal_names)):
while idx not in range(len(inactive_crystals)):
idx = input(input_message)
if idx == "q":
return
@@ -256,8 +263,18 @@ class Crystals(Assembly):
idx = int(idx)
except:
continue
print(f"Activated crystal: {inactive_crystals[idx]}")
name = inactive_crystals[idx]
elif name in active_crystals:
print(
f"Crystal {name} is already active. Use diffcalc.{name} to start calculations."
)
return
elif name not in inactive_crystals:
print(
f"Crystal {name} has not been defined yet. Use diffcalc.create_crystal() to create a new crystal."
)
return
self._append(
DiffGeometryYou,
diffractometer_you=self.diffractometer,
@@ -270,10 +287,17 @@ class Crystals(Assembly):
if not self.diffractometer.name in meta:
meta = meta + [self.diffractometer.name]
crystals[name] = meta
self.crystal_list.mv(crystals)
self._crystal_list.mv(crystals)
print(f"Activated crystal: {name}")
def deactivate_crystal(self, name=None):
crystals = self.crystal_list()
crystals = self._crystal_list()
inactive_crystals = [
k for k in crystals.keys() if not self.diffractometer.name in crystals[k]
]
active_crystals = [
k for k in crystals.keys() if self.diffractometer.name in crystals[k]
]
if name == None:
active_crystals = [
k for k in crystals.keys() if self.diffractometer.name in crystals[k]
@@ -283,7 +307,7 @@ class Crystals(Assembly):
input_message += f"{index:2}) {crystal:15}\n"
idx = ""
input_message += "Your choice: "
while idx not in range(len(crystal_names)):
while idx not in range(len(active_crystals)):
idx = input(input_message)
if idx == "q":
return
@@ -294,15 +318,22 @@ class Crystals(Assembly):
continue
print(f"Selected crystal: {active_crystals[idx]}")
name = active_crystals[idx]
elif name in inactive_crystals:
print(f"Crystal {name} is already inactive.")
return
elif name not in active_crystals:
print(f"Crystal {name} has not been defined, yet.")
return
meta = crystals[name]
if self.diffractometer.name in meta:
i = meta.index(self.diffractometer.name)
meta.pop(i)
crystals[name] = meta
self.crystal_list.mv(crystals)
self._crystal_list.mv(crystals)
removed = self.__dict__.pop(name)
self.alias.pop_object(removed.alias)
del removed
print(f"Deactivated crystal: {name}")
class DiffGeometryYou(Assembly):
@@ -382,15 +413,15 @@ class DiffGeometryYou(Assembly):
adjs = ["gamma", "mu", "delta", "eta_kap", "kappa", "phi_kap"]
self._diff_adjs = {}
self._diff_adjs_constrained = {}
for adj in adjs:
if adj in adjustables_dict.keys():
if adjustables_dict[adj].__class__ is not DetectorVirtual:
self._diff_adjs[adj] = adjustables_dict[adj]
else:
self._diff_adjs[adj] = self.constraints.__dict__[adj]
self._diff_adjs_constrained[adj] = self.constraints.__dict__[adj]
else:
self._diff_adjs[adj] = self.constraints.__dict__[adj]
print(adj)
self._diff_adjs_constrained[adj] = self.constraints.__dict__[adj]
self.diffractometer._append(
DetectorVirtual,
[self.constraints.__dict__[adj]],
@@ -421,17 +452,29 @@ class DiffGeometryYou(Assembly):
def get_hkl(*args, **kwargs):
return self.calc_hkl()
def set_hkl(val):
return self._calc_angles_unique_diffractometer(val)
self._append(
AdjustableVirtual, list(self._diff_adjs.values()), get_h, set_h, name="h"
AdjustableVirtual,
list(self._diff_adjs.values()),
get_h,
set_h,
name="h",
change_simultaneously=True,
)
self._append(
AdjustableVirtual, list(self._diff_adjs.values()), get_k, set_k, name="k"
AdjustableVirtual,
list(self._diff_adjs.values()),
get_k,
set_k,
name="k",
change_simultaneously=True,
)
self._append(
AdjustableVirtual, list(self._diff_adjs.values()), get_l, set_l, name="l"
AdjustableVirtual,
list(self._diff_adjs.values()),
get_l,
set_l,
name="l",
change_simultaneously=True,
)
self._append(
AdjustableVirtual,
@@ -469,9 +512,15 @@ class DiffGeometryYou(Assembly):
return gamma, mu, delta, eta, chi, phi
def get_diffractometer_angles(self):
### assume that all angles exist in diffractometer at least as manual adjustable ###
### assume that all angles exist in self._diff_adjs ###
gamma, mu, delta, eta, chi, phi = self.convert_to_you(
**{key: adj() for key, adj in self._diff_adjs.items()}
**{
key: adj()
for key, adj in {
**self._diff_adjs,
**self._diff_adjs_constrained,
}.items()
}
)
return mu, delta, gamma, eta, chi, phi
@@ -486,16 +535,11 @@ class DiffGeometryYou(Assembly):
target_dict = self.convert_from_you(**kwargs)
for axname, target_value in target_dict.items():
adj = self._diff_adjs[axname]
if (
adj not in self.constraints.__dict__.values()
): ### make sure it is a real adjustable of the diffractometer and not a constraint
if hasattr(adj, "get_limits"):
lim_low, lim_high = adj.get_limits()
in_lims.append(
(lim_low < target_value) and (target_value < lim_high)
)
else:
raise Exception(f"Failed to get limits of adjustable {adj.name}")
if hasattr(adj, "get_limits"):
lim_low, lim_high = adj.get_limits()
in_lims.append((lim_low < target_value) and (target_value < lim_high))
else:
raise Exception(f"Failed to get limits of adjustable {adj.name}")
return all(in_lims)
def new_ub(self):
@@ -589,7 +633,9 @@ class DiffGeometryYou(Assembly):
def add_reflection(
self,
hkl,
h,
k,
l,
mu=None,
delta=None,
gamma=None,
@@ -599,14 +645,17 @@ class DiffGeometryYou(Assembly):
energy=None,
tag=None,
):
"""Add a reference reflection.
"""
Example: add_reflection(2,2,0)
Add a reference reflection.
Adds a reflection position in degrees and in the systems internal
representation.
Parameters
----------
hkl : Tuple[float, float, float]
h, k, l : float
hkl index of the reflection
mu, delta, gamma, eta, chi, phi: float
diffractometer angles in degrees, if not given, the current diffractometer angles are used
@@ -615,6 +664,10 @@ class DiffGeometryYou(Assembly):
tag : Optional[str], default = None
identifying tag for the reflection
"""
hkl = [h, k, l]
if np.any([hasattr(i, "__len__") for i in [h, k, l]]):
print("Please enter hkl as touple: add_reflection(h, k, l)")
return
setvals = [mu, delta, gamma, eta, chi, phi]
curvals = self.get_diffractometer_angles()
angs = [
@@ -663,6 +716,12 @@ class DiffGeometryYou(Assembly):
tag : str
identifying tag for the reflection
"""
if not hasattr(hkl, "__len__"):
print("Please enter hkl as touple or list: add_orientation([h, k, l])")
return
if not hasattr(xyz, "__len__"):
print("Please enter xyz as touple or list: add_orientation([x, y, z])")
return
orientations = self.orientations() + [
{"hkl": hkl, "xyz": xyz, "position": position, "tag": tag}
]
@@ -953,7 +1012,11 @@ class DiffGeometryYou(Assembly):
if energy is None:
energy = self.get_energy()
lam = self.en2lam(energy)
cons = Constraints(self.constraints._base_dict())
constraints_dict = {
"nu" if k == "gamma" else k: v
for k, v in self.constraints._base_dict().items()
}
cons = Constraints(constraints_dict)
hklcalc = HklCalculation(self.ubcalc, cons)
try:
hkl = hklcalc.get_hkl(pos=pos, wavelength=lam)
+133 -149
View File
@@ -6,6 +6,7 @@ from ..elements.adjustable import AdjustableFS
from ..elements.memory import Memory
from subprocess import call
from eco.utilities.config import Proxy
from eco.bernina import namespace
warnings.simplefilter(action="ignore", category=pd.errors.PerformanceWarning)
warnings.simplefilter(action="ignore", category=UserWarning)
@@ -283,12 +284,13 @@ class Run_Table2:
self,
data=None,
exp_id="no_exp_id",
exp_path="runtable",
exp_path="runtable/",
keydf_fname=None,
cred_fname=None,
devices=None,
name=None,
gsheet_key_path=None,
parse=True,
):
self._data = Run_Table_DataFrame(
data=data,
@@ -296,6 +298,7 @@ class Run_Table2:
exp_path=exp_path,
devices=devices,
name=name,
parse=parse,
)
if np.all([k is not None for k in [keydf_fname, cred_fname, gsheet_key_path]]):
@@ -316,20 +319,42 @@ class Run_Table2:
runno,
metadata,
d={},
wait=False,
):
ar = threading.Thread(target=self._append_run, args=(runno, metadata,), kwargs={"d": d})
ar.start()
if wait:
ar.join()
def _append_run(
self,
runno,
metadata,
d={},
):
self._data.append_run(runno, metadata, d=d)
if self._google_sheet_api is not None:
df = self._reduce_df()
self._google_sheet_api.upload_all(df=df)
self._google_sheet_api._upload_all(df=df)
def append_pos(
self,
name,
wait=False,
):
ar = threading.Thread(target=self._append_pos, args=(name,))
ar.start()
if wait:
ar.join()
def _append_pos(
self,
name,
):
self._data.append_pos(name)
if self._google_sheet_api is not None:
df = self._reduce_df()
self._google_sheet_api.upload_all(df=df)
self._google_sheet_api._upload_all(df=df)
def to_dataframe(self):
return DataFrame(self._data)
@@ -426,18 +451,21 @@ class Run_Table_DataFrame(DataFrame):
exp_path=None,
devices=None,
name=None,
parse=True,
):
if type(data) is str:
data = pd.read_pickle(data)
super().__init__(data=data)
### Load devices to parse for adjustables ###
if devices is not None:
devices = eco.__dict__[devices]
if type(devices) == str:
import importlib
devices = importlib.import_module(devices)
self.devices = devices
self.name = name
self.fname = exp_path + f"{exp_id}_runtable.pkl"
self.load()
self.parse = parse
### dicts holding adjustables and bad (not connected) adjustables ###
self.adjustables = {}
@@ -526,22 +554,17 @@ class Run_Table_DataFrame(DataFrame):
d={},
):
self.load()
if len(self.adjustables) == 0:
if np.all([len(self.ids_parsed) == 0, self.parse]):
self._parse_parent()
dat = self._get_adjustable_values(d=d)
dat["metadata"] = metadata
dat["metadata"]["time"] = datetime.now()
names = ["device", "adjustable"]
multiindex = pd.MultiIndex.from_tuples(
[(dev, adj) for dev in dat.keys() for adj in dat[dev].keys()], names=names
)
dat["metadata.time"] = datetime.now()
dat.update({"metadata."+k:v for k, v in metadata.items()})
values = np.array(
[val for adjs in dat.values() for val in adjs.values()], dtype=object
list(dat.values()), dtype=object
)
index = np.array(
[f"{dev}.{adj}" for dev, adjs in dat.items() for adj in adjs.keys()]
list(dat.keys())
)
# run_df = DataFrame([values], columns=multiindex, index=[runno])
run_df = DataFrame([values], columns=index, index=[runno])
# deprecated: self.df = self.append(run_df)
self.df = pd.concat([self.df, run_df])
@@ -552,7 +575,7 @@ class Run_Table_DataFrame(DataFrame):
def append_pos(self, name="", d={}):
self.load()
if len(self.adjustables) == 0:
if np.all([len(self.ids_parsed) == 0, self.parse]):
self._parse_parent()
try:
posno = (
@@ -561,18 +584,13 @@ class Run_Table_DataFrame(DataFrame):
except:
posno = 0
dat = self._get_adjustable_values(d=d)
dat["metadata"] = {"time": datetime.now(), "name": name, "type": "pos"}
names = ["device", "adjustable"]
multiindex = pd.MultiIndex.from_tuples(
[(dev, adj) for dev in dat.keys() for adj in dat[dev].keys()], names=names
)
dat.update({"metadata.time": datetime.now(), "metadata.name": name, "metadata.type": "pos"})
values = np.array(
[val for adjs in dat.values() for val in adjs.values()], dtype=object
list(dat.values()), dtype=object
)
index = np.array(
[f"{dev}.{adj}" for dev, adjs in dat.items() for adj in adjs.keys()]
list(dat.keys())
)
# pos_df = DataFrame([values], columns=multiindex, index=[f"p{posno}"])
pos_df = DataFrame([values], columns=index, index=[f"p{posno}"])
# deprecated: self.df = self.append(pos_df)
@@ -581,18 +599,18 @@ class Run_Table_DataFrame(DataFrame):
# self.order_df()
self.save()
def _get_adjustable_values(self, silent=False, d={}, by_id=True):
def _get_adjustable_values(self, silent=False, d={}, by_id=True, multiindex=False):
"""
This function gets the values of all adjustables in good adjustables and raises an error, when an adjustable is not connected anymore
"""
dat = {}
if by_id:
if self.parse:
for aid, adict in self.ids_parsed.items():
if aid in self.ids_bad:
continue
if not "value" in adict.keys():
continue
## try getting the value from the dict passes from the status
## try getting the value from the dict passed from the status
v = None
for name in adict["names"]:
if "bernina." + name in d.keys():
@@ -607,6 +625,7 @@ class Run_Table_DataFrame(DataFrame):
)
self.ids_bad.append(aid)
continue
if multiindex:
for name in adict["names"]:
devname = name.split(".")[0]
adjname = name[len(devname) + 1 :]
@@ -614,45 +633,28 @@ class Run_Table_DataFrame(DataFrame):
dat[devname].update({adjname: v})
else:
dat.update({devname: {adjname: v}})
else:
for name in adict["names"]:
dat[name]=v
else:
if silent:
for devname, dev in self.good_adjustables.items():
dat[devname] = {}
bad_adjs = []
for adjname, adj in dev.items():
if f"{devname}.{adjname}" in d.keys():
dat[devname][adjname] = d[f"{devname}.{adjname}"]
# print(f"{devname}.{adjname}")
continue
try:
dat[devname][adjname] = adj.get_current_value()
except:
print(
f"run_table: getting value of {devname}.{adjname} failed, removing it from list of good adjustables"
)
bad_adjs.append(adjname)
for ba in bad_adjs:
if not devname in self.bad_adjustables.keys():
self.bad_adjustables[devname] = {}
self.bad_adjustables[devname][ba] = self.good_adjustables[
devname
].pop(ba)
if len(d)==0:
st = namespace.get_status(base=None)
d = st["status"]
d.update(st["settings"])
if multiindex:
for name in d.keys():
devname = name.split(".")[0]
adjname = name[len(devname) + 1 :]
if devname in dat.keys():
dat[devname].update({adjname: v})
else:
dat.update({devname: {adjname: v}})
else:
dat = {
devname: {
adjname: (
d[f"{devname}.{adjname}"]
if f"{devname}.{adjname}" in d.keys()
else adj.get_current_value()
)
for adjname, adj in dev.items()
}
for devname, dev in self.good_adjustables.items()
}
dat = {k[len(k.split(".")[0])+1:] : v for k,v in d.items()}
return dat
def _get_all_adjustables(
self, device, adj_prefix=None, parent_name=None, verbose=False
self, device, adj_prefix=None, parent_name=None, verbose=False, exclude_keys=[], adjustable_exclude_class_types=[], foo_get_current_value="get_current_value"
):
if verbose:
print(f"\nparsing children of {parent_name}")
@@ -677,17 +679,17 @@ class Run_Table_DataFrame(DataFrame):
self.ids_parsed[adj_id]["names"].append(k)
return
for key in device.__dict__.keys():
if ~np.any([s in key for s in self._parse_exclude_keys]):
if ~np.any([s in key for s in exclude_keys]):
value = device.__dict__[key]
if np.all(
[
~np.any(
[
s in str(type(value))
for s in self._adj_exclude_class_types
for s in adjustable_exclude_class_types
]
),
hasattr(value, "get_current_value"),
hasattr(value, foo_get_current_value),
]
):
## create device entry only if it has adjustables
@@ -696,59 +698,45 @@ class Run_Table_DataFrame(DataFrame):
self.ids_parsed[id(device)]["ids"] = []
else:
self.ids_parsed.update({id(device): {"ids": []}})
# if parent_name == name:
# k = key
# else:
k = ".".join([name, key])
self.adjustables[parent_name][k] = value
if id(value) in self.ids_parsed.keys():
if "value" in self.ids_parsed[id(value)]:
self.ids_parsed[id(device)]["ids"].append(id(value))
self.ids_parsed[id(value)]["names_parent"].append(name)
# self.ids_parsed[id(value)]["names"].append(".".join([parent_name, k]))
self.ids_parsed[id(value)]["names"].append(k)
continue
self.ids_parsed[id(device)]["ids"].append(id(value))
self.ids_parsed[id(value)] = {}
self.ids_parsed[id(value)]["names_parent"] = [name]
self.ids_parsed[id(value)]["name"] = key
# self.ids_parsed[id(value)]["names"] = [".".join([parent_name, k])]
self.ids_parsed[id(value)]["names"] = [k]
self.ids_parsed[id(value)]["value"] = value
if parent_name == name:
## only a fix to record get_current_values() of top level devices
if hasattr(device, "get_current_value"):
## create device entry only if it has adjustables
if id(device) in self.ids_parsed.keys():
if not "ids" in self.ids_parsed[id(device)].keys():
self.ids_parsed[id(device)]["ids"] = []
else:
self.ids_parsed.update({id(device): {"ids": []}})
self.adjustables[parent_name]["self"] = device
self.ids_parsed[id(device)]["ids"].append(id(device))
self.ids_parsed[id(device)]["names_parent"] = [name]
self.ids_parsed[id(device)]["name"] = "self"
self.ids_parsed[id(device)]["names"] = [".".join([name, "self"])]
self.ids_parsed[id(device)]["value"] = device
#if parent_name == name:
## only a fix to record get_current_values() of top level devices
#if hasattr(device, foo_get_current_value):
# ## create device entry only if it has adjustables
# if id(device) in self.ids_parsed.keys():
# if not "ids" in self.ids_parsed[id(device)].keys():
# self.ids_parsed[id(device)]["ids"] = []
# else:
# self.ids_parsed.update({id(device): {"ids": []}})
# self.ids_parsed[id(device)]["ids"].append(id(device))
# self.ids_parsed[id(device)]["names_parent"] = [name]
# self.ids_parsed[id(device)]["name"] = "value"
# self.ids_parsed[id(device)]["names"] = [".".join([name, "value"])]
# self.ids_parsed[id(device)]["value"] = device
def _parse_child_instances(
self, parent_class, adj_prefix=None, parent_name=None, verbose=False
self, parent_class, adj_prefix=None, parent_name=None, verbose=False, exclude_keys=[], parse_exclude_class_types=[], adjustable_exclude_class_types=[], is_eco=True, foo_get_current_value="get_current_value"
):
# check if the parent_class was already parsed in its parents
if adj_prefix is not None:
if parent_class.name in adj_prefix:
return []
self._get_all_adjustables(
parent_class, adj_prefix, parent_name, verbose=verbose
parent_class, adj_prefix, parent_name, verbose=verbose, exclude_keys=exclude_keys, adjustable_exclude_class_types=adjustable_exclude_class_types, foo_get_current_value=foo_get_current_value
)
## The lines below cause recursion problems because the parent class name is never added to adj_prefix, resulting in names such as rob.joint.j1.spherical.x
# if parent_name is not parent_class.name:
# if adj_prefix is not None:
## Changed that because sometimes the name is not the class name, which breaks parsing
# adj_prefix = ".".join([adj_prefix, parent_class.name])
# else:
# adj_prefix = parent_class.name
if adj_prefix is not None:
adj_prefix = ".".join([adj_prefix, parent_class.name])
else:
@@ -756,18 +744,22 @@ class Run_Table_DataFrame(DataFrame):
sub_classes = []
for key in parent_class.__dict__.keys():
if ~np.any([s in key for s in self._parse_exclude_keys]):
if ~np.any([s in key for s in exclude_keys]):
s_class = parent_class.__dict__[key]
if np.all(
[
hasattr(s_class, "name"),
hasattr(s_class, "__dict__"),
s_class.__hash__ is not None,
reqs = []
if is_eco:
reqs = [
"eco" in str(s_class.__class__),
]
if np.all(
reqs +
[
hasattr(s_class, "__dict__"),
s_class.__hash__ is not None,
~np.any(
[
s in str(s_class.__class__)
for s in self._parse_exclude_class_types
for s in parse_exclude_class_types
]
),
]
@@ -778,43 +770,60 @@ class Run_Table_DataFrame(DataFrame):
for s in ".".join([parent_name, adj_prefix]).split(".")
]
):
if s_class.name == None:
s_class_name = None
if hasattr(s_class, "name"):
s_class_name = s_class.name
if s_class_name == None:
s_class.name = key
sub_classes.append(s_class)
return set(sub_classes).union(
[
s
for c in sub_classes
for s in self._parse_child_instances(c, adj_prefix, parent_name)
for s in self._parse_child_instances(c, adj_prefix, parent_name, exclude_keys=exclude_keys, parse_exclude_class_types=parse_exclude_class_types, adjustable_exclude_class_types=adjustable_exclude_class_types, is_eco=is_eco, foo_get_current_value=foo_get_current_value)
]
)
def _parse_parent(self, parent=None, verbose=False):
def _parse_parent(self, parent=None, verbose=False, exclude_keys=[], parse_exclude_class_types=[], adjustable_exclude_class_types=[], is_eco=True, foo_get_current_value="get_current_value"):
if len(exclude_keys) == 0:
exclude_keys = self._parse_exclude_keys
if len(parse_exclude_class_types) == 0:
parse_exclude_class_types = self._parse_exclude_class_types
if len(adjustable_exclude_class_types) == 0:
adjustable_exclude_class_types = self._adj_exclude_class_types
self.ids_parsed = {}
if parent == None:
parent = self.devices
self.ids_parsed[id(parent)] = {"ids": []}
for key in parent.__dict__.keys():
try:
if ~np.any([s in key for s in self._parse_exclude_keys]):
if ~np.any([s in key for s in exclude_keys]):
s_class = parent.__dict__[key]
reqs = []
if is_eco:
reqs = [
"eco" in str(s_class.__class__),
]
if np.all(
reqs +
[
hasattr(s_class, "name"),
hasattr(s_class, "__dict__"),
s_class.__hash__ is not None,
"eco" in str(s_class.__class__),
~np.any(
[
s in str(s_class.__class__)
for s in self._parse_exclude_class_types
for s in parse_exclude_class_types
]
),
]
):
self.adjustables[key] = {}
s_class_name = None
if hasattr(s_class, "name"):
s_class_name = s_class.name
if s_class_name == None:
s_class.name = key
self._parse_child_instances(
s_class, parent_name=key, verbose=verbose
s_class, parent_name=key, verbose=verbose, exclude_keys=exclude_keys, parse_exclude_class_types=parse_exclude_class_types, adjustable_exclude_class_types=adjustable_exclude_class_types, is_eco=is_eco, foo_get_current_value=foo_get_current_value
)
except Exception as e:
print(e)
@@ -822,44 +831,19 @@ class Run_Table_DataFrame(DataFrame):
self._check_adjustables()
def _check_adjustables(self, check_for_current_none_values=True, by_id=True):
if by_id:
self.ids_bad = []
for aid, adict in self.ids_parsed.items():
if "value" in adict.keys():
try:
v = adict["value"].get_current_value()
except Exception as e:
self.ids_bad.append(aid)
print(
f"get_current_value() method of {adict['names']} failed with {e}"
)
continue
if check_for_current_none_values and v is None:
self.ids_bad.append(aid)
else:
good_adj = {}
bad_adj = {}
for device, adjs in self.adjustables.items():
good_dev_adj = {}
bad_dev_adj = {}
for name, adj in adjs.items():
try:
adj.get_current_value()
except Exception as e:
print(f"get_current_value() method of {name} failed with {e}")
continue
if check_for_current_none_values and (
adj.get_current_value() is None
):
bad_dev_adj[name] = adj
else:
good_dev_adj[name] = adj
if len(good_dev_adj) > 0:
good_adj[device] = good_dev_adj
if len(bad_dev_adj) > 0:
bad_adj[device] = bad_dev_adj
self.good_adjustables = good_adj
self.bad_adjustables = bad_adj
self.ids_bad = []
for aid, adict in self.ids_parsed.items():
if "value" in adict.keys():
try:
v = adict["value"].get_current_value()
except Exception as e:
self.ids_bad.append(aid)
print(
f"get_current_value() method of {adict['names']} failed with {e}"
)
continue
if check_for_current_none_values and v is None:
self.ids_bad.append(aid)
def _orderlist(self, mylist, key_order, orderlist=None):
key_order = key_order.split(" ")
@@ -893,7 +877,7 @@ class Run_Table_DataFrame(DataFrame):
def check_timeouts(
self, include_bad_adjustables=True, repeats=1, plot=True, verbose=True
):
if len(self.adjustables) == 0:
if np.all([len(self.ids_parsed) == 0, self.parse]):
self._parse_parent(verbose=verbose)
ts = []
devs = []
+3 -3
View File
@@ -1,7 +1,7 @@
from eco.detector.detectors_psi import DetectorBsStream
from ..devices_general.motors import MotorRecord, SmaractStreamdevice, SmaractRecord
from ..devices_general.detectors import CameraCA, CameraBS
from ..devices_general.cameras_swissfel import CameraBasler
from ..devices_general.cameras_swissfel import CameraBasler, CameraPCO
from ..aliases import Alias
from ..elements.adjustable import AdjustableVirtual
from ..epics.adjustable import AdjustablePvEnum
@@ -195,13 +195,13 @@ class Pprm_dsd(Assembly):
self.pvname = pvname
self._append(
MotorRecord,
pvname_camera + ":MOTOR_PROBE",
self.pvname + ":MOTOR_PROBE",
name="target_pos",
is_setting=True,
)
self.camCA = CameraCA(pvname_camera)
self._append(
CameraBasler,
CameraPCO,
pvname_camera,
camserver_alias=f"{name} ({pvname_camera})",
name="camera",
+8 -270
View File
@@ -179,19 +179,22 @@ class Att_usd(Assembly):
),
}
def _updateE(self, energy=None, check_once=False):
def _updateE(self, energy=None, check_times=2):
n = 0
while not energy:
energy = PV("SAROP21-ARAMIS:ENERGY").value
if np.isnan(energy):
energy = PV("SARUN:FELPHOTENE").value * 1000
if energy < self.E_min:
n = n + 1
if n > check_times:
raise ValueError(f"Machine photon energy is below {self.E_min} since {self._sleeptime*n}s")
energy = None
print(
f"Machine photon energy is below {self.E_min} - waiting for the machine to recover"
)
sleep(self._sleeptime)
print(
f"Machine photon energy is below {self.E_min} - waiting for the machine to recover since {self._sleeptime*n}s"
)
self.E = energy
# print("Calculating transmission for %s eV" % energy)
return
def _calc_transmission(self):
@@ -380,268 +383,3 @@ class Att_usd(Assembly):
plt.tight_layout()
return act_values.T[1]
class att_usd(Assembly):
def __init__(self, name=None, alias_namespace=None, xp=None):
    """Solid attenuator with two SmarAct translation stages.

    Each stage carries a row of absorber targets (mostly Al2O3, a few
    polyimide) of graded thickness; combined transmission is selected by
    moving both stages (see set_transmission / get_current_value).

    Parameters
    ----------
    name : str, optional
        Assembly/alias name.
    alias_namespace : optional
        Not referenced in this constructor.  # NOTE(review): confirm it is
        # intentionally unused and only kept for interface compatibility.
    xp : optional
        Object with open()/close(); it is closed while the stages move
        (see set_transmission).
    """
    super().__init__(name=name)
    # self.name = name
    self.alias = Alias(name)
    # Cached machine photon energy; filled/refreshed by _updateE().
    self.E = None
    # Minimum photon energy considered a valid machine reading
    # (presumably eV — TODO confirm units).
    self.E_min = 1500
    # Poll interval (seconds) while waiting for the machine to recover.
    self._sleeptime = 1
    # SmarAct stage configuration: PV prefix ("id"), description and
    # motion parameters/limits per translation axis.
    self.motor_configuration = {
        "transl_2": {
            "id": "SARES23-LIC10",
            "pv_descr": "att_usd transl 2",
            "type": 1,
            "sensor": 0,
            "speed": 500,
            "home_direction": "back",
            "hl": 50,
            "ll": -50,
        },
        "transl_1": {
            "id": "SARES23-LIC12",
            "pv_descr": "att_usd transl 1",
            "type": 1,
            "sensor": 0,
            "speed": 500,
            "home_direction": "back",
            "hl": 50,
            "ll": -50,
        },
    }
    self._xp = xp
    self.E = None
    # Instantiate one SmaractStreamdevice adjustable per configured axis.
    for name, config in self.motor_configuration.items():
        self._append(
            SmaractStreamdevice,
            pvname=config["id"],
            name=name,
            is_setting=True,
            is_display=False,
        )
    # Absorber materials used in the target tables below.
    Al2O3 = materials.Al2O3
    Si3N4 = materials.Amorphous(name="Si3N4", density=3440)
    polyimide = materials.Amorphous(name="C35H28N2O7", density=1440)
    # Target tables per stage: material ("mat"), thickness ("d") and motor
    # position ("pos") for each absorber slot; thickness 0 = empty slot.
    # _calc_transmission() later adds the per-slot transmission under "t".
    self.targets_2 = {
        "mat": np.array(
            [
                Al2O3, Al2O3, Al2O3, Al2O3, Al2O3,
                Al2O3, Al2O3, Al2O3, Al2O3, Al2O3,
                Al2O3, Al2O3, Al2O3, polyimide, Al2O3,
            ]
        ),
        "d": np.array(
            [2800, 2000, 1600, 1200, 800, 550, 420, 320, 240, 175, 125, 75, 30, 125, 0]
        ),
        "pos": np.array(
            [38.3, 33.4, 27.7, 23.3, 18.8, 13.0, 8.0, 2.5, -2.8, -7.7, -12.8, -18.0, -22.0, -26.7, -35.0]
        ),
    }
    self.targets_1 = {
        "mat": np.array(
            [
                Al2O3, Al2O3, Al2O3, Al2O3, Al2O3,
                Al2O3, Al2O3, Al2O3, Al2O3, polyimide,
                polyimide, polyimide, Al2O3,
            ]
        ),
        "d": np.array(
            [2800, 1600, 800, 420, 240, 175, 125, 75, 30, 125, 50, 25, 0]
        ),
        "pos": np.array(
            [-37.7, -32.6, -27.3, -23, -18, -13, -7.8, -3, 1.7, 7.4, 12.6, 17.6, 25]
        ),
    }
def _updateE(self, energy=None, check_once=False):
while not energy:
energy = PV("SARUN03-UIND030:FELPHOTENE").value
energy = energy * 1000
if energy < self.E_min:
energy = None
print(
f"Machine photon energy is below {self.E_min} - waiting for the machine to recover"
)
sleep(self._sleeptime)
self.E = energy
# print("Set energy to %s eV" % energy)
return
def _calc_transmission(self):
t1 = np.array(
[
np.exp(-d / mat.absorption_length(self.E))
for d, mat in zip(self.targets_1["d"], self.targets_1["mat"])
]
)
self.targets_1["t"] = t1
t2 = np.array(
[
np.exp(-d / mat.absorption_length(self.E))
for d, mat in zip(self.targets_2["d"], self.targets_2["mat"])
]
)
self.targets_2["t"] = t2
t_comb = (
(np.expand_dims(t1, axis=0)).T * (np.expand_dims(t2, axis=0))
).flatten()
pos_comb = np.array(
[[p1, p2] for p1 in self.targets_1["pos"] for p2 in self.targets_2["pos"]]
)
self.transmissions = {"t": t_comb, "pos": pos_comb}
def _find_nearest(self, a, a0):
"Element in nd array `a` closest to the scalar value `a0`"
idx = np.abs(a - a0).argmin()
return idx, a[idx]
def set_transmission(self, value):
self._updateE()
self._calc_transmission()
idx, t = self._find_nearest(self.transmissions["t"], value)
p1, p2 = self.transmissions["pos"][idx]
self._xp.close()
self.transl_1.set_target_value(p1)
self.transl_2.set_target_value(p2)
print(f"Set transmission to {t:0.2E} | Moving to pos {[p1, p2]}")
while (abs(p1 - self.transl_1.get_current_value()) > 0.05) or (
abs(p2 - self.transl_2.get_current_value() > 0.05)
):
sleep(0.1)
print("transmission changed")
self._xp.open()
def get_current_value(self):
self._updateE()
self._calc_transmission()
idx1, p1 = self._find_nearest(
self.targets_1["pos"], self.transl_1.get_current_value()
)
t1 = self.targets_1["t"][idx1]
idx2, p2 = self._find_nearest(
self.targets_2["pos"], self.transl_2.get_current_value()
)
t2 = self.targets_2["t"][idx2]
return t1 * t2
def set_stage_config(self):
for name, config in self.motor_configuration.items():
mot = self.__dict__[name]
mot.caqtdm_name(config["pv_descr"])
mot.stage_type(config["type"])
mot.sensor_type(config["sensor"])
mot.speed(config["speed"])
mot.limit_high(config["hl"])
mot.limit_low(config["ll"])
sleep(0.5)
mot.calibrate_sensor(1)
def home_smaract_stages(self, stages=None):
if stages == None:
stages = self.motor_configuration.keys()
print("#### Positions before homing ####")
print(self.__repr__())
for name in stages:
config = self.motor_configuration[name]
mot = self.__dict__[name]
print(
"#### Homing {} in {} direction ####".format(
name, config["home_direction"]
)
)
sleep(1)
if config["home_direction"] == "back":
mot.home_backward(1)
while mot.status_channel().value == 7:
sleep(1)
if mot.is_homed() == 0:
print(
"Homing failed, try homing {} in forward direction".format(name)
)
mot.home_forward(1)
elif config["home_direction"] == "forward":
mot.home_forward(1)
while mot.status_channel().value == 7:
sleep(1)
if mot.is_homed() == 0:
print(
"Homing failed, try homing {} in backward direction".format(
name
)
)
mot.home_backward(1)
def get_adjustable_positions_str(self):
ostr = "*****att_usd target position******\n"
for tkey, item in self.__dict__.items():
if hasattr(item, "get_current_value"):
pos = item.get_current_value()
ostr += " " + tkey.ljust(17) + " : % 14g\n" % pos
pos = self.get_current_value()
ostr += " " + "Transmission".ljust(17) + " : % 14.02E\n" % pos
return ostr
def __call__(self, *args, **kwargs):
self.set_transmission(*args, **kwargs)
def __repr__(self):
return self.get_adjustable_positions_str()
+13
View File
@@ -377,6 +377,19 @@ class LaserBernina(Assembly):
name="delay_pump",
is_setting=True,
)
self._append(
MotorRecord,
"SLAAR21-LMOT-M552:MOT",
name="delaystage_compensation",
is_setting=True,
)
self._append(
DelayTime,
self.delaystage_pump,
name="delay_compensation",
is_setting=True,
)
# self._append(
# Stage_LXT_Delay,
# self.delay_glob,