Compare commits

...

28 Commits

Author SHA1 Message Date
f8d9b55bc3 fix(file_writer): Fix file_writer format method.
All checks were successful
CI for csaxs_bec / test (push) Successful in 1m55s
CI for csaxs_bec / test (pull_request) Successful in 1m54s
2026-03-27 20:11:24 +01:00
x12sa
b67e1c012c renamed rt flyer to rt positions
All checks were successful
CI for csaxs_bec / test (pull_request) Successful in 1m54s
Read the Docs Deploy Trigger / trigger-rtd-webhook (push) Successful in 1s
CI for csaxs_bec / test (push) Successful in 1m57s
2026-03-27 16:05:35 +01:00
x12sa
cbbec12d9b option to upload to a php interface
All checks were successful
CI for csaxs_bec / test (push) Successful in 1m57s
2026-03-27 15:44:49 +01:00
x12sa
8f4a9f025e some adjustments
All checks were successful
CI for csaxs_bec / test (pull_request) Successful in 1m56s
Read the Docs Deploy Trigger / trigger-rtd-webhook (push) Successful in 2s
CI for csaxs_bec / test (push) Successful in 1m57s
2026-03-27 13:43:28 +01:00
x12sa
1b9b983ab2 wip optics config for energy device
All checks were successful
CI for csaxs_bec / test (push) Successful in 1m56s
2026-03-27 12:48:51 +01:00
x12sa
d7b442969a added motors to endstation config 2026-03-27 12:48:51 +01:00
x12sa
f92db3f169 movable cards and more
All checks were successful
Read the Docs Deploy Trigger / trigger-rtd-webhook (push) Successful in 3s
CI for csaxs_bec / test (push) Successful in 1m55s
2026-03-26 16:38:51 +01:00
x12sa
55531c8a65 next version 2026-03-26 16:38:51 +01:00
x12sa
1d408818cc next iteration, seems a first good and usable ver. 2026-03-26 16:38:51 +01:00
x12sa
ae2045dd10 fixes in contrast and audio confirm logif 2026-03-26 16:38:51 +01:00
x12sa
fd4d455a5b webpage version2 2026-03-26 16:38:51 +01:00
x12sa
3411aaaeb4 first version of webpage 2026-03-26 16:38:51 +01:00
x12sa
d9fc3094b6 fixed width for logo in scilog
All checks were successful
CI for csaxs_bec / test (pull_request) Successful in 1m59s
Read the Docs Deploy Trigger / trigger-rtd-webhook (push) Successful in 2s
CI for csaxs_bec / test (push) Successful in 1m57s
2026-03-25 13:39:07 +01:00
x12sa
88df4781ec tags added
All checks were successful
CI for csaxs_bec / test (push) Successful in 1m54s
2026-03-24 15:36:25 +01:00
x12sa
3b474c89c8 removed write subtomo to scilog 2026-03-24 15:34:36 +01:00
x12sa
68cc13e1d3 alignment scans to scilog 2026-03-24 15:33:14 +01:00
x12sa
700f3f9bb9 scilog tag added 2026-03-24 15:27:25 +01:00
x12sa
15a4d45f68 moved tomo_reconstruct to tomo_scan_projection
All checks were successful
CI for csaxs_bec / test (push) Successful in 1m59s
2026-03-24 15:00:17 +01:00
x12sa
7c7f877d78 new logos for logbook 2026-03-24 14:59:50 +01:00
x12sa
5d61d756c9 logo and scilog newline fixed
All checks were successful
CI for csaxs_bec / test (push) Successful in 1m56s
2026-03-23 17:00:28 +01:00
x12sa
b37ae3ef57 wip message to scilog when tomo starts
All checks were successful
CI for csaxs_bec / test (push) Successful in 1m55s
2026-03-23 16:29:30 +01:00
x12sa
76ed858e5c added heartbeat, start and remaining time to progress
All checks were successful
CI for csaxs_bec / test (push) Successful in 1m57s
2026-03-23 15:58:54 +01:00
x12sa
a0555def4d changed progress dict to global variable
All checks were successful
CI for csaxs_bec / test (push) Successful in 1m56s
2026-03-23 15:48:17 +01:00
x12sa
c1ad2fc4c3 pdf status report fixes
All checks were successful
CI for csaxs_bec / test (push) Successful in 1m57s
2026-03-23 12:38:37 +01:00
x12sa
9eee4ee1f7 minor fixes during testing
All checks were successful
Read the Docs Deploy Trigger / trigger-rtd-webhook (push) Successful in 2s
CI for csaxs_bec / test (push) Successful in 1m55s
2026-03-19 11:17:41 +01:00
c97b00cc8c fix: flomni async readout
All checks were successful
Read the Docs Deploy Trigger / trigger-rtd-webhook (push) Successful in 2s
CI for csaxs_bec / test (push) Successful in 1m57s
2026-03-19 11:13:26 +01:00
d6a4fd37fc fix(mcs): fix _progress_udpate
Some checks failed
Read the Docs Deploy Trigger / trigger-rtd-webhook (push) Successful in 2s
CI for csaxs_bec / test (push) Has been cancelled
2026-03-19 11:11:54 +01:00
6d4c9d90fc fix(mcs): omit_mca_callbacks if stop is called. 2026-03-19 11:11:54 +01:00
21 changed files with 3481 additions and 724 deletions

Binary file not shown.

After

Width:  |  Height:  |  Size: 562 KiB

Binary file not shown.

Before

Width:  |  Height:  |  Size: 48 KiB

View File

@@ -0,0 +1,89 @@
"""
LamNI/webpage_generator.py
===========================
LamNI-specific webpage generator subclass.
Integration (inside the LamNI __init__ / startup):
---------------------------------------------------
from csaxs_bec.bec_ipython_client.plugins.LamNI.webpage_generator import (
LamniWebpageGenerator,
)
self._webpage_gen = LamniWebpageGenerator(
bec_client=client,
output_dir="~/data/raw/webpage/",
)
self._webpage_gen.start()
Or use the factory (auto-selects by session name "lamni"):
----------------------------------------------------------
from csaxs_bec.bec_ipython_client.plugins.flomni.webpage_generator import (
make_webpage_generator,
)
self._webpage_gen = make_webpage_generator(bec, output_dir="~/data/raw/webpage/")
self._webpage_gen.start()
Interactive helpers:
--------------------
lamni._webpage_gen.status()
lamni._webpage_gen.verbosity = 2
lamni._webpage_gen.stop()
lamni._webpage_gen.start()
"""
from pathlib import Path
from csaxs_bec.bec_ipython_client.plugins.flomni.webpage_generator import (
WebpageGeneratorBase,
_safe_get,
_safe_float,
_gvar,
)
class LamniWebpageGenerator(WebpageGeneratorBase):
    """LamNI-specific webpage generator.

    Logo: ``LamNI.png`` from the same directory as this file.

    Override ``_collect_setup_data()`` to add LamNI-specific temperatures,
    sample name, and measurement settings.
    """

    # TODO: fill in LamNI-specific device paths
    # label -> dotpath under device_manager.devices
    _TEMP_MAP = {
        # "Sample": "lamni_temphum.temperature_sample",
        # "OSA": "lamni_temphum.temperature_osa",
    }

    def _logo_path(self):
        """Return the path of the LamNI logo shipped next to this module."""
        return Path(__file__).parent / "LamNI.png"

    def _collect_setup_data(self) -> dict:
        """Collect LamNI-specific setup data for the generated webpage.

        Placeholder — returns minimal info until the LamNI device names are
        known; the commented template shows the intended structure.
        """
        # ── LamNI-specific data goes here ─────────────────────────────
        # Uncomment and adapt when device names are known:
        #
        # dm = self._bec.device_manager
        # sample_name = _safe_get(dm, "lamni_samples.sample_names.sample0") or "N/A"
        # temperatures = {
        #     label: _safe_float(_safe_get(dm, path))
        #     for label, path in self._TEMP_MAP.items()
        # }
        # settings = {
        #     "Sample name": sample_name,
        #     "FOV x / y": ...,
        #     "Exposure time": _gvar(self._bec, "tomo_countingtime", ".3f", " s"),
        #     "Angle step": _gvar(self._bec, "tomo_angle_stepsize", ".2f", "\u00b0"),
        # }
        # return {
        #     "type": "lamni",
        #     "sample_name": sample_name,
        #     "temperatures": temperatures,
        #     "settings": settings,
        # }
        return {
            "type": "lamni",
            # LamNI-specific data here
        }

Binary file not shown.

After

Width:  |  Height:  |  Size: 124 KiB

View File

@@ -21,6 +21,14 @@ from csaxs_bec.bec_ipython_client.plugins.omny.omny_general_tools import (
TomoIDManager,
)
# from csaxs_bec.bec_ipython_client.plugins.flomni.webpage_generator import (
# FlomniWebpageGenerator,
# VERBOSITY_SILENT, # 0 — no output
# VERBOSITY_NORMAL, # 1 — startup / stop messages only (default)
# VERBOSITY_VERBOSE, # 2 — one-line summary per cycle
# VERBOSITY_DEBUG, # 3 — full JSON payload per cycle
# )
logger = bec_logger.logger
if builtins.__dict__.get("bec") is not None:
@@ -778,7 +786,9 @@ class FlomniSampleTransferMixin:
dev.ftransy.controller.socket_put_confirmed("confirm=1")
else:
print("Stopping.")
exit
raise FlomniError(
"User abort sample transfer."
)
def ftransfer_gripper_is_open(self) -> bool:
status = bool(float(dev.ftransy.controller.socket_put_and_receive("MG @OUT[9]").strip()))
@@ -801,7 +811,8 @@ class FlomniSampleTransferMixin:
def ftransfer_gripper_move(self, position: int):
self.check_position_is_valid(position)
self._ftransfer_shiftx = -0.2
#this is not used for sample stage position!
self._ftransfer_shiftx = -0.15
self._ftransfer_shiftz = -0.5
fsamx_pos = dev.fsamx.readback.get()
@@ -821,7 +832,7 @@ class FlomniSampleTransferMixin:
self.check_tray_in()
if position == 0:
umv(dev.ftransx, 10.715 + 0.2, dev.ftransz, 3.5950)
umv(dev.ftransx, 11, dev.ftransz, 3.5950)
if position == 1:
umv(
dev.ftransx,
@@ -966,8 +977,6 @@ class FlomniSampleTransferMixin:
class FlomniAlignmentMixin:
import csaxs_bec
import os
from pathlib import Path
# Ensure this is a Path object, not a string
csaxs_bec_basepath = Path(csaxs_bec.__file__)
@@ -1208,6 +1217,76 @@ class FlomniAlignmentMixin:
return additional_correction_shift
class _ProgressProxy:
"""Dict-like proxy that persists the flOMNI progress dict as a BEC global variable.
Every read (`proxy["key"]`) fetches the current dict from the global var store,
and every write (`proxy["key"] = val`) fetches, updates, and saves it back.
This makes the progress state visible to all BEC client sessions via
``client.get_global_var("tomo_progress")``.
"""
_GLOBAL_VAR_KEY = "tomo_progress"
_DEFAULTS: dict = {
"subtomo": 0,
"subtomo_projection": 0,
"subtomo_total_projections": 1,
"projection": 0,
"total_projections": 1,
"angle": 0,
"tomo_type": 0,
"tomo_start_time": None,
"estimated_remaining_time": None,
"heartbeat": None,
}
def __init__(self, client):
self._client = client
# ------------------------------------------------------------------
# Internal helpers
# ------------------------------------------------------------------
def _load(self) -> dict:
val = self._client.get_global_var(self._GLOBAL_VAR_KEY)
if val is None:
return dict(self._DEFAULTS)
return val
def _save(self, data: dict) -> None:
self._client.set_global_var(self._GLOBAL_VAR_KEY, data)
# ------------------------------------------------------------------
# Dict-like interface
# ------------------------------------------------------------------
def __getitem__(self, key):
return self._load()[key]
def __setitem__(self, key, value) -> None:
data = self._load()
data[key] = value
self._save(data)
def __repr__(self) -> str:
return f"{self.__class__.__name__}({self._load()!r})"
def get(self, key, default=None):
return self._load().get(key, default)
def update(self, *args, **kwargs) -> None:
"""Update multiple fields in a single round-trip."""
data = self._load()
data.update(*args, **kwargs)
self._save(data)
def reset(self) -> None:
"""Reset all progress fields to their default values."""
self._save(dict(self._DEFAULTS))
def as_dict(self) -> dict:
"""Return a plain copy of the current progress state."""
return self._load()
class Flomni(
FlomniInitStagesMixin,
FlomniSampleTransferMixin,
@@ -1230,14 +1309,18 @@ class Flomni(
self.corr_angle_y = []
self.corr_pos_y_2 = []
self.corr_angle_y_2 = []
self.progress = {}
self.progress["subtomo"] = 0
self.progress["subtomo_projection"] = 0
self.progress["subtomo_total_projections"] = 1
self.progress["projection"] = 0
self.progress["total_projections"] = 1
self.progress["angle"] = 0
self.progress["tomo_type"] = 0
self._progress_proxy = _ProgressProxy(self.client)
self._progress_proxy.reset()
from csaxs_bec.bec_ipython_client.plugins.flomni.flomni_webpage_generator import (
FlomniWebpageGenerator,
)
self._webpage_gen = FlomniWebpageGenerator(
bec_client=client,
output_dir="~/data/raw/webpage/",
upload_url="http://s1090968537.online.de/upload.php", # optional
)
self._webpage_gen.start()
self.OMNYTools = OMNYTools(self.client)
self.reconstructor = PtychoReconstructor(self.ptycho_reconstruct_foldername)
self.tomo_id_manager = TomoIDManager()
@@ -1293,6 +1376,42 @@ class Flomni(
self.special_angles = []
self.special_angle_repeats = 1
@property
def progress(self) -> _ProgressProxy:
"""Proxy dict backed by the BEC global variable ``tomo_progress``.
Readable from any BEC client session via::
client.get_global_var("tomo_progress")
Individual fields can be read and written just like a regular dict::
flomni.progress["projection"] # read
flomni.progress["projection"] = 42 # write (persists immediately)
To update multiple fields atomically use :py:meth:`_ProgressProxy.update`::
flomni.progress.update(projection=42, angle=90.0)
To reset all fields to their defaults::
flomni.progress.reset()
"""
return self._progress_proxy
@progress.setter
def progress(self, val: dict) -> None:
"""Replace the entire progress dict.
Accepts a plain :class:`dict` and persists it to the global var store.
Example::
flomni.progress = {"projection": 0, "total_projections": 100, ...}
"""
if not isinstance(val, dict):
raise TypeError(f"progress must be a dict, got {type(val).__name__!r}")
self._progress_proxy._save(val)
@property
def tomo_shellstep(self):
val = self.client.get_global_var("tomo_shellstep")
@@ -1479,21 +1598,11 @@ class Flomni(
def sample_name(self):
return self.sample_get_name(0)
def write_to_scilog(self, content, tags: list = None):
try:
if tags is not None:
tags.append("BEC")
else:
tags = ["BEC"]
msg = bec.logbook.LogbookMessage()
msg.add_text(content).add_tag(tags)
self.client.logbook.send_logbook_message(msg)
except Exception:
logger.warning("Failed to write to scilog.")
def tomo_alignment_scan(self):
"""
Performs a tomogram alignment scan.
Collects all scan numbers acquired during the alignment, prints them at the end,
and creates a BEC scilog text entry summarising the alignment scan numbers.
"""
if self.get_alignment_offset(0) == (0, 0, 0):
print("It appears that the xrayeye alignment was not performed or loaded. Aborting.")
@@ -1503,11 +1612,9 @@ class Flomni(
self.feye_out()
tags = ["BEC_alignment_tomo", self.sample_name]
self.write_to_scilog(
f"Starting alignment scan. First scan number: {bec.queue.next_scan_number}.", tags
)
start_angle = 0
alignment_scan_numbers = []
angle_end = start_angle + 180
for angle in np.linspace(start_angle, angle_end, num=int(180 / 45) + 1, endpoint=True):
@@ -1519,7 +1626,6 @@ class Flomni(
try:
start_scan_number = bec.queue.next_scan_number
self.tomo_scan_projection(angle)
self.tomo_reconstruct()
error_caught = False
except AlarmBase as exc:
if exc.alarm_type == "TimeoutError":
@@ -1533,24 +1639,27 @@ class Flomni(
end_scan_number = bec.queue.next_scan_number
for scan_nr in range(start_scan_number, end_scan_number):
self._write_tomo_scan_number(scan_nr, angle, 0)
#self._write_tomo_scan_number(scan_nr, angle, 0)
alignment_scan_numbers.append(scan_nr)
umv(dev.fsamroy, 0)
self.OMNYTools.printgreenbold(
"\n\nAlignment scan finished. Please run SPEC_ptycho_align and load the new fit."
"\n\nAlignment scan finished. Please run SPEC_ptycho_align and load the new fit by flomni.read_alignment_offset() ."
)
def _write_subtomo_to_scilog(self, subtomo_number):
dev = builtins.__dict__.get("dev")
bec = builtins.__dict__.get("bec")
if self.tomo_id > 0:
tags = ["BEC_subtomo", self.sample_name, f"tomo_id_{self.tomo_id}"]
else:
tags = ["BEC_subtomo", self.sample_name]
self.write_to_scilog(
f"Starting subtomo: {subtomo_number}. First scan number: {bec.queue.next_scan_number}.",
tags,
# summary of alignment scan numbers
scan_list_str = ", ".join(str(s) for s in alignment_scan_numbers)
#print(f"\nAlignment scan numbers ({len(alignment_scan_numbers)} total): {scan_list_str}")
# BEC scilog entry (no logo)
scilog_content = (
f"Alignment scan finished.\n"
f"Sample: {self.sample_name}\n"
f"Number of alignment scans: {len(alignment_scan_numbers)}\n"
f"Alignment scan numbers: {scan_list_str}\n"
)
print(scilog_content)
bec.messaging.scilog.new().add_text(scilog_content.replace("\n", "<br>")).add_tags("alignmentscan").send()
def sub_tomo_scan(self, subtomo_number, start_angle=None):
"""
@@ -1559,18 +1668,6 @@ class Flomni(
subtomo_number (int): The sub tomogram number.
start_angle (float, optional): The start angle of the scan. Defaults to None.
"""
# dev = builtins.__dict__.get("dev")
# bec = builtins.__dict__.get("bec")
# if self.tomo_id > 0:
# tags = ["BEC_subtomo", self.sample_name, f"tomo_id_{self.tomo_id}"]
# else:
# tags = ["BEC_subtomo", self.sample_name]
# self.write_to_scilog(
# f"Starting subtomo: {subtomo_number}. First scan number: {bec.queue.next_scan_number}.",
# tags,
# )
self._write_subtomo_to_scilog(subtomo_number)
if start_angle is not None:
print(f"Sub tomo scan with start angle {start_angle} requested.")
@@ -1670,6 +1767,7 @@ class Flomni(
successful = False
error_caught = False
if 0 <= angle < 180.05:
self.progress["heartbeat"] = datetime.datetime.now().isoformat()
print(f"Starting flOMNI scan for angle {angle} in subtomo {subtomo_number}")
self._print_progress()
while not successful:
@@ -1713,9 +1811,9 @@ class Flomni(
)
if self.OMNYTools.yesno("Shall I continue?", "n"):
print("OK")
else:
print("Stopping.")
return
else:
print("Stopping.")
return
self.flomnigui_show_progress()
@@ -1743,6 +1841,8 @@ class Flomni(
# self.write_pdf_report()
# else:
self.tomo_id = 0
self.write_pdf_report()
self.progress["tomo_start_time"] = datetime.datetime.now().isoformat()
with scans.dataset_id_on_hold:
if self.tomo_type == 1:
@@ -1762,7 +1862,6 @@ class Flomni(
while True:
angle, subtomo_number = self._golden(ii, self.golden_ratio_bunch_size, 180, 1)
if previous_subtomo_number != subtomo_number:
self._write_subtomo_to_scilog(subtomo_number)
if (
subtomo_number % 2 == 1
and ii > 10
@@ -1810,7 +1909,6 @@ class Flomni(
ii, int(180 / self.tomo_angle_stepsize), 180, 1, 0
)
if previous_subtomo_number != subtomo_number:
self._write_subtomo_to_scilog(subtomo_number)
if (
subtomo_number % 2 == 1
and ii > 10
@@ -1852,14 +1950,42 @@ class Flomni(
self._print_progress()
self.OMNYTools.printgreenbold("Tomoscan finished")
@staticmethod
def _format_duration(seconds: float) -> str:
"""Format a duration in seconds as a human-readable string, e.g. '2h 03m 15s'."""
seconds = int(seconds)
h, remainder = divmod(seconds, 3600)
m, s = divmod(remainder, 60)
if h > 0:
return f"{h}h {m:02d}m {s:02d}s"
if m > 0:
return f"{m}m {s:02d}s"
return f"{s}s"
def _print_progress(self):
# --- compute and store estimated remaining time -----------------------
start_str = self.progress.get("tomo_start_time")
projection = self.progress["projection"]
total = self.progress["total_projections"]
if start_str is not None and total > 0 and projection > 9:
elapsed = (
datetime.datetime.now() - datetime.datetime.fromisoformat(start_str)
).total_seconds()
rate = projection / elapsed # projections per second
remaining_s = (total - projection) / rate
self.progress["estimated_remaining_time"] = remaining_s
eta_str = self._format_duration(remaining_s)
else:
eta_str = "N/A"
# ----------------------------------------------------------------------
print("\x1b[95mProgress report:")
print(f"Tomo type: ....................... {self.progress['tomo_type']}")
print(f"Projection: ...................... {self.progress['projection']:.0f}")
print(f"Total projections expected ....... {self.progress['total_projections']}")
print(f"Angle: ........................... {self.progress['angle']}")
print(f"Current subtomo: ................. {self.progress['subtomo']}")
print(f"Current projection within subtomo: {self.progress['subtomo_projection']}\x1b[0m")
print(f"Current projection within subtomo: {self.progress['subtomo_projection']}")
print(f"Estimated remaining time: ........ {eta_str}\x1b[0m")
self._flomnigui_update_progress()
def add_sample_database(
@@ -1883,7 +2009,6 @@ class Flomni(
return
self.tomo_scan_projection(angle)
self.tomo_reconstruct()
def _golden(self, ii, howmany_sorted, maxangle, reverse=False):
"""returns the iis golden ratio angle of sorted bunches of howmany_sorted and its subtomo number"""
@@ -1988,7 +2113,7 @@ class Flomni(
f"{str(datetime.datetime.now())}: flomni scan projection at angle {angle}, scan"
f" number {bec.queue.next_scan_number}.\n"
)
# self.write_to_scilog(log_message, ["BEC_scans", self.sample_name])
scans.flomni_fermat_scan(
fovx=self.fovx,
fovy=self.fovy,
@@ -2001,6 +2126,9 @@ class Flomni(
corridor_size=corridor_size,
)
self.tomo_reconstruct()
def tomo_parameters(self):
"""print and update the tomo parameters"""
print("Current settings:")
@@ -2139,19 +2267,21 @@ class Flomni(
+ ' 888 888 "Y88888P" 888 888 888 Y888 8888888 \n'
)
padding = 20
fovxy = f"{self.fovx:.2f}/{self.fovy:.2f}"
stitching = f"{self.stitch_x:.2f}/{self.stitch_y:.2f}"
fovxy = f"{self.fovx:.1f}/{self.fovy:.1f}"
stitching = f"{self.stitch_x:.0f}/{self.stitch_y:.0f}"
dataset_id = str(self.client.queue.next_dataset_number)
account = bec.active_account
content = [
f"{'Sample Name:':<{padding}}{self.sample_name:>{padding}}\n",
f"{'Measurement ID:':<{padding}}{str(self.tomo_id):>{padding}}\n",
f"{'Dataset ID:':<{padding}}{dataset_id:>{padding}}\n",
f"{'Sample Info:':<{padding}}{'Sample Info':>{padding}}\n",
f"{'e-account:':<{padding}}{str(self.client.username):>{padding}}\n",
f"{'e-account:':<{padding}}{str(account):>{padding}}\n",
f"{'Number of projections:':<{padding}}{int(180 / self.tomo_angle_stepsize * 8):>{padding}}\n",
f"{'First scan number:':<{padding}}{self.client.queue.next_scan_number:>{padding}}\n",
f"{'Last scan number approx.:':<{padding}}{self.client.queue.next_scan_number + int(180 / self.tomo_angle_stepsize * 8) + 10:>{padding}}\n",
f"{'Current photon energy:':<{padding}}{dev.mokev.read()['mokev']['value']:>{padding}.4f}\n",
f"{'Current photon energy:':<{padding}}To be implemented\n",
#f"{'Current photon energy:':<{padding}}{dev.mokev.read()['mokev']['value']:>{padding}.4f}\n",
f"{'Exposure time:':<{padding}}{self.tomo_countingtime:>{padding}.2f}\n",
f"{'Fermat spiral step size:':<{padding}}{self.tomo_shellstep:>{padding}.2f}\n",
f"{'FOV:':<{padding}}{fovxy:>{padding}}\n",
@@ -2160,20 +2290,38 @@ class Flomni(
f"{'Angular step within sub-tomogram:':<{padding}}{self.tomo_angle_stepsize:>{padding}.2f}\n",
]
content = "".join(content)
user_target = os.path.expanduser(f"~/Data10/documentation/tomo_scan_ID_{self.tomo_id}.pdf")
user_target = os.path.expanduser(f"~/data/raw/documentation/tomo_scan_ID_{self.tomo_id}.pdf")
with PDFWriter(user_target) as file:
file.write(header)
file.write(content)
subprocess.run(
"xterm /work/sls/spec/local/XOMNY/bin/upload/upload_last_pon.sh &", shell=True
)
# subprocess.run(
# "xterm /work/sls/spec/local/XOMNY/bin/upload/upload_last_pon.sh &", shell=True
# )
# status = subprocess.run(f"cp /tmp/spec-e20131-specES1.pdf {user_target}", shell=True)
msg = bec.logbook.LogbookMessage()
logo_path = os.path.join(os.path.dirname(os.path.abspath(__file__)), "LamNI_logo.png")
msg.add_file(logo_path).add_text("".join(content).replace("\n", "</p><p>")).add_tag(
["BEC", "tomo_parameters", f"dataset_id_{dataset_id}", "LamNI", self.sample_name]
)
self.client.logbook.send_logbook_message(msg)
# msg = bec.tomo_progress.tomo_progressMessage()
# logo_path = os.path.join(os.path.dirname(os.path.abspath(__file__)), "LamNI_logo.png")
# msg.add_file(logo_path).add_text("".join(content).replace("\n", "</p><p>")).add_tag(
# ["BEC", "tomo_parameters", f"dataset_id_{dataset_id}", "flOMNI", self.sample_name]
# )
# self.client.tomo_progress.send_tomo_progress_message("~/data/raw/documentation/tomo_scan_ID_{self.tomo_id}.pdf").send()
import csaxs_bec
# Ensure this is a Path object, not a string
csaxs_bec_basepath = Path(csaxs_bec.__file__)
logo_file_rel = "flOMNI.png"
# Build the absolute path correctly
logo_file = (
csaxs_bec_basepath.parent
/ "bec_ipython_client"
/ "plugins"
/ "flomni"
/ logo_file_rel
).resolve()
print(logo_file)
bec.messaging.scilog.new().add_attachment(logo_file, width=200).add_text(content.replace("\n", "<br>")).add_tags("tomoscan").send()
if __name__ == "__main__":

View File

@@ -50,8 +50,6 @@ class FlomniOpticsMixin:
# move both axes to the desired "in" positions
umv(dev.feyex, feyex_in, dev.feyey, feyey_in)
self.xrayeye_update_frame()
def _ffzp_in(self):
foptx_in = self._get_user_param_safe("foptx", "in")
fopty_in = self._get_user_param_safe("fopty", "in")

File diff suppressed because it is too large Load Diff

View File

@@ -223,6 +223,14 @@ class flomniGuiTools:
self._flomnigui_update_progress()
def _flomnigui_update_progress(self):
"""Update the progress ring bar and center label from the current progress state.
``self.progress`` is backed by the BEC global variable ``tomo_progress``
(see :class:`_ProgressProxy` in ``flomni.py``), so this method reflects
the live state that is also accessible from other BEC client sessions via::
client.get_global_var("tomo_progress")
"""
main_progress_ring = self.progressbar.rings[0]
subtomo_progress_ring = self.progressbar.rings[1]
if self.progressbar is not None:
@@ -235,6 +243,31 @@ class flomniGuiTools:
main_progress_ring.set_value(progress)
subtomo_progress_ring.set_value(subtomo_progress)
# --- format start time for display --------------------------------
start_str = self.progress.get("tomo_start_time")
if start_str is not None:
import datetime as _dt
start_display = _dt.datetime.fromisoformat(start_str).strftime("%Y-%m-%d %H:%M:%S")
else:
start_display = "N/A"
# --- format estimated remaining time ------------------------------
remaining_s = self.progress.get("estimated_remaining_time")
if remaining_s is not None and remaining_s >= 0:
import datetime as _dt
remaining_s = int(remaining_s)
h, rem = divmod(remaining_s, 3600)
m, s = divmod(rem, 60)
if h > 0:
eta_display = f"{h}h {m:02d}m {s:02d}s"
elif m > 0:
eta_display = f"{m}m {s:02d}s"
else:
eta_display = f"{s}s"
else:
eta_display = "N/A"
# ------------------------------------------------------------------
text = (
f"Progress report:\n"
f" Tomo type: {self.progress['tomo_type']}\n"
@@ -243,7 +276,9 @@ class flomniGuiTools:
f" Angle: {self.progress['angle']:.1f}\n"
f" Current subtomo: {self.progress['subtomo']}\n"
f" Current projection within subtomo: {self.progress['subtomo_projection']}\n"
f" Total projections per subtomo: {int(self.progress['subtomo_total_projections'])}"
f" Total projections per subtomo: {int(self.progress['subtomo_total_projections'])}\n"
f" Scan started: {start_display}\n"
f" Est. remaining: {eta_display}"
)
self.progressbar.set_center_label(text)

View File

@@ -253,6 +253,8 @@ class XrayEyeAlign:
umv(dev.rtx, 0)
print("You are ready to remove the xray eye and start ptychography scans.")
print("Fine alignment: flomni.tomo_parameters() , then flomni.tomo_alignment_scan()")
print("After that, run the fit in Matlab and load the new fit flomni.read_alignment_offset()")
def write_output(self):
file = os.path.expanduser("~/Data10/specES1/internal/xrayeye_alignmentvalues")

Binary file not shown.

After

Width:  |  Height:  |  Size: 359 KiB

View File

@@ -0,0 +1,96 @@
"""
omny/webpage_generator.py
==========================
OMNY-specific webpage generator subclass.
Integration (inside the OMNY __init__ / startup):
--------------------------------------------------
from csaxs_bec.bec_ipython_client.plugins.omny.webpage_generator import (
OmnyWebpageGenerator,
)
self._webpage_gen = OmnyWebpageGenerator(
bec_client=client,
output_dir="~/data/raw/webpage/",
)
self._webpage_gen.start()
Or use the factory (auto-selects by session name "omny"):
---------------------------------------------------------
from csaxs_bec.bec_ipython_client.plugins.flomni.webpage_generator import (
make_webpage_generator,
)
self._webpage_gen = make_webpage_generator(bec, output_dir="~/data/raw/webpage/")
self._webpage_gen.start()
Interactive helpers:
--------------------
omny._webpage_gen.status()
omny._webpage_gen.verbosity = 2
omny._webpage_gen.stop()
omny._webpage_gen.start()
"""
from pathlib import Path
from csaxs_bec.bec_ipython_client.plugins.flomni.webpage_generator import (
WebpageGeneratorBase,
_safe_get,
_safe_float,
_gvar,
)
class OmnyWebpageGenerator(WebpageGeneratorBase):
    """OMNY-specific webpage generator.

    Logo: ``OMNY.png`` from the same directory as this file.

    Override ``_collect_setup_data()`` to add OMNY-specific temperatures,
    sample name, and measurement settings.

    The old OMNY spec webpage showed:
      - Cryo temperatures (XOMNY-TEMP-CRYO-A/B)
      - Per-channel temperatures (XOMNY-TEMP1..48)
      - Dewar pressure / LN2 flow
      - Interferometer strengths (OINTERF)
    Map these to BEC device paths below once available.
    """

    # TODO: fill in OMNY-specific device paths
    # label -> dotpath under device_manager.devices
    _TEMP_MAP = {
        # "Sample (cryo A)": "omny_temp.cryo_a",
        # "Cryo head (B)": "omny_temp.cryo_b",
    }

    def _logo_path(self):
        """Return the path of the OMNY logo shipped next to this module."""
        return Path(__file__).parent / "OMNY.png"

    def _collect_setup_data(self) -> dict:
        """Collect OMNY-specific setup data for the generated webpage.

        Placeholder — returns minimal info until the OMNY device names are
        known; the commented template shows the intended structure.
        """
        # ── OMNY-specific data goes here ──────────────────────────────
        # Uncomment and adapt when device names are known:
        #
        # dm = self._bec.device_manager
        # sample_name = _safe_get(dm, "omny_samples.sample_names.sample0") or "N/A"
        # temperatures = {
        #     label: _safe_float(_safe_get(dm, path))
        #     for label, path in self._TEMP_MAP.items()
        # }
        # settings = {
        #     "Sample name": sample_name,
        #     "FOV x / y": ...,
        #     "Exposure time": _gvar(self._bec, "tomo_countingtime", ".3f", " s"),
        #     "Angle step": _gvar(self._bec, "tomo_angle_stepsize", ".2f", "\u00b0"),
        # }
        # return {
        #     "type": "omny",
        #     "sample_name": sample_name,
        #     "temperatures": temperatures,
        #     "settings": settings,
        # }
        return {
            "type": "omny",
            # OMNY-specific data here
        }

View File

@@ -72,7 +72,7 @@ xbpm3x:
# precision: 3
# tolerance: 0.005
enabled: true
onFailure: buffer
onFailure: retry
readOnly: false
readoutPriority: baseline
connectionTimeout: 20
@@ -95,7 +95,7 @@ xbpm3y:
# precision: 3
# tolerance: 0.005
enabled: true
onFailure: buffer
onFailure: retry
readOnly: false
readoutPriority: baseline
connectionTimeout: 20
@@ -118,7 +118,7 @@ sl3trxi:
# precision: 3
# tolerance: 0.005
enabled: true
onFailure: buffer
onFailure: retry
readOnly: false
readoutPriority: baseline
connectionTimeout: 20
@@ -141,7 +141,7 @@ sl3trxo:
# precision: 3
# tolerance: 0.005
enabled: true
onFailure: buffer
onFailure: retry
readOnly: false
readoutPriority: baseline
connectionTimeout: 20
@@ -164,7 +164,7 @@ sl3trxb:
# precision: 3
# tolerance: 0.005
enabled: true
onFailure: buffer
onFailure: retry
readOnly: false
readoutPriority: baseline
connectionTimeout: 20
@@ -187,7 +187,7 @@ sl3trxt:
# precision: 3
# tolerance: 0.005
enabled: true
onFailure: buffer
onFailure: retry
readOnly: false
readoutPriority: baseline
connectionTimeout: 20
@@ -210,7 +210,7 @@ fast_shutter_n1_x:
# precision: 3
# tolerance: 0.005
enabled: true
onFailure: buffer
onFailure: retry
readOnly: false
readoutPriority: baseline
connectionTimeout: 20
@@ -234,7 +234,7 @@ fast_shutter_o1_x:
# precision: 3
# tolerance: 0.005
enabled: true
onFailure: buffer
onFailure: retry
readOnly: false
readoutPriority: baseline
connectionTimeout: 20
@@ -257,7 +257,7 @@ fast_shutter_o2_x:
# precision: 3
# tolerance: 0.005
enabled: true
onFailure: buffer
onFailure: retry
readOnly: false
readoutPriority: baseline
connectionTimeout: 20
@@ -280,7 +280,7 @@ filter_array_1_x:
# precision: 3
# tolerance: 0.005
enabled: true
onFailure: buffer
onFailure: retry
readOnly: false
readoutPriority: baseline
connectionTimeout: 20
@@ -303,7 +303,7 @@ filter_array_2_x:
# precision: 3
# tolerance: 0.005
enabled: true
onFailure: buffer
onFailure: retry
readOnly: false
readoutPriority: baseline
connectionTimeout: 20
@@ -326,7 +326,7 @@ filter_array_3_x:
# precision: 3
# tolerance: 0.005
enabled: true
onFailure: buffer
onFailure: retry
readOnly: false
readoutPriority: baseline
connectionTimeout: 20
@@ -349,7 +349,7 @@ filter_array_4_x:
# precision: 3
# tolerance: 0.005
enabled: true
onFailure: buffer
onFailure: retry
readOnly: false
readoutPriority: baseline
connectionTimeout: 20
@@ -372,7 +372,7 @@ sl4trxi:
# precision: 3
# tolerance: 0.005
enabled: true
onFailure: buffer
onFailure: retry
readOnly: false
readoutPriority: baseline
connectionTimeout: 20
@@ -395,7 +395,7 @@ sl4trxo:
# precision: 3
# tolerance: 0.005
enabled: true
onFailure: buffer
onFailure: retry
readOnly: false
readoutPriority: baseline
connectionTimeout: 20
@@ -418,7 +418,7 @@ sl4trxb:
# precision: 3
# tolerance: 0.005
enabled: true
onFailure: buffer
onFailure: retry
readOnly: false
readoutPriority: baseline
connectionTimeout: 20
@@ -441,7 +441,7 @@ sl4trxt:
# precision: 3
# tolerance: 0.005
enabled: true
onFailure: buffer
onFailure: retry
readOnly: false
readoutPriority: baseline
connectionTimeout: 20
@@ -466,7 +466,7 @@ sl5trxi:
# precision: 3
# tolerance: 0.005
enabled: true
onFailure: buffer
onFailure: retry
readOnly: false
readoutPriority: baseline
connectionTimeout: 20
@@ -489,7 +489,7 @@ sl5trxo:
# precision: 3
# tolerance: 0.005
enabled: true
onFailure: buffer
onFailure: retry
readOnly: false
readoutPriority: baseline
connectionTimeout: 20
@@ -512,7 +512,7 @@ sl5trxb:
# precision: 3
# tolerance: 0.005
enabled: true
onFailure: buffer
onFailure: retry
readOnly: false
readoutPriority: baseline
connectionTimeout: 20
@@ -535,7 +535,7 @@ sl5trxt:
# precision: 3
# tolerance: 0.005
enabled: true
onFailure: buffer
onFailure: retry
readOnly: false
readoutPriority: baseline
connectionTimeout: 20
@@ -558,7 +558,7 @@ xbimtrx:
# precision: 3
# tolerance: 0.005
enabled: true
onFailure: buffer
onFailure: retry
readOnly: false
readoutPriority: baseline
connectionTimeout: 20
@@ -581,7 +581,7 @@ xbimtry:
# precision: 3
# tolerance: 0.005
enabled: true
onFailure: buffer
onFailure: retry
readOnly: false
readoutPriority: baseline
connectionTimeout: 20
@@ -589,3 +589,237 @@ xbimtry:
init_position: 0
# bl_smar_stage to use csaxs reference method. assign number according to axis channel
bl_smar_stage: 1
################### XBOX related ###################
# we assume the EPICS settings for resolution, velocity etc. are correct
# we do not overwrite from here
aptrx:
description: Aperture pinhole X
deviceClass: ophyd_devices.devices.psi_motor.EpicsUserMotorVME
deviceConfig:
prefix: X12SA-ES1-PIN1:TRX1
deviceTags:
- cSAXS_ES
onFailure: retry
enabled: true
readoutPriority: baseline
softwareTrigger: false
aptry:
description: Aperture pinhole Y
deviceClass: ophyd_devices.devices.psi_motor.EpicsUserMotorVME
deviceConfig:
prefix: X12SA-ES1-PIN1:TRY1
deviceTags:
- cSAXS_ES
onFailure: retry
enabled: true
readoutPriority: baseline
softwareTrigger: false
ebtrx:
description: Exposure box aperture X
deviceClass: ophyd_devices.devices.psi_motor.EpicsUserMotorVME
deviceConfig:
prefix: X12SA-ES1-EB:TRX1
deviceTags:
- cSAXS_ES
onFailure: retry
enabled: true
readoutPriority: baseline
softwareTrigger: false
ebtry:
description: Exposure box aperture Y
deviceClass: ophyd_devices.devices.psi_motor.EpicsUserMotorVME
deviceConfig:
prefix: X12SA-ES1-EB:TRY1
deviceTags:
- cSAXS_ES
onFailure: retry
enabled: true
readoutPriority: baseline
softwareTrigger: false
ebtrz:
description: Exposure box aperture Z
deviceClass: ophyd_devices.devices.psi_motor.EpicsUserMotorVME
deviceConfig:
prefix: X12SA-ES1-EB:TRZ1
deviceTags:
- cSAXS_ES
onFailure: retry
enabled: true
readoutPriority: baseline
softwareTrigger: false
ebsupport:
description: Exposure box granite support Y
deviceClass: ophyd_devices.devices.psi_motor.EpicsUserMotorVME
deviceConfig:
prefix: X12SA-EH1-EB:TRY1
deviceTags:
- cSAXS_ES
onFailure: retry
enabled: true
readoutPriority: baseline
softwareTrigger: false
fttrx1:
description: FTS1 translation X
deviceClass: ophyd_devices.devices.psi_motor.EpicsUserMotorVME
deviceConfig:
prefix: X12SA-ES1-FTS1:TRX1
deviceTags:
- cSAXS_ES
onFailure: retry
enabled: true
readoutPriority: baseline
softwareTrigger: false
fttry1:
description: FTS1 translation Y
deviceClass: ophyd_devices.devices.psi_motor.EpicsUserMotorVME
deviceConfig:
prefix: X12SA-ES1-FTS1:TRY1
deviceTags:
- cSAXS_ES
onFailure: retry
enabled: true
readoutPriority: baseline
softwareTrigger: false
fttrx2:
description: FTS2 translation X
deviceClass: ophyd_devices.devices.psi_motor.EpicsUserMotorVME
deviceConfig:
prefix: X12SA-ES1-FTS2:TRX1
deviceTags:
- cSAXS_ES
onFailure: retry
enabled: true
readoutPriority: baseline
softwareTrigger: false
fttry2:
description: FTS2 translation Y
deviceClass: ophyd_devices.devices.psi_motor.EpicsUserMotorVME
deviceConfig:
prefix: X12SA-ES1-FTS2:TRY1
deviceTags:
- cSAXS_ES
onFailure: retry
enabled: true
readoutPriority: baseline
softwareTrigger: false
fttrz:
description: FTS1 translation Z
deviceClass: ophyd_devices.devices.psi_motor.EpicsUserMotorVME
deviceConfig:
prefix: X12SA-ES1-FTS1:TRZ1
deviceTags:
- cSAXS_ES
onFailure: retry
enabled: true
readoutPriority: baseline
softwareTrigger: false
bs1x:
description: Beamstop 1 X
deviceClass: ophyd_devices.devices.psi_motor.EpicsUserMotorVME
deviceConfig:
prefix: X12SA-ES1-BS1:TRX1
deviceTags:
- cSAXS_ES
onFailure: retry
enabled: true
readoutPriority: baseline
softwareTrigger: false
bs1y:
description: Beamstop 1 Y
deviceClass: ophyd_devices.devices.psi_motor.EpicsUserMotorVME
deviceConfig:
prefix: X12SA-ES1-BS1:TRY1
deviceTags:
- cSAXS_ES
onFailure: retry
enabled: true
readoutPriority: baseline
softwareTrigger: false
bs2x:
description: Beamstop 2 X
deviceClass: ophyd_devices.devices.psi_motor.EpicsUserMotorVME
deviceConfig:
prefix: X12SA-ES1-BS2:TRX1
deviceTags:
- cSAXS_ES
onFailure: retry
enabled: true
readoutPriority: baseline
softwareTrigger: false
bs2y:
description: Beamstop 2 Y
deviceClass: ophyd_devices.devices.psi_motor.EpicsUserMotorVME
deviceConfig:
prefix: X12SA-ES1-BS2:TRY1
deviceTags:
- cSAXS_ES
onFailure: retry
enabled: true
readoutPriority: baseline
softwareTrigger: false
dttrx:
description: Detector table X
deviceClass: ophyd_devices.devices.psi_motor.EpicsUserMotorVME
deviceConfig:
prefix: X12SA-ES1-DETT:TRX1
deviceTags:
- cSAXS_ES
onFailure: retry
enabled: true
readoutPriority: baseline
softwareTrigger: false
dttry:
description: Detector table Y
deviceClass: ophyd_devices.devices.psi_motor.EpicsUserMotorVME
deviceConfig:
prefix: X12SA-ES1-DETT:TRY1
deviceTags:
- cSAXS_ES
onFailure: retry
enabled: true
readoutPriority: baseline
softwareTrigger: false
dttrz:
description: Detector table Z
deviceClass: ophyd_devices.devices.psi_motor.EpicsUserMotorVME
deviceConfig:
prefix: X12SA-ES1-DETT:TRZ1
deviceTags:
- cSAXS_ES
onFailure: retry
enabled: true
readoutPriority: baseline
softwareTrigger: false
dettrx:
description: Detector 1 X
deviceClass: ophyd_devices.devices.psi_motor.EpicsUserMotorVME
deviceConfig:
prefix: X12SA-ES1-DET1:TRX1
deviceTags:
- cSAXS_ES
onFailure: retry
enabled: true
readoutPriority: baseline
softwareTrigger: false

View File

@@ -68,91 +68,106 @@ ccmx:
- cSAXS
- optics
# ccm_energy:
# readoutPriority: baseline
# deviceClass: ophyd_devices.devices.simple_positioner.PSIPositionerBase
# prefix: "X12SA-OP-CCM1:"
# override_suffixes:
# user_readback: "ENERGY-GET"
# user_setpoint: "ENERGY-SET"
# velocity: "ROTY:VELO"
# deviceTags:
# - user motors
# enabled: true
# readOnly: false
##########################################################################
######################## SMARACT STAGES ##################################
##########################################################################
xbpm2x:
description: X-ray beam position monitor 1 in OPbox
deviceClass: csaxs_bec.devices.smaract.smaract_ophyd.SmaractMotor
deviceConfig:
axis_Id: A
host: x12sa-eb-smaract-mcs-03.psi.ch
limits:
- -200
- 200
port: 5000
sign: 1
enabled: true
onFailure: buffer
readOnly: false
readoutPriority: baseline
connectionTimeout: 20
userParameter:
# bl_smar_stage to use csaxs reference method. assign number according to axis channel
bl_smar_stage: 0
# xbpm2x:
# description: X-ray beam position monitor 1 in OPbox
# deviceClass: csaxs_bec.devices.smaract.smaract_ophyd.SmaractMotor
# deviceConfig:
# axis_Id: A
# host: x12sa-eb-smaract-mcs-03.psi.ch
# limits:
# - -200
# - 200
# port: 5000
# sign: 1
# enabled: true
# onFailure: buffer
# readOnly: false
# readoutPriority: baseline
# connectionTimeout: 20
# userParameter:
# # bl_smar_stage to use csaxs reference method. assign number according to axis channel
# bl_smar_stage: 0
xbpm2y:
description: X-ray beam position monitor 1 in OPbox
deviceClass: csaxs_bec.devices.smaract.smaract_ophyd.SmaractMotor
deviceConfig:
axis_Id: B
host: x12sa-eb-smaract-mcs-03.psi.ch
limits:
- -200
- 200
port: 5000
sign: 1
enabled: true
onFailure: buffer
readOnly: false
readoutPriority: baseline
connectionTimeout: 20
userParameter:
# bl_smar_stage to use csaxs reference method. assign number according to axis channel
bl_smar_stage: 1
# xbpm2y:
# description: X-ray beam position monitor 1 in OPbox
# deviceClass: csaxs_bec.devices.smaract.smaract_ophyd.SmaractMotor
# deviceConfig:
# axis_Id: B
# host: x12sa-eb-smaract-mcs-03.psi.ch
# limits:
# - -200
# - 200
# port: 5000
# sign: 1
# enabled: true
# onFailure: buffer
# readOnly: false
# readoutPriority: baseline
# connectionTimeout: 20
# userParameter:
# # bl_smar_stage to use csaxs reference method. assign number according to axis channel
# bl_smar_stage: 1
# cu_foilx:
# description: Cu foil in OPbox
# deviceClass: csaxs_bec.devices.smaract.smaract_ophyd.SmaractMotor
# deviceConfig:
# axis_Id: C
# host: x12sa-eb-smaract-mcs-03.psi.ch
# limits:
# - -200
# - 200
# port: 5000
# sign: 1
# enabled: true
# onFailure: buffer
# readOnly: false
# readoutPriority: baseline
# connectionTimeout: 20
# userParameter:
# # bl_smar_stage to use csaxs reference method. assign number according to axis channel
# bl_smar_stage: 2
# scinx:
# description: scintillator in OPbox
# deviceClass: csaxs_bec.devices.smaract.smaract_ophyd.SmaractMotor
# deviceConfig:
# axis_Id: D
# host: x12sa-eb-smaract-mcs-03.psi.ch
# limits:
# - -200
# - 200
# port: 5000
# sign: 1
# enabled: true
# onFailure: buffer
# readOnly: false
# readoutPriority: baseline
# connectionTimeout: 20
# userParameter:
# # bl_smar_stage to use csaxs reference method. assign number according to axis channel
# bl_smar_stage: 3
cu_foilx:
description: Cu foil in OPbox
deviceClass: csaxs_bec.devices.smaract.smaract_ophyd.SmaractMotor
deviceConfig:
axis_Id: C
host: x12sa-eb-smaract-mcs-03.psi.ch
limits:
- -200
- 200
port: 5000
sign: 1
enabled: true
onFailure: buffer
readOnly: false
readoutPriority: baseline
connectionTimeout: 20
userParameter:
# bl_smar_stage to use csaxs reference method. assign number according to axis channel
bl_smar_stage: 2
scinx:
description: scintillator in OPbox
deviceClass: csaxs_bec.devices.smaract.smaract_ophyd.SmaractMotor
deviceConfig:
axis_Id: D
host: x12sa-eb-smaract-mcs-03.psi.ch
limits:
- -200
- 200
port: 5000
sign: 1
enabled: true
onFailure: buffer
readOnly: false
readoutPriority: baseline
connectionTimeout: 20
userParameter:
# bl_smar_stage to use csaxs reference method. assign number according to axis channel
bl_smar_stage: 3
# dmm1_trx_readback_example: # This is the same template as for i.e. bpm4i

View File

@@ -10,8 +10,8 @@
endstation:
- !include ./bl_endstation.yaml
detectors:
- !include ./bl_detectors.yaml
# detectors:
# - !include ./bl_detectors.yaml
#sastt:
# - !include ./sastt.yaml

View File

@@ -395,6 +395,16 @@ rtz:
readoutPriority: on_request
connectionTimeout: 20
rt_positions:
deviceClass: csaxs_bec.devices.omny.rt.rt_flomni_ophyd.RtFlomniFlyer
deviceConfig:
host: mpc2844.psi.ch
port: 2222
readoutPriority: async
connectionTimeout: 20
enabled: true
readOnly: false
############################################################
####################### Cameras ############################
############################################################
@@ -512,6 +522,18 @@ omny_panda:
FMC_IN.VAL2.Min: cap_voltage_fzp_x_min
FMC_IN.VAL2.Max: cap_voltage_fzp_x_max
FMC_IN.VAL2.Mean: cap_voltage_fzp_x_mean
INENC1.VAL.Max: interf_st_fzp_y_max
INENC1.VAL.Mean: interf_st_fzp_y_mean
INENC1.VAL.Min: interf_st_fzp_y_min
INENC2.VAL.Max: interf_st_fzp_x_max
INENC2.VAL.Mean: interf_st_fzp_x_mean
INENC2.VAL.Min: interf_st_fzp_x_min
INENC3.VAL.Max: interf_st_rotz_max
INENC3.VAL.Mean: interf_st_rotz_mean
INENC3.VAL.Min: interf_st_rotz_min
INENC4.VAL.Max: interf_st_rotx_max
INENC4.VAL.Mean: interf_st_rotx_mean
INENC4.VAL.Min: interf_st_rotx_min
deviceTags:
- detector
enabled: true

View File

@@ -317,8 +317,6 @@ class MCSCardCSAXS(PSIDeviceBase, MCSCard):
try:
scan_done = bool(value == self._num_total_triggers)
self.progress.put(value=value, max_value=self._num_total_triggers, done=scan_done)
if scan_done:
self._scan_done_event.set()
except Exception:
content = traceback.format_exc()
logger.info(f"Device {self.name} error: {content}")
@@ -393,6 +391,7 @@ class MCSCardCSAXS(PSIDeviceBase, MCSCard):
self._current_data_index = 0
# NOTE Make sure that the signal that omits mca callbacks is cleared
# DO NOT REMOVE!!
self._omit_mca_callbacks.clear()
# For a fly scan we need to start the mcs card ourselves
@@ -563,8 +562,9 @@ class MCSCardCSAXS(PSIDeviceBase, MCSCard):
def on_stop(self) -> None:
"""Hook called when the device is stopped. In addition, any status that is registered through cancel_on_stop will be cancelled here."""
self.stop_all.put(1)
self.erase_all.put(1)
with suppress_mca_callbacks(self):
self.stop_all.put(1)
self.erase_all.put(1)
def mcs_recovery(self, timeout: int = 1) -> None:
"""

View File

@@ -1,20 +1,18 @@
import threading
import time
from typing import List
import numpy as np
from bec_lib import bec_logger, messages
from bec_lib.endpoints import MessageEndpoints
from bec_lib import bec_logger
from ophyd import Component as Cpt
from ophyd import Device, PositionerBase, Signal
from ophyd.status import wait as status_wait
from ophyd.utils import LimitError
from ophyd_devices import AsyncMultiSignal, DeviceStatus, ProgressSignal
from ophyd_devices.utils.controller import Controller, threadlocked
from ophyd_devices.utils.socket import SocketIO, raise_if_disconnected
from prettytable import PrettyTable
from csaxs_bec.devices.omny.rt.rt_ophyd import (
BECConfigError,
RtCommunicationError,
RtError,
RtReadbackSignal,
@@ -432,27 +430,6 @@ class RtFlomniController(Controller):
t.add_row([i, self.read_ssi_interferometer(i)])
print(t)
def _get_signals_from_table(self, return_table) -> dict:
self.average_stdeviations_x_st_fzp += float(return_table[4])
self.average_stdeviations_y_st_fzp += float(return_table[7])
signals = {
"target_x": {"value": float(return_table[2])},
"average_x_st_fzp": {"value": float(return_table[3])},
"stdev_x_st_fzp": {"value": float(return_table[4])},
"target_y": {"value": float(return_table[5])},
"average_y_st_fzp": {"value": float(return_table[6])},
"stdev_y_st_fzp": {"value": float(return_table[7])},
"average_rotz": {"value": float(return_table[8])},
"stdev_rotz": {"value": float(return_table[9])},
"average_stdeviations_x_st_fzp": {
"value": self.average_stdeviations_x_st_fzp / (int(return_table[0]) + 1)
},
"average_stdeviations_y_st_fzp": {
"value": self.average_stdeviations_y_st_fzp / (int(return_table[0]) + 1)
},
}
return signals
@threadlocked
def start_scan(self):
if not self.feedback_is_running():
@@ -492,91 +469,6 @@ class RtFlomniController(Controller):
current_position_in_scan = int(float(return_table[2]))
return (mode, number_of_positions_planned, current_position_in_scan)
def read_positions_from_sampler(self):
# this was for reading after the scan completed
number_of_samples_to_read = 1 # self.get_scan_status()[1] #number of valid samples, will be updated upon first data read
read_counter = 0
self.average_stdeviations_x_st_fzp = 0
self.average_stdeviations_y_st_fzp = 0
self.average_lamni_angle = 0
mode, number_of_positions_planned, current_position_in_scan = self.get_scan_status()
# if not (mode==2 or mode==3):
# error
self.device_manager.connector.set(
MessageEndpoints.device_status("rt_scan"),
messages.DeviceStatusMessage(
device="rt_scan", status=1, metadata=self.readout_metadata
),
)
# while scan is running
while mode > 0:
# TODO here?: scan abortion if no progress in scan *raise error
# logger.info(f"Current scan position {current_position_in_scan} out of {number_of_positions_planned}")
mode, number_of_positions_planned, current_position_in_scan = self.get_scan_status()
time.sleep(0.01)
if current_position_in_scan > 5:
while current_position_in_scan > read_counter + 1:
return_table = (self.socket_put_and_receive(f"r{read_counter}")).split(",")
# logger.info(f"{return_table}")
logger.info(f"Read {read_counter} out of {number_of_positions_planned}")
read_counter = read_counter + 1
signals = self._get_signals_from_table(return_table)
self.publish_device_data(signals=signals, point_id=int(return_table[0]))
time.sleep(0.05)
# read the last samples even though scan is finished already
while number_of_positions_planned > read_counter:
return_table = (self.socket_put_and_receive(f"r{read_counter}")).split(",")
logger.info(f"Read {read_counter} out of {number_of_positions_planned}")
# logger.info(f"{return_table}")
read_counter = read_counter + 1
signals = self._get_signals_from_table(return_table)
self.publish_device_data(signals=signals, point_id=int(return_table[0]))
self.device_manager.connector.set(
MessageEndpoints.device_status("rt_scan"),
messages.DeviceStatusMessage(
device="rt_scan", status=0, metadata=self.readout_metadata
),
)
logger.info(
"Flomni statistics: Average of all standard deviations: x"
f" {self.average_stdeviations_x_st_fzp/read_counter*1000:.1f}, y"
f" {self.average_stdeviations_y_st_fzp/read_counter*1000:.1f}"
)
def publish_device_data(self, signals, point_id):
self.device_manager.connector.set_and_publish(
MessageEndpoints.device_read("rt_flomni"),
messages.DeviceMessage(
signals=signals, metadata={"point_id": point_id, **self.readout_metadata}
),
)
def start_readout(self):
readout = threading.Thread(target=self.read_positions_from_sampler)
readout.start()
def kickoff(self, metadata):
self.readout_metadata = metadata
while not self._min_scan_buffer_reached:
time.sleep(0.001)
self.start_scan()
time.sleep(0.1)
self.start_readout()
class RtFlomniReadbackSignal(RtReadbackSignal):
@retry_once
@@ -844,6 +736,185 @@ class RtFlomniMotor(Device, PositionerBase):
return super().stop(success=success)
class RtFlomniFlyer(Device):
    """Flyer device for the RtFlomni real-time controller.

    Wraps an :class:`RtFlomniController` socket connection and exposes the
    position/statistics stream of a fly scan as an ``AsyncMultiSignal`` plus a
    ``ProgressSignal``. The readout runs in a background thread started by
    :meth:`complete`; :meth:`stop` signals shutdown via ``shutdown_event`` and
    joins the thread.
    """

    USER_ACCESS = ["controller"]

    # Multi-signal holding one sample per scan point; entries appended
    # ("type": "add") with unbounded first dimension.
    data = Cpt(
        AsyncMultiSignal,
        name="data",
        signals=[
            "target_x",
            "average_x_st_fzp",
            "stdev_x_st_fzp",
            "target_y",
            "average_y_st_fzp",
            "stdev_y_st_fzp",
            "average_rotz",
            "stdev_rotz",
            "average_stdeviations_x_st_fzp",
            "average_stdeviations_y_st_fzp",
        ],
        ndim=1,
        async_update={"type": "add", "max_shape": [None]},
        max_size=1000,
    )
    progress = Cpt(
        ProgressSignal, doc="ProgressSignal indicating the progress of the device during a scan."
    )

    def __init__(
        self,
        prefix="",
        *,
        name,
        kind=None,
        read_attrs=None,
        configuration_attrs=None,
        parent=None,
        host="mpc2844.psi.ch",
        port=2222,
        socket_cls=SocketIO,
        device_manager=None,
        **kwargs,
    ):
        """Create the flyer and its controller connection.

        Args:
            host: Hostname of the RtFlomni controller socket server.
            port: TCP port of the controller socket server.
            socket_cls: Socket implementation handed to the controller.
            device_manager: BEC device manager forwarded to the controller.
        """
        super().__init__(prefix=prefix, name=name, parent=parent, **kwargs)
        # Set to request the readout thread (and kickoff wait loops) to exit.
        self.shutdown_event = threading.Event()
        self.controller = RtFlomniController(
            socket_cls=socket_cls, socket_host=host, socket_port=port, device_manager=device_manager
        )
        # Running sums of per-point standard deviations; reset at readout start.
        self.average_stdeviations_x_st_fzp = 0
        self.average_stdeviations_y_st_fzp = 0
        self.average_lamni_angle = 0
        self.readout_thread = None
        # Starts set so complete() before any kickoff() returns immediately.
        self.scan_done_event = threading.Event()
        self.scan_done_event.set()

    def read_positions_from_sampler(self, status: DeviceStatus):
        """
        Read the positions from the sampler and update the data signal.

        This function runs in a separate thread and continuously checks the
        scan status.

        Args:
            status (DeviceStatus): The status object to update when the readout is complete.
        """
        read_counter = 0
        self.average_stdeviations_x_st_fzp = 0
        self.average_stdeviations_y_st_fzp = 0
        self.average_lamni_angle = 0
        mode, number_of_positions_planned, current_position_in_scan = (
            self.controller.get_scan_status()
        )
        # while scan is running (mode > 0); wait() doubles as a 10 ms poll delay
        while mode > 0 and not self.shutdown_event.wait(0.01):
            # logger.info(f"Current scan position {current_position_in_scan} out of {number_of_positions_planned}")
            mode, number_of_positions_planned, current_position_in_scan = (
                self.controller.get_scan_status()
            )
            # NOTE(review): the controller seems to need a few points buffered
            # before "r<idx>" reads are valid — confirm why the threshold is 5.
            if current_position_in_scan > 5:
                while current_position_in_scan > read_counter + 1:
                    return_table = (
                        self.controller.socket_put_and_receive(f"r{read_counter}")
                    ).split(",")
                    logger.info(f"Read {read_counter} out of {number_of_positions_planned}")
                    self.progress.put(
                        value=read_counter, max_value=number_of_positions_planned, done=False
                    )
                    read_counter = read_counter + 1
                    signals = self._get_signals_from_table(return_table)
                    self.data.set(signals)
                    if self.shutdown_event.wait(0.05):
                        logger.info("Shutdown event set, stopping readout.")
                        # if we are here, the shutdown_event is set. We can exit the readout loop.
                        status.set_finished()
                        return
        # read the last samples even though scan is finished already
        while number_of_positions_planned > read_counter and not self.shutdown_event.is_set():
            return_table = (self.controller.socket_put_and_receive(f"r{read_counter}")).split(",")
            logger.info(f"Read {read_counter} out of {number_of_positions_planned}")
            self.progress.put(value=read_counter, max_value=number_of_positions_planned, done=False)
            read_counter = read_counter + 1
            signals = self._get_signals_from_table(return_table)
            self.data.set(signals)
        # NOTE: No need to set the status to failed if the shutdown_event is set.
        # The stop() method will take care of that.
        status.set_finished()
        self.progress.put(value=read_counter, max_value=number_of_positions_planned, done=True)
        logger.info(
            "Flomni statistics: Average of all standard deviations: x"
            f" {self.average_stdeviations_x_st_fzp/read_counter*1000:.1f}, y"
            f" {self.average_stdeviations_y_st_fzp/read_counter*1000:.1f}"
        )

    def _get_signals_from_table(self, return_table) -> dict:
        """Convert one comma-split sampler row into the signal dict for ``data``.

        Indexing assumes the controller row layout
        [index, ?, target_x, avg_x, stdev_x, target_y, avg_y, stdev_y,
        avg_rotz, stdev_rotz] — TODO confirm against the controller protocol.
        Also accumulates running stdev sums used for the end-of-scan averages.
        """
        self.average_stdeviations_x_st_fzp += float(return_table[4])
        self.average_stdeviations_y_st_fzp += float(return_table[7])
        signals = {
            "target_x": {"value": float(return_table[2])},
            "average_x_st_fzp": {"value": float(return_table[3])},
            "stdev_x_st_fzp": {"value": float(return_table[4])},
            "target_y": {"value": float(return_table[5])},
            "average_y_st_fzp": {"value": float(return_table[6])},
            "stdev_y_st_fzp": {"value": float(return_table[7])},
            "average_rotz": {"value": float(return_table[8])},
            "stdev_rotz": {"value": float(return_table[9])},
            "average_stdeviations_x_st_fzp": {
                # running mean over points read so far; return_table[0] is the point index
                "value": self.average_stdeviations_x_st_fzp / (int(return_table[0]) + 1)
            },
            "average_stdeviations_y_st_fzp": {
                "value": self.average_stdeviations_y_st_fzp / (int(return_table[0]) + 1)
            },
        }
        return signals

    def stage(self):
        """Reset the shutdown flag and mark no scan pending before staging."""
        self.shutdown_event.clear()
        self.scan_done_event.set()
        return super().stage()

    def start_readout(self, status: DeviceStatus):
        """Spawn the background readout thread; *status* finishes when it ends."""
        self.readout_thread = threading.Thread(
            target=self.read_positions_from_sampler, args=(status,)
        )
        self.readout_thread.start()

    def kickoff(self) -> DeviceStatus:
        """Start the fly scan on the controller once its buffer is primed.

        Busy-waits (1 ms steps, abortable via ``shutdown_event``) until the
        controller reports its minimum scan buffer, then starts the scan and
        returns an already-finished status.
        """
        self.shutdown_event.clear()
        self.scan_done_event.clear()
        while not self.controller._min_scan_buffer_reached and not self.shutdown_event.wait(0.001):
            ...
        self.controller.start_scan()
        # brief settle delay; also returns early if shutdown is requested
        self.shutdown_event.wait(0.1)
        status = DeviceStatus(self)
        status.set_finished()
        return status

    def complete(self) -> DeviceStatus:
        """Wait until the flyer is done."""
        if self.scan_done_event.is_set():
            # if the scan_done_event is already set, we can return a finished status immediately
            status = DeviceStatus(self)
            status.set_finished()
            return status
        status = DeviceStatus(self)
        self.start_readout(status)
        # mark the scan done once the readout status resolves (any outcome)
        status.add_callback(lambda *args, **kwargs: self.scan_done_event.set())
        return status

    def stop(self, *, success=False):
        """Request readout shutdown, join the readout thread, then stop."""
        self.shutdown_event.set()
        self.scan_done_event.set()
        if self.readout_thread is not None:
            self.readout_thread.join()
        return super().stop(success=success)
if __name__ == "__main__":
rtcontroller = RtFlomniController(
socket_cls=SocketIO, socket_host="mpc2844.psi.ch", socket_port=2222, device_manager=None

View File

@@ -1 +1 @@
from .csaxs_nexus import NeXus_format as cSAXS_NeXus_format
from .csaxs_nexus import cSAXSNeXusFormat

View File

@@ -1,445 +1,472 @@
from __future__ import annotations
from typing import TYPE_CHECKING, Any
import numpy as np
if TYPE_CHECKING:
from bec_lib.devicemanager import DeviceManagerBase
from bec_server.file_writer.file_writer import HDF5Storage
from bec_server.file_writer.default_writer import DefaultFormat
def get_entry(data: dict, name: str, default=None) -> Any:
class cSAXSNeXusFormat(DefaultFormat):
"""
Get an entry from the scan data assuming a <device>.<device>.value structure.
Args:
data (dict): Scan data
name (str): Entry name
default (Any, optional): Default value. Defaults to None.
NeXus file format for cSAXS beamline. This format is based on the default NeXus format, but with some additional entries specific to the cSAXS beamline. The structure of the file is based on the NeXus standard, but with some additional groups and datasets specific to the cSAXS beamline.
"""
if isinstance(data.get(name), list) and isinstance(data.get(name)[0], dict):
return [sub_data.get(name, {}).get("value", default) for sub_data in data.get(name)]
return data.get(name, {}).get(name, {}).get("value", default)
def format(self) -> None:
"""
Prepare the NeXus file format.
Override this method in file writer plugins to customize the HDF5 file format.
The class provides access to the following attributes:
- self.storage: The HDF5Storage object.
- self.data: The data dictionary.
- self.file_references: The file references dictionary, which has the link to external data.
- self.device_manager: The DeviceManagerBase object.
- self.get_entry(name, default=None): Helper method to get an entry from the data dictionary.
def NeXus_format(
storage: HDF5Storage, data: dict, file_references: dict, device_manager: DeviceManagerBase
) -> HDF5Storage:
"""
Prepare the NeXus file format.
See also: :class:`bec_server.file_writer.file_writer.HDF5Storage`.
Args:
storage (HDF5Storage): HDF5 storage. Pseudo hdf5 file container that will be written to disk later.
data (dict): scan data
file_references (dict): File references. Can be used to add external files to the HDF5 file. The path is given relative to the HDF5 file.
device_manager (DeviceManagerBase): Device manager. Can be used to check if devices are available.
"""
Returns:
HDF5Storage: Updated HDF5 storage
"""
# /entry
entry = storage.create_group("entry")
entry.attrs["NX_class"] = "NXentry"
entry.attrs["definition"] = "NXsas"
entry.attrs["start_time"] = data.get("start_time")
entry.attrs["end_time"] = data.get("end_time")
entry.attrs["version"] = 1.0
# entry = self.storage.create_group("entry")
# /entry/collection
collection = entry.create_group("collection")
collection.attrs["NX_class"] = "NXcollection"
bec_collection = collection.create_group("bec")
# # /entry/control
# control = entry.create_group("control")
# control.attrs["NX_class"] = "NXmonitor"
# control.create_dataset(name="mode", data="monitor")
# /entry/control
control = entry.create_group("control")
control.attrs["NX_class"] = "NXmonitor"
control.create_dataset(name="mode", data="monitor")
control.create_dataset(name="integral", data=get_entry(data, "bpm4i"))
# #########
# # EXAMPLE for soft link
# #########
# # /entry/data
# if "eiger_4" in self.device_manager.devices:
# entry.create_soft_link(name="data", target="/entry/instrument/eiger_4")
# /entry/data
main_data = entry.create_group("data")
main_data.attrs["NX_class"] = "NXdata"
if "eiger_4" in device_manager.devices:
main_data.create_soft_link(name="data", target="/entry/instrument/eiger_4/data")
elif "eiger9m" in device_manager.devices:
main_data.create_soft_link(name="data", target="/entry/instrument/eiger9m/data")
elif "pilatus_2" in device_manager.devices:
main_data.create_soft_link(name="data", target="/entry/instrument/pilatus_2/data")
# ########
# # EXAMPLE for external link
# ########
# # control = entry.create_group("sample")
# # control.create_ext_link("data", self.file_references["eiger9m"]["path"], "EG9M/data")
# /entry/sample
control = entry.create_group("sample")
control.attrs["NX_class"] = "NXsample"
control.create_dataset(name="name", data=get_entry(data, "samplename"))
control.create_dataset(name="description", data=data.get("sample_description"))
x_translation = control.create_dataset(name="x_translation", data=get_entry(data, "samx"))
x_translation.attrs["units"] = "mm"
y_translation = control.create_dataset(name="y_translation", data=get_entry(data, "samy"))
y_translation.attrs["units"] = "mm"
temperature_log = control.create_dataset(name="temperature_log", data=get_entry(data, "temp"))
temperature_log.attrs["units"] = "K"
# # /entry/sample
# control = entry.create_group("sample")
# control.attrs["NX_class"] = "NXsample"
# control.create_dataset(name="name", data=self.data.get("samplename"))
# control.create_dataset(name="description", data=self.data.get("sample_description"))
# /entry/instrument
instrument = entry.create_group("instrument")
instrument.attrs["NX_class"] = "NXinstrument"
instrument.create_dataset(name="name", data="cSAXS beamline")
# # /entry/instrument
# instrument = entry.create_group("instrument")
# instrument.attrs["NX_class"] = "NXinstrument"
source = instrument.create_group("source")
source.attrs["NX_class"] = "NXsource"
source.create_dataset(name="type", data="Synchrotron X-ray Source")
source.create_dataset(name="name", data="Swiss Light Source")
source.create_dataset(name="probe", data="x-ray")
distance = source.create_dataset(
name="distance", data=-33800 - np.asarray(get_entry(data, "samz", 0))
)
distance.attrs["units"] = "mm"
sigma_x = source.create_dataset(name="sigma_x", data=0.202)
sigma_x.attrs["units"] = "mm"
sigma_y = source.create_dataset(name="sigma_y", data=0.018)
sigma_y.attrs["units"] = "mm"
divergence_x = source.create_dataset(name="divergence_x", data=0.000135)
divergence_x.attrs["units"] = "radians"
divergence_y = source.create_dataset(name="divergence_y", data=0.000025)
divergence_y.attrs["units"] = "radians"
current = source.create_dataset(name="current", data=get_entry(data, "curr"))
current.attrs["units"] = "mA"
# source = instrument.create_group("source")
# source.attrs["NX_class"] = "NXsource"
# source.create_dataset(name="type", data="Synchrotron X-ray Source")
# source.create_dataset(name="name", data="Swiss Light Source")
# source.create_dataset(name="probe", data="x-ray")
insertion_device = instrument.create_group("insertion_device")
insertion_device.attrs["NX_class"] = "NXinsertion_device"
source.create_dataset(name="type", data="undulator")
gap = source.create_dataset(name="gap", data=get_entry(data, "idgap"))
gap.attrs["units"] = "mm"
k = source.create_dataset(name="k", data=2.46)
k.attrs["units"] = "NX_DIMENSIONLESS"
length = source.create_dataset(name="length", data=1820)
length.attrs["units"] = "mm"
# # /entry
# entry = self.storage.create_group("entry")
# entry.attrs["NX_class"] = "NXentry"
# entry.attrs["definition"] = "NXsas"
# entry.attrs["start_time"] = self.data.get("start_time")
# entry.attrs["end_time"] = self.data.get("end_time")
# entry.attrs["version"] = 1.0
slit_0 = instrument.create_group("slit_0")
slit_0.attrs["NX_class"] = "NXslit"
source.create_dataset(name="material", data="OFHC Cu")
source.create_dataset(name="description", data="Horizontal secondary source slit")
x_gap = source.create_dataset(name="x_gap", data=get_entry(data, "sl0wh"))
x_gap.attrs["units"] = "mm"
x_translation = source.create_dataset(name="x_translation", data=get_entry(data, "sl0ch"))
x_translation.attrs["units"] = "mm"
distance = source.create_dataset(
name="distance", data=-21700 - np.asarray(get_entry(data, "samz", 0))
)
distance.attrs["units"] = "mm"
# # /entry/control
# control = entry.create_group("control")
# control.attrs["NX_class"] = "NXmonitor"
# control.create_dataset(name="mode", data="monitor")
# control.create_dataset(name="integral", data=self.get_entry("bpm4i"))
slit_1 = instrument.create_group("slit_1")
slit_1.attrs["NX_class"] = "NXslit"
source.create_dataset(name="material", data="OFHC Cu")
source.create_dataset(name="description", data="Horizontal secondary source slit")
x_gap = source.create_dataset(name="x_gap", data=get_entry(data, "sl1wh"))
x_gap.attrs["units"] = "mm"
y_gap = source.create_dataset(name="y_gap", data=get_entry(data, "sl1wv"))
y_gap.attrs["units"] = "mm"
x_translation = source.create_dataset(name="x_translation", data=get_entry(data, "sl1ch"))
x_translation.attrs["units"] = "mm"
height = source.create_dataset(name="x_translation", data=get_entry(data, "sl1ch"))
height.attrs["units"] = "mm"
distance = source.create_dataset(
name="distance", data=-7800 - np.asarray(get_entry(data, "samz", 0))
)
distance.attrs["units"] = "mm"
# # /entry/data
# main_data = entry.create_group("data")
# main_data.attrs["NX_class"] = "NXdata"
# if "eiger_4" in self.device_manager.devices:
# main_data.create_soft_link(name="data", target="/entry/instrument/eiger_4/data")
# elif "eiger9m" in self.device_manager.devices:
# main_data.create_soft_link(name="data", target="/entry/instrument/eiger9m/data")
# elif "pilatus_2" in self.device_manager.devices:
# main_data.create_soft_link(name="data", target="/entry/instrument/pilatus_2/data")
mono = instrument.create_group("monochromator")
mono.attrs["NX_class"] = "NXmonochromator"
mokev = data.get("mokev", {})
if mokev:
if isinstance(mokev, list):
mokev = mokev[0]
wavelength = mono.create_dataset(
name="wavelength", data=12.3984193 / (mokev.get("mokev").get("value") + 1e-9)
)
wavelength.attrs["units"] = "Angstrom"
energy = mono.create_dataset(name="energy", data=mokev.get("mokev").get("value"))
energy.attrs["units"] = "keV"
mono.create_dataset(name="type", data="Double crystal fixed exit monochromator.")
distance = mono.create_dataset(
name="distance", data=-5220 - np.asarray(get_entry(data, "samz", 0))
)
distance.attrs["units"] = "mm"
# # /entry/sample
# control = entry.create_group("sample")
# control.attrs["NX_class"] = "NXsample"
# control.create_dataset(name="name", data=self.get_entry("samplename"))
# control.create_dataset(name="description", data=self.data.get("sample_description"))
# x_translation = control.create_dataset(name="x_translation", data=self.get_entry("samx"))
# x_translation.attrs["units"] = "mm"
# y_translation = control.create_dataset(name="y_translation", data=self.get_entry("samy"))
# y_translation.attrs["units"] = "mm"
# temperature_log = control.create_dataset(
# name="temperature_log", data=self.get_entry("temp")
# )
# temperature_log.attrs["units"] = "K"
crystal_1 = mono.create_group("crystal_1")
crystal_1.attrs["NX_class"] = "NXcrystal"
crystal_1.create_dataset(name="usage", data="Bragg")
crystal_1.create_dataset(name="order_no", data="1")
crystal_1.create_dataset(name="reflection", data="[1 1 1]")
bragg_angle = crystal_1.create_dataset(name="bragg_angle", data=get_entry(data, "moth1"))
bragg_angle.attrs["units"] = "degrees"
# # /entry/instrument
# instrument = entry.create_group("instrument")
# instrument.attrs["NX_class"] = "NXinstrument"
# instrument.create_dataset(name="name", data="cSAXS beamline")
crystal_2 = mono.create_group("crystal_2")
crystal_2.attrs["NX_class"] = "NXcrystal"
crystal_2.create_dataset(name="usage", data="Bragg")
crystal_2.create_dataset(name="order_no", data="2")
crystal_2.create_dataset(name="reflection", data="[1 1 1]")
bragg_angle = crystal_2.create_dataset(name="bragg_angle", data=get_entry(data, "moth1"))
bragg_angle.attrs["units"] = "degrees"
bend_x = crystal_2.create_dataset(name="bend_x", data=get_entry(data, "mobd"))
bend_x.attrs["units"] = "degrees"
# source = instrument.create_group("source")
# source.attrs["NX_class"] = "NXsource"
# source.create_dataset(name="type", data="Synchrotron X-ray Source")
# source.create_dataset(name="name", data="Swiss Light Source")
# source.create_dataset(name="probe", data="x-ray")
# distance = source.create_dataset(
# name="distance", data=-33800 - np.asarray(self.get_entry("samz", 0))
# )
# distance.attrs["units"] = "mm"
# sigma_x = source.create_dataset(name="sigma_x", data=0.202)
# sigma_x.attrs["units"] = "mm"
# sigma_y = source.create_dataset(name="sigma_y", data=0.018)
# sigma_y.attrs["units"] = "mm"
# divergence_x = source.create_dataset(name="divergence_x", data=0.000135)
# divergence_x.attrs["units"] = "radians"
# divergence_y = source.create_dataset(name="divergence_y", data=0.000025)
# divergence_y.attrs["units"] = "radians"
# current = source.create_dataset(name="current", data=self.get_entry("curr"))
# current.attrs["units"] = "mA"
xbpm4 = instrument.create_group("XBPM4")
xbpm4.attrs["NX_class"] = "NXdetector"
xbpm4_sum = xbpm4.create_group("XBPM4_sum")
xbpm4_sum_data = xbpm4_sum.create_dataset(name="data", data=get_entry(data, "bpm4s"))
xbpm4_sum_data.attrs["units"] = "NX_DIMENSIONLESS"
xbpm4_sum.create_dataset(name="description", data="Sum of counts for the four quadrants.")
xbpm4_x = xbpm4.create_group("XBPM4_x")
xbpm4_x_data = xbpm4_x.create_dataset(name="data", data=get_entry(data, "bpm4x"))
xbpm4_x_data.attrs["units"] = "NX_DIMENSIONLESS"
xbpm4_x.create_dataset(
name="description", data="Normalized difference of counts between left and right quadrants."
)
xbpm4_y = xbpm4.create_group("XBPM4_y")
xbpm4_y_data = xbpm4_y.create_dataset(name="data", data=get_entry(data, "bpm4y"))
xbpm4_y_data.attrs["units"] = "NX_DIMENSIONLESS"
xbpm4_y.create_dataset(
name="description", data="Normalized difference of counts between high and low quadrants."
)
xbpm4_skew = xbpm4.create_group("XBPM4_skew")
xbpm4_skew_data = xbpm4_skew.create_dataset(name="data", data=get_entry(data, "bpm4z"))
xbpm4_skew_data.attrs["units"] = "NX_DIMENSIONLESS"
xbpm4_skew.create_dataset(
name="description", data="Normalized difference of counts between diagonal quadrants."
)
# insertion_device = instrument.create_group("insertion_device")
# insertion_device.attrs["NX_class"] = "NXinsertion_device"
# source.create_dataset(name="type", data="undulator")
# gap = source.create_dataset(name="gap", data=self.get_entry("idgap"))
# gap.attrs["units"] = "mm"
# k = source.create_dataset(name="k", data=2.46)
# k.attrs["units"] = "NX_DIMENSIONLESS"
# length = source.create_dataset(name="length", data=1820)
# length.attrs["units"] = "mm"
mirror = instrument.create_group("mirror")
mirror.attrs["NX_class"] = "NXmirror"
mirror.create_dataset(name="type", data="single")
mirror.create_dataset(
name="description",
data="Grazing incidence mirror to reject high-harmonic wavelengths from the monochromator. There are three coating options available that are used depending on the X-ray energy, no coating (SiO2), rhodium (Rh) or platinum (Pt).",
)
incident_angle = mirror.create_dataset(name="incident_angle", data=get_entry(data, "mith"))
incident_angle.attrs["units"] = "degrees"
substrate_material = mirror.create_dataset(name="substrate_material", data="SiO2")
substrate_material.attrs["units"] = "NX_CHAR"
coating_material = mirror.create_dataset(name="coating_material", data="SiO2")
coating_material.attrs["units"] = "NX_CHAR"
bend_y = mirror.create_dataset(name="bend_y", data="mibd")
bend_y.attrs["units"] = "NX_DIMENSIONLESS"
distance = mirror.create_dataset(
name="distance", data=-4370 - np.asarray(get_entry(data, "samz", 0))
)
distance.attrs["units"] = "mm"
# slit_0 = instrument.create_group("slit_0")
# slit_0.attrs["NX_class"] = "NXslit"
# source.create_dataset(name="material", data="OFHC Cu")
# source.create_dataset(name="description", data="Horizontal secondary source slit")
# x_gap = source.create_dataset(name="x_gap", data=self.get_entry("sl0wh"))
# x_gap.attrs["units"] = "mm"
# x_translation = source.create_dataset(name="x_translation", data=self.get_entry("sl0ch"))
# x_translation.attrs["units"] = "mm"
# distance = source.create_dataset(
# name="distance", data=-21700 - np.asarray(self.get_entry("samz", 0))
# )
# distance.attrs["units"] = "mm"
xbpm5 = instrument.create_group("XBPM5")
xbpm5.attrs["NX_class"] = "NXdetector"
xbpm5_sum = xbpm5.create_group("XBPM5_sum")
xbpm5_sum_data = xbpm5_sum.create_dataset(name="data", data=get_entry(data, "bpm5s"))
xbpm5_sum_data.attrs["units"] = "NX_DIMENSIONLESS"
xbpm5_sum.create_dataset(name="description", data="Sum of counts for the four quadrants.")
xbpm5_x = xbpm5.create_group("XBPM5_x")
xbpm5_x_data = xbpm5_x.create_dataset(name="data", data=get_entry(data, "bpm5x"))
xbpm5_x_data.attrs["units"] = "NX_DIMENSIONLESS"
xbpm5_x.create_dataset(
name="description", data="Normalized difference of counts between left and right quadrants."
)
xbpm5_y = xbpm5.create_group("XBPM5_y")
xbpm5_y_data = xbpm5_y.create_dataset(name="data", data=get_entry(data, "bpm5y"))
xbpm5_y_data.attrs["units"] = "NX_DIMENSIONLESS"
xbpm5_y.create_dataset(
name="description", data="Normalized difference of counts between high and low quadrants."
)
xbpm5_skew = xbpm5.create_group("XBPM5_skew")
xbpm5_skew_data = xbpm5_skew.create_dataset(name="data", data=get_entry(data, "bpm5z"))
xbpm5_skew_data.attrs["units"] = "NX_DIMENSIONLESS"
xbpm5_skew.create_dataset(
name="description", data="Normalized difference of counts between diagonal quadrants."
)
# slit_1 = instrument.create_group("slit_1")
# slit_1.attrs["NX_class"] = "NXslit"
# source.create_dataset(name="material", data="OFHC Cu")
# source.create_dataset(name="description", data="Horizontal secondary source slit")
# x_gap = source.create_dataset(name="x_gap", data=self.get_entry("sl1wh"))
# x_gap.attrs["units"] = "mm"
# y_gap = source.create_dataset(name="y_gap", data=self.get_entry("sl1wv"))
# y_gap.attrs["units"] = "mm"
# x_translation = source.create_dataset(name="x_translation", data=self.get_entry("sl1ch"))
# x_translation.attrs["units"] = "mm"
# height = source.create_dataset(name="x_translation", data=self.get_entry("sl1ch"))
# height.attrs["units"] = "mm"
# distance = source.create_dataset(
# name="distance", data=-7800 - np.asarray(self.get_entry("samz", 0))
# )
# distance.attrs["units"] = "mm"
slit_2 = instrument.create_group("slit_2")
slit_2.attrs["NX_class"] = "NXslit"
source.create_dataset(name="material", data="Ag")
source.create_dataset(name="description", data="Slit 2, optics hutch")
x_gap = source.create_dataset(name="x_gap", data=get_entry(data, "sl2wh"))
x_gap.attrs["units"] = "mm"
y_gap = source.create_dataset(name="y_gap", data=get_entry(data, "sl2wv"))
y_gap.attrs["units"] = "mm"
x_translation = source.create_dataset(name="x_translation", data=get_entry(data, "sl2ch"))
x_translation.attrs["units"] = "mm"
height = source.create_dataset(name="x_translation", data=get_entry(data, "sl2cv"))
height.attrs["units"] = "mm"
distance = source.create_dataset(
name="distance", data=-3140 - np.asarray(get_entry(data, "samz", 0))
)
distance.attrs["units"] = "mm"
# mono = instrument.create_group("monochromator")
# mono.attrs["NX_class"] = "NXmonochromator"
# mokev = self.data.get("mokev", {})
# if mokev:
# if isinstance(mokev, list):
# mokev = mokev[0]
# wavelength = mono.create_dataset(
# name="wavelength", data=12.3984193 / (mokev.get("mokev").get("value") + 1e-9)
# )
# wavelength.attrs["units"] = "Angstrom"
# energy = mono.create_dataset(name="energy", data=mokev.get("mokev").get("value"))
# energy.attrs["units"] = "keV"
# mono.create_dataset(name="type", data="Double crystal fixed exit monochromator.")
# distance = mono.create_dataset(
# name="distance", data=-5220 - np.asarray(self.get_entry("samz", 0))
# )
# distance.attrs["units"] = "mm"
slit_3 = instrument.create_group("slit_3")
slit_3.attrs["NX_class"] = "NXslit"
source.create_dataset(name="material", data="Si")
source.create_dataset(name="description", data="Slit 3, experimental hutch, exposure box")
x_gap = source.create_dataset(name="x_gap", data=get_entry(data, "sl3wh"))
x_gap.attrs["units"] = "mm"
y_gap = source.create_dataset(name="y_gap", data=get_entry(data, "sl3wv"))
y_gap.attrs["units"] = "mm"
x_translation = source.create_dataset(name="x_translation", data=get_entry(data, "sl3ch"))
x_translation.attrs["units"] = "mm"
height = source.create_dataset(name="x_translation", data=get_entry(data, "sl3cv"))
height.attrs["units"] = "mm"
# distance = source.create_dataset(name="distance", data=-3140 - get_entry(data, "samz", 0))
# distance.attrs["units"] = "mm"
# crystal_1 = mono.create_group("crystal_1")
# crystal_1.attrs["NX_class"] = "NXcrystal"
# crystal_1.create_dataset(name="usage", data="Bragg")
# crystal_1.create_dataset(name="order_no", data="1")
# crystal_1.create_dataset(name="reflection", data="[1 1 1]")
# bragg_angle = crystal_1.create_dataset(name="bragg_angle", data=self.get_entry("moth1"))
# bragg_angle.attrs["units"] = "degrees"
filter_set = instrument.create_group("filter_set")
filter_set.attrs["NX_class"] = "NXattenuator"
filter_set.create_dataset(name="material", data="Si")
filter_set.create_dataset(
name="description",
data="The filter set consists of 4 linear stages, each with five filter positions. Additionally, each one allows for an out position to allow 'no filtering'.",
)
attenuator_transmission = filter_set.create_dataset(
name="attenuator_transmission", data=10 ** get_entry(data, "ftrans", 0)
)
attenuator_transmission.attrs["units"] = "NX_DIMENSIONLESS"
# crystal_2 = mono.create_group("crystal_2")
# crystal_2.attrs["NX_class"] = "NXcrystal"
# crystal_2.create_dataset(name="usage", data="Bragg")
# crystal_2.create_dataset(name="order_no", data="2")
# crystal_2.create_dataset(name="reflection", data="[1 1 1]")
# bragg_angle = crystal_2.create_dataset(name="bragg_angle", data=self.get_entry("moth1"))
# bragg_angle.attrs["units"] = "degrees"
# bend_x = crystal_2.create_dataset(name="bend_x", data=self.get_entry("mobd"))
# bend_x.attrs["units"] = "degrees"
slit_4 = instrument.create_group("slit_4")
slit_4.attrs["NX_class"] = "NXslit"
source.create_dataset(name="material", data="Si")
source.create_dataset(name="description", data="Slit 4, experimental hutch, exposure box")
x_gap = source.create_dataset(name="x_gap", data=get_entry(data, "sl4wh"))
x_gap.attrs["units"] = "mm"
y_gap = source.create_dataset(name="y_gap", data=get_entry(data, "sl4wv"))
y_gap.attrs["units"] = "mm"
x_translation = source.create_dataset(name="x_translation", data=get_entry(data, "sl4ch"))
x_translation.attrs["units"] = "mm"
height = source.create_dataset(name="x_translation", data=get_entry(data, "sl4cv"))
height.attrs["units"] = "mm"
# distance = source.create_dataset(name="distance", data=-3140 - get_entry(data, "samz", 0))
# distance.attrs["units"] = "mm"
# xbpm4 = instrument.create_group("XBPM4")
# xbpm4.attrs["NX_class"] = "NXdetector"
# xbpm4_sum = xbpm4.create_group("XBPM4_sum")
# xbpm4_sum_data = xbpm4_sum.create_dataset(name="data", data=self.get_entry("bpm4s"))
# xbpm4_sum_data.attrs["units"] = "NX_DIMENSIONLESS"
# xbpm4_sum.create_dataset(name="description", data="Sum of counts for the four quadrants.")
# xbpm4_x = xbpm4.create_group("XBPM4_x")
# xbpm4_x_data = xbpm4_x.create_dataset(name="data", data=self.get_entry("bpm4x"))
# xbpm4_x_data.attrs["units"] = "NX_DIMENSIONLESS"
# xbpm4_x.create_dataset(
# name="description",
# data="Normalized difference of counts between left and right quadrants.",
# )
# xbpm4_y = xbpm4.create_group("XBPM4_y")
# xbpm4_y_data = xbpm4_y.create_dataset(name="data", data=self.get_entry("bpm4y"))
# xbpm4_y_data.attrs["units"] = "NX_DIMENSIONLESS"
# xbpm4_y.create_dataset(
# name="description",
# data="Normalized difference of counts between high and low quadrants.",
# )
# xbpm4_skew = xbpm4.create_group("XBPM4_skew")
# xbpm4_skew_data = xbpm4_skew.create_dataset(name="data", data=self.get_entry("bpm4z"))
# xbpm4_skew_data.attrs["units"] = "NX_DIMENSIONLESS"
# xbpm4_skew.create_dataset(
# name="description", data="Normalized difference of counts between diagonal quadrants."
# )
slit_5 = instrument.create_group("slit_5")
slit_5.attrs["NX_class"] = "NXslit"
source.create_dataset(name="material", data="Si")
source.create_dataset(name="description", data="Slit 5, experimental hutch, exposure box")
x_gap = source.create_dataset(name="x_gap", data=get_entry(data, "sl5wh"))
x_gap.attrs["units"] = "mm"
y_gap = source.create_dataset(name="y_gap", data=get_entry(data, "sl5wv"))
y_gap.attrs["units"] = "mm"
x_translation = source.create_dataset(name="x_translation", data=get_entry(data, "sl5ch"))
x_translation.attrs["units"] = "mm"
height = source.create_dataset(name="x_translation", data=get_entry(data, "sl5cv"))
height.attrs["units"] = "mm"
# distance = source.create_dataset(name="distance", data=-3140 - get_entry(data, "samz", 0))
# distance.attrs["units"] = "mm"
# mirror = instrument.create_group("mirror")
# mirror.attrs["NX_class"] = "NXmirror"
# mirror.create_dataset(name="type", data="single")
# mirror.create_dataset(
# name="description",
# data="Grazing incidence mirror to reject high-harmonic wavelengths from the monochromator. There are three coating options available that are used depending on the X-ray energy, no coating (SiO2), rhodium (Rh) or platinum (Pt).",
# )
# incident_angle = mirror.create_dataset(name="incident_angle", data=self.get_entry("mith"))
# incident_angle.attrs["units"] = "degrees"
# substrate_material = mirror.create_dataset(name="substrate_material", data="SiO2")
# substrate_material.attrs["units"] = "NX_CHAR"
# coating_material = mirror.create_dataset(name="coating_material", data="SiO2")
# coating_material.attrs["units"] = "NX_CHAR"
# bend_y = mirror.create_dataset(name="bend_y", data="mibd")
# bend_y.attrs["units"] = "NX_DIMENSIONLESS"
# distance = mirror.create_dataset(
# name="distance", data=-4370 - np.asarray(self.get_entry("samz", 0))
# )
# distance.attrs["units"] = "mm"
beam_stop_1 = instrument.create_group("beam_stop_1")
beam_stop_1.attrs["NX_class"] = "NX_beamstop"
beam_stop_1.create_dataset(name="description", data="circular")
bms1_size = beam_stop_1.create_dataset(name="size", data=3)
bms1_size.attrs["units"] = "mm"
bms1_x = beam_stop_1.create_dataset(name="size", data=get_entry(data, "bs1x"))
bms1_x.attrs["units"] = "mm"
bms1_y = beam_stop_1.create_dataset(name="size", data=get_entry(data, "bs1y"))
bms1_y.attrs["units"] = "mm"
# xbpm5 = instrument.create_group("XBPM5")
# xbpm5.attrs["NX_class"] = "NXdetector"
# xbpm5_sum = xbpm5.create_group("XBPM5_sum")
# xbpm5_sum_data = xbpm5_sum.create_dataset(name="data", data=self.get_entry("bpm5s"))
# xbpm5_sum_data.attrs["units"] = "NX_DIMENSIONLESS"
# xbpm5_sum.create_dataset(name="description", data="Sum of counts for the four quadrants.")
# xbpm5_x = xbpm5.create_group("XBPM5_x")
# xbpm5_x_data = xbpm5_x.create_dataset(name="data", data=self.get_entry("bpm5x"))
# xbpm5_x_data.attrs["units"] = "NX_DIMENSIONLESS"
# xbpm5_x.create_dataset(
# name="description",
# data="Normalized difference of counts between left and right quadrants.",
# )
# xbpm5_y = xbpm5.create_group("XBPM5_y")
# xbpm5_y_data = xbpm5_y.create_dataset(name="data", data=self.get_entry("bpm5y"))
# xbpm5_y_data.attrs["units"] = "NX_DIMENSIONLESS"
# xbpm5_y.create_dataset(
# name="description",
# data="Normalized difference of counts between high and low quadrants.",
# )
# xbpm5_skew = xbpm5.create_group("XBPM5_skew")
# xbpm5_skew_data = xbpm5_skew.create_dataset(name="data", data=self.get_entry("bpm5z"))
# xbpm5_skew_data.attrs["units"] = "NX_DIMENSIONLESS"
# xbpm5_skew.create_dataset(
# name="description", data="Normalized difference of counts between diagonal quadrants."
# )
beam_stop_2 = instrument.create_group("beam_stop_2")
beam_stop_2.attrs["NX_class"] = "NX_beamstop"
beam_stop_2.create_dataset(name="description", data="rectangular")
bms2_size_x = beam_stop_2.create_dataset(name="size_x", data=5)
bms2_size_x.attrs["units"] = "mm"
bms2_size_y = beam_stop_2.create_dataset(name="size_y", data=2.25)
bms2_size_y.attrs["units"] = "mm"
bms2_x = beam_stop_2.create_dataset(name="size", data=get_entry(data, "bs2x"))
bms2_x.attrs["units"] = "mm"
bms2_y = beam_stop_2.create_dataset(name="size", data=get_entry(data, "bs2y"))
bms2_y.attrs["units"] = "mm"
bms2_data = beam_stop_2.create_dataset(name="data", data=get_entry(data, "diode"))
bms2_data.attrs["units"] = "NX_DIMENSIONLESS"
# slit_2 = instrument.create_group("slit_2")
# slit_2.attrs["NX_class"] = "NXslit"
# source.create_dataset(name="material", data="Ag")
# source.create_dataset(name="description", data="Slit 2, optics hutch")
# x_gap = source.create_dataset(name="x_gap", data=self.get_entry("sl2wh"))
# x_gap.attrs["units"] = "mm"
# y_gap = source.create_dataset(name="y_gap", data=self.get_entry("sl2wv"))
# y_gap.attrs["units"] = "mm"
# x_translation = source.create_dataset(name="x_translation", data=self.get_entry("sl2ch"))
# x_translation.attrs["units"] = "mm"
# height = source.create_dataset(name="x_translation", data=self.get_entry("sl2cv"))
# height.attrs["units"] = "mm"
# distance = source.create_dataset(
# name="distance", data=-3140 - np.asarray(self.get_entry("samz", 0))
# )
# distance.attrs["units"] = "mm"
if "eiger1p5m" in device_manager.devices and device_manager.devices.eiger1p5m.enabled:
eiger_4 = instrument.create_group("eiger_4")
eiger_4.attrs["NX_class"] = "NXdetector"
x_pixel_size = eiger_4.create_dataset(name="x_pixel_size", data=75)
x_pixel_size.attrs["units"] = "um"
y_pixel_size = eiger_4.create_dataset(name="y_pixel_size", data=75)
y_pixel_size.attrs["units"] = "um"
polar_angle = eiger_4.create_dataset(name="polar_angle", data=0)
polar_angle.attrs["units"] = "degrees"
azimuthal_angle = eiger_4.create_dataset(name="azimuthal_angle", data=0)
azimuthal_angle.attrs["units"] = "degrees"
rotation_angle = eiger_4.create_dataset(name="rotation_angle", data=0)
rotation_angle.attrs["units"] = "degrees"
description = eiger_4.create_dataset(
name="description", data="Single-photon counting detector, 320 micron-thick Si chip"
)
orientation = eiger_4.create_group("orientation")
orientation.attrs["description"] = (
"Orientation defines the number of counterclockwise rotations by 90 deg followed by a transposition to reach the 'cameraman orientation', that is looking towards the beam."
)
orientation.create_dataset(name="transpose", data=1)
orientation.create_dataset(name="rot90", data=3)
# slit_3 = instrument.create_group("slit_3")
# slit_3.attrs["NX_class"] = "NXslit"
# source.create_dataset(name="material", data="Si")
# source.create_dataset(name="description", data="Slit 3, experimental hutch, exposure box")
# x_gap = source.create_dataset(name="x_gap", data=self.get_entry("sl3wh"))
# x_gap.attrs["units"] = "mm"
# y_gap = source.create_dataset(name="y_gap", data=self.get_entry("sl3wv"))
# y_gap.attrs["units"] = "mm"
# x_translation = source.create_dataset(name="x_translation", data=self.get_entry("sl3ch"))
# x_translation.attrs["units"] = "mm"
# height = source.create_dataset(name="x_translation", data=self.get_entry("sl3cv"))
# height.attrs["units"] = "mm"
# # distance = source.create_dataset(name="distance", data=-3140 - self.get_entry("samz", 0))
# # distance.attrs["units"] = "mm"
if (
"eiger9m" in device_manager.devices
and device_manager.devices.eiger9m.enabled
and "eiger9m" in file_references
):
eiger9m = instrument.create_group("eiger9m")
eiger9m.attrs["NX_class"] = "NXdetector"
x_pixel_size = eiger9m.create_dataset(name="x_pixel_size", data=75)
x_pixel_size.attrs["units"] = "um"
y_pixel_size = eiger9m.create_dataset(name="y_pixel_size", data=75)
y_pixel_size.attrs["units"] = "um"
polar_angle = eiger9m.create_dataset(name="polar_angle", data=0)
polar_angle.attrs["units"] = "degrees"
azimuthal_angle = eiger9m.create_dataset(name="azimuthal_angle", data=0)
azimuthal_angle.attrs["units"] = "degrees"
rotation_angle = eiger9m.create_dataset(name="rotation_angle", data=0)
rotation_angle.attrs["units"] = "degrees"
description = eiger9m.create_dataset(
name="description", data="Eiger9M detector, in-house developed, Paul Scherrer Institute"
)
orientation = eiger9m.create_group("orientation")
orientation.attrs["description"] = (
"Orientation defines the number of counterclockwise rotations by 90 deg followed by a transposition to reach the 'cameraman orientation', that is looking towards the beam."
)
orientation.create_dataset(name="transpose", data=1)
orientation.create_dataset(name="rot90", data=3)
data = eiger9m.create_ext_link("data", file_references["eiger9m"]["path"], "EG9M/data")
status = eiger9m.create_ext_link(
"status", file_references["eiger9m"]["path"], "EG9M/status"
)
# filter_set = instrument.create_group("filter_set")
# filter_set.attrs["NX_class"] = "NXattenuator"
# filter_set.create_dataset(name="material", data="Si")
# filter_set.create_dataset(
# name="description",
# data="The filter set consists of 4 linear stages, each with five filter positions. Additionally, each one allows for an out position to allow 'no filtering'.",
# )
# attenuator_transmission = filter_set.create_dataset(
# name="attenuator_transmission", data=10 ** self.get_entry("ftrans", 0)
# )
# attenuator_transmission.attrs["units"] = "NX_DIMENSIONLESS"
if (
"pilatus_2" in device_manager.devices
and device_manager.devices.pilatus_2.enabled
and "pilatus_2" in file_references
):
pilatus_2 = instrument.create_group("pilatus_2")
pilatus_2.attrs["NX_class"] = "NXdetector"
x_pixel_size = pilatus_2.create_dataset(name="x_pixel_size", data=172)
x_pixel_size.attrs["units"] = "um"
y_pixel_size = pilatus_2.create_dataset(name="y_pixel_size", data=172)
y_pixel_size.attrs["units"] = "um"
polar_angle = pilatus_2.create_dataset(name="polar_angle", data=0)
polar_angle.attrs["units"] = "degrees"
azimuthal_angle = pilatus_2.create_dataset(name="azimuthal_angle", data=0)
azimuthal_angle.attrs["units"] = "degrees"
rotation_angle = pilatus_2.create_dataset(name="rotation_angle", data=0)
rotation_angle.attrs["units"] = "degrees"
description = pilatus_2.create_dataset(
name="description", data="Pilatus 300K detector, Dectris, Switzerland"
)
orientation = pilatus_2.create_group("orientation")
orientation.attrs["description"] = (
"Orientation defines the number of counterclockwise rotations by 90 deg followed by a transposition to reach the 'cameraman orientation', that is looking towards the beam."
)
orientation.create_dataset(name="transpose", data=1)
orientation.create_dataset(name="rot90", data=2)
data = pilatus_2.create_ext_link(
"data", file_references["pilatus_2"]["path"], "entry/instrument/pilatus_2/data"
)
# slit_4 = instrument.create_group("slit_4")
# slit_4.attrs["NX_class"] = "NXslit"
# source.create_dataset(name="material", data="Si")
# source.create_dataset(name="description", data="Slit 4, experimental hutch, exposure box")
# x_gap = source.create_dataset(name="x_gap", data=self.get_entry("sl4wh"))
# x_gap.attrs["units"] = "mm"
# y_gap = source.create_dataset(name="y_gap", data=self.get_entry("sl4wv"))
# y_gap.attrs["units"] = "mm"
# x_translation = source.create_dataset(name="x_translation", data=self.get_entry("sl4ch"))
# x_translation.attrs["units"] = "mm"
# height = source.create_dataset(name="x_translation", data=self.get_entry("sl4cv"))
# height.attrs["units"] = "mm"
# # distance = source.create_dataset(name="distance", data=-3140 - self.get_entry("samz", 0))
# # distance.attrs["units"] = "mm"
if (
"falcon" in device_manager.devices
and device_manager.devices.falcon.enabled
and "falcon" in file_references
):
falcon = instrument.create_ext_link(
"falcon", file_references["falcon"]["path"], "entry/instrument/FalconX1"
)
# slit_5 = instrument.create_group("slit_5")
# slit_5.attrs["NX_class"] = "NXslit"
# source.create_dataset(name="material", data="Si")
# source.create_dataset(name="description", data="Slit 5, experimental hutch, exposure box")
# x_gap = source.create_dataset(name="x_gap", data=self.get_entry("sl5wh"))
# x_gap.attrs["units"] = "mm"
# y_gap = source.create_dataset(name="y_gap", data=self.get_entry("sl5wv"))
# y_gap.attrs["units"] = "mm"
# x_translation = source.create_dataset(name="x_translation", data=self.get_entry("sl5ch"))
# x_translation.attrs["units"] = "mm"
# height = source.create_dataset(name="x_translation", data=self.get_entry("sl5cv"))
# height.attrs["units"] = "mm"
# # distance = source.create_dataset(name="distance", data=-3140 - self.get_entry("samz", 0))
# # distance.attrs["units"] = "mm"
return storage
# beam_stop_1 = instrument.create_group("beam_stop_1")
# beam_stop_1.attrs["NX_class"] = "NX_beamstop"
# beam_stop_1.create_dataset(name="description", data="circular")
# bms1_size = beam_stop_1.create_dataset(name="size", data=3)
# bms1_size.attrs["units"] = "mm"
# bms1_x = beam_stop_1.create_dataset(name="size", data=self.get_entry("bs1x"))
# bms1_x.attrs["units"] = "mm"
# bms1_y = beam_stop_1.create_dataset(name="size", data=self.get_entry("bs1y"))
# bms1_y.attrs["units"] = "mm"
# beam_stop_2 = instrument.create_group("beam_stop_2")
# beam_stop_2.attrs["NX_class"] = "NX_beamstop"
# beam_stop_2.create_dataset(name="description", data="rectangular")
# bms2_size_x = beam_stop_2.create_dataset(name="size_x", data=5)
# bms2_size_x.attrs["units"] = "mm"
# bms2_size_y = beam_stop_2.create_dataset(name="size_y", data=2.25)
# bms2_size_y.attrs["units"] = "mm"
# bms2_x = beam_stop_2.create_dataset(name="size", data=self.get_entry("bs2x"))
# bms2_x.attrs["units"] = "mm"
# bms2_y = beam_stop_2.create_dataset(name="size", data=self.get_entry("bs2y"))
# bms2_y.attrs["units"] = "mm"
# bms2_data = beam_stop_2.create_dataset(name="data", data=self.get_entry("diode"))
# bms2_data.attrs["units"] = "NX_DIMENSIONLESS"
# if (
# "eiger1p5m" in self.device_manager.devices
# and self.device_manager.devices.eiger1p5m.enabled
# ):
# eiger_4 = instrument.create_group("eiger_4")
# eiger_4.attrs["NX_class"] = "NXdetector"
# x_pixel_size = eiger_4.create_dataset(name="x_pixel_size", data=75)
# x_pixel_size.attrs["units"] = "um"
# y_pixel_size = eiger_4.create_dataset(name="y_pixel_size", data=75)
# y_pixel_size.attrs["units"] = "um"
# polar_angle = eiger_4.create_dataset(name="polar_angle", data=0)
# polar_angle.attrs["units"] = "degrees"
# azimuthal_angle = eiger_4.create_dataset(name="azimuthal_angle", data=0)
# azimuthal_angle.attrs["units"] = "degrees"
# rotation_angle = eiger_4.create_dataset(name="rotation_angle", data=0)
# rotation_angle.attrs["units"] = "degrees"
# description = eiger_4.create_dataset(
# name="description", data="Single-photon counting detector, 320 micron-thick Si chip"
# )
# orientation = eiger_4.create_group("orientation")
# orientation.attrs["description"] = (
# "Orientation defines the number of counterclockwise rotations by 90 deg followed by a transposition to reach the 'cameraman orientation', that is looking towards the beam."
# )
# orientation.create_dataset(name="transpose", data=1)
# orientation.create_dataset(name="rot90", data=3)
# if (
# "eiger9m" in self.device_manager.devices
# and self.device_manager.devices.eiger9m.enabled
# and "eiger9m" in self.file_references
# ):
# eiger9m = instrument.create_group("eiger9m")
# eiger9m.attrs["NX_class"] = "NXdetector"
# x_pixel_size = eiger9m.create_dataset(name="x_pixel_size", data=75)
# x_pixel_size.attrs["units"] = "um"
# y_pixel_size = eiger9m.create_dataset(name="y_pixel_size", data=75)
# y_pixel_size.attrs["units"] = "um"
# polar_angle = eiger9m.create_dataset(name="polar_angle", data=0)
# polar_angle.attrs["units"] = "degrees"
# azimuthal_angle = eiger9m.create_dataset(name="azimuthal_angle", data=0)
# azimuthal_angle.attrs["units"] = "degrees"
# rotation_angle = eiger9m.create_dataset(name="rotation_angle", data=0)
# rotation_angle.attrs["units"] = "degrees"
# description = eiger9m.create_dataset(
# name="description",
# data="Eiger9M detector, in-house developed, Paul Scherrer Institute",
# )
# orientation = eiger9m.create_group("orientation")
# orientation.attrs["description"] = (
# "Orientation defines the number of counterclockwise rotations by 90 deg followed by a transposition to reach the 'cameraman orientation', that is looking towards the beam."
# )
# orientation.create_dataset(name="transpose", data=1)
# orientation.create_dataset(name="rot90", data=3)
# data = eiger9m.create_ext_link(
# "data", self.file_references["eiger9m"]["path"], "EG9M/data"
# )
# status = eiger9m.create_ext_link(
# "status", self.file_references["eiger9m"]["path"], "EG9M/status"
# )
# if (
# "pilatus_2" in self.device_manager.devices
# and self.device_manager.devices.pilatus_2.enabled
# and "pilatus_2" in self.file_references
# ):
# pilatus_2 = instrument.create_group("pilatus_2")
# pilatus_2.attrs["NX_class"] = "NXdetector"
# x_pixel_size = pilatus_2.create_dataset(name="x_pixel_size", data=172)
# x_pixel_size.attrs["units"] = "um"
# y_pixel_size = pilatus_2.create_dataset(name="y_pixel_size", data=172)
# y_pixel_size.attrs["units"] = "um"
# polar_angle = pilatus_2.create_dataset(name="polar_angle", data=0)
# polar_angle.attrs["units"] = "degrees"
# azimuthal_angle = pilatus_2.create_dataset(name="azimuthal_angle", data=0)
# azimuthal_angle.attrs["units"] = "degrees"
# rotation_angle = pilatus_2.create_dataset(name="rotation_angle", data=0)
# rotation_angle.attrs["units"] = "degrees"
# description = pilatus_2.create_dataset(
# name="description", data="Pilatus 300K detector, Dectris, Switzerland"
# )
# orientation = pilatus_2.create_group("orientation")
# orientation.attrs["description"] = (
# "Orientation defines the number of counterclockwise rotations by 90 deg followed by a transposition to reach the 'cameraman orientation', that is looking towards the beam."
# )
# orientation.create_dataset(name="transpose", data=1)
# orientation.create_dataset(name="rot90", data=2)
# data = pilatus_2.create_ext_link(
# "data", self.file_references["pilatus_2"]["path"], "entry/instrument/pilatus_2/data"
# )
# if (
# "falcon" in self.device_manager.devices
# and self.device_manager.devices.falcon.enabled
# and "falcon" in self.file_references
# ):
# falcon = instrument.create_ext_link(
# "falcon", self.file_references["falcon"]["path"], "entry/instrument/FalconX1"
# )

View File

@@ -27,20 +27,19 @@ from bec_lib import bec_logger, messages
from bec_lib.alarm_handler import Alarms
from bec_lib.endpoints import MessageEndpoints
from bec_server.scan_server.errors import ScanAbortion
from bec_server.scan_server.scans import SyncFlyScanBase
from bec_server.scan_server.scans import AsyncFlyScanBase
from csaxs_bec.devices.epics.delay_generator_csaxs.delay_generator_csaxs import TRIGGERSOURCE
logger = bec_logger.logger
class FlomniFermatScan(SyncFlyScanBase):
class FlomniFermatScan(AsyncFlyScanBase):
scan_name = "flomni_fermat_scan"
scan_type = "fly"
required_kwargs = ["fovx", "fovy", "exp_time", "step", "angle"]
arg_input = {}
arg_bundle_size = {"bundle": len(arg_input), "min": None, "max": None}
use_scan_progress_report = True
def __init__(
self,
@@ -104,6 +103,14 @@ class FlomniFermatScan(SyncFlyScanBase):
self.zshift = -100
self.flomni_rotation_status = None
def scan_report_instructions(self):
"""Scan report instructions for the progress bar"""
yield from self.stubs.scan_report_instruction({"device_progress": ["rt_positions"]})
@property
def monitor_sync(self) -> str:
return "rt_positions"
def initialize(self):
self.scan_motors = []
self.update_readout_priority()
@@ -113,10 +120,6 @@ class FlomniFermatScan(SyncFlyScanBase):
self.positions, corridor_size=self.optim_trajectory_corridor
)
@property
def monitor_sync(self):
return "rt_flomni"
def reverse_trajectory(self):
"""
Reverse the trajectory. Every other scan should be reversed to
@@ -290,26 +293,18 @@ class FlomniFermatScan(SyncFlyScanBase):
return np.array(positions)
def scan_core(self):
# use a device message to receive the scan number and
# scan ID before sending the message to the device server
yield from self.stubs.kickoff(device="rtx")
while True:
yield from self.stubs.read(group="monitored")
status = self.connector.get(MessageEndpoints.device_status("rt_scan"))
if status:
status_id = status.content.get("status", 1)
request_id = status.metadata.get("RID")
if status_id == 0 and self.metadata.get("RID") == request_id:
break
if status_id == 2 and self.metadata.get("RID") == request_id:
raise ScanAbortion(
"An error occured during the flomni readout:"
f" {status.metadata.get('error')}"
)
# send off the flyer
yield from self.stubs.kickoff(device="rt_positions")
# start the readout loop of the flyer
status = yield from self.stubs.complete(device="rt_positions", wait=False)
# read the monitors until the flyer is done
while not status.done:
yield from self.stubs.read(group="monitored", point_id=self.point_id)
self.point_id += 1
time.sleep(1)
logger.debug("reading monitors")
# yield from self.device_rpc("rtx", "controller.kickoff")
def move_to_start(self):
"""return to the start position"""
@@ -336,6 +331,7 @@ class FlomniFermatScan(SyncFlyScanBase):
yield from self.read_scan_motors()
self.prepare_positions()
yield from self._prepare_setup()
yield from self.scan_report_instructions()
yield from self.open_scan()
yield from self.stage()
yield from self.run_baseline_reading()

View File

@@ -217,6 +217,16 @@ def test_mcs_card_csaxs_complete_and_stop(mock_mcs_csaxs: MCSCardCSAXS):
assert not mcs._start_monitor_async_data_emission.is_set()
def test_mcs_on_stop(mock_mcs_csaxs: MCSCardCSAXS):
"""Test that on stop sets the omit_mca_callbacks flag. Also test that on stage clears the omit_mca_callbacks flag."""
mcs = mock_mcs_csaxs
assert mcs._omit_mca_callbacks.is_set() is False
mcs.stop()
assert mcs._omit_mca_callbacks.is_set() is True
mcs.stage()
assert mcs._omit_mca_callbacks.is_set() is False
def test_mcs_recovery(mock_mcs_csaxs: MCSCardCSAXS):
mcs = mock_mcs_csaxs
# Simulate ongoing acquisition