67 Commits

Author SHA1 Message Date
29f63ccb75 no real change 2025-08-06 16:39:42 +02:00
71702f5f7a added possibility to place Procedure box into Expert Panel 2025-04-09 15:13:55 +02:00
fb623ee4fc QStackedWidget which does not have tabBar in on_tab_change in guiframe.py 2025-03-31 14:12:30 +02:00
44a113d85d added QStackedWidget as an option in gui_frame.py, a variant of QTabWidget 2025-01-07 14:34:40 +01:00
d0bc302915 Position in gridlayout for measurement_wgt_tab is now configurable from client json file 2024-12-13 15:35:37 +01:00
4cf494982e user:dict['Comment'] 2024-09-15 13:59:31 +02:00
bba3ecf4c1 hdf tuneup 2024-07-03 16:19:24 +02:00
0d763f267c self.appversion for hdf 2024-07-03 11:27:26 +02:00
e785db369f Merge remote-tracking branch 'origin/master' 2024-07-03 09:49:34 +02:00
4783ec5a32 base, elog destination and file naming change 2024-07-03 09:02:11 +02:00
73b187b5a8 guiframe 2024-07-03 08:55:18 +02:00
532e81f165 Typo 2024-02-29 12:11:19 +01:00
4babb18676 Typo 2024-02-29 12:10:33 +01:00
e7118162d4 Type casting in Qt for Python 3.10 2024-02-29 12:09:17 +01:00
425c8125d2 Type casting in Qt for Python 3.10 2024-02-29 11:56:09 +01:00
aeb7e2907f Type casting in Qt for Python 3.10 2024-02-29 11:51:29 +01:00
b17b3200fc Type casting in Qt for Python 3.10 2024-02-29 11:50:30 +01:00
aa31b65750 Type casting in Qt for Python 3.10 2024-02-29 11:46:42 +01:00
886f7e0972 Type casting in Qt for Python 3.10 2024-02-29 11:43:25 +01:00
5fb1d9fe5b cast to Qt.AlignmentFlag when setAlignment includes an OR 2024-02-27 15:04:40 +01:00
fcca4f08d4 proscan driven 2024-02-08 14:13:01 +01:00
ce700a0c14 check_status update and continued devl for proscan 2024-02-06 16:52:18 +01:00
ac9d3b3e22 check_status_list 2023-11-17 11:57:54 +01:00
fd212a441d line_no in utily.py to replace _line 2023-11-17 11:23:56 +01:00
a1359cca84 corrected error following merge 2023-11-17 10:06:33 +01:00
25eed8079b reinstated (accidentally deleted) pass on line 154 2023-11-17 09:59:20 +01:00
5d6a74ab9d reinstated (accidentally deleted) pass on line 154 2023-11-17 09:58:57 +01:00
e830a52119 fixed conflict in base.py 2023-11-17 09:51:52 +01:00
a0efa778cb changes related to processed data 2023-11-17 09:47:40 +01:00
44d9971bd8 self.hdf_thread_started connected to signal 2023-10-30 15:18:20 +01:00
e3eca94b0f SaveFigureThread, does not attach files when there is no write permission 2023-09-26 13:48:54 +02:00
8c26ec4c15 SaveFigureThread, does not attach files when there is no write permission 2023-09-26 13:48:22 +02:00
7c291bcbf8 SaveFigureThread, does not attach files when there is no write permission 2023-09-26 13:45:56 +02:00
1f4fe75c73 HLAPipelineCtrl to pipeline widget 2023-08-21 15:43:00 +02:00
f70404c773 Convert array to list in save_to_epics 2023-07-28 16:04:39 +02:00
fcf6b46ec0 Stop plots from appearing twice in elog in SaveFigureThread 2023-07-28 14:49:54 +02:00
6148e37df2 corrections to save-to-epics 2023-07-28 10:13:55 +02:00
7580203c2b print added to closeEvent 2023-07-27 09:19:20 +02:00
3033e07152 print added to closeEvent 2023-07-27 09:14:54 +02:00
655b5c66a7 title attribute to level2-3 wgts 2023-07-26 18:03:21 +02:00
0f6889b54c remove newly introduced bug in start_analysis_thread 2023-07-26 16:10:49 +02:00
a6b60c30ce QTabWidget to grid 2023-07-25 17:35:19 +02:00
8dea32e263 QTabWidget to grid 2023-07-25 17:30:06 +02:00
4e701863c3 QTabWidget to grid 2023-07-25 17:22:18 +02:00
1fdf7013ed QTabWidget to grid 2023-07-25 16:31:06 +02:00
18ca4760bc SaveFigureThread update 2023-07-24 14:09:55 +02:00
16990824a8 SaveFigureThread update 2023-07-24 14:03:19 +02:00
72e4ed4f7a SaveFigureThread update 2023-07-24 13:52:42 +02:00
909ad9a3f3 SaveFigureThread update 2023-07-24 13:35:25 +02:00
2daa9ad6a7 SaveFigureThread update 2023-07-24 12:48:13 +02:00
2246e71e53 SaveFigureThread update 2023-07-24 12:42:29 +02:00
016ca6cebf SaveFigureThread update 2023-07-24 12:15:33 +02:00
9c3177f100 SaveFigureThread update 2023-07-24 12:06:45 +02:00
389a126f28 SaveFigureThread update 2023-07-24 11:55:17 +02:00
d41bba4233 SaveFigureThread for elog 2023-07-24 11:40:58 +02:00
7a237d0d12 SaveFigureThread add GUI2 2023-07-24 11:32:34 +02:00
67b3c3be17 SaveFigureThread prep for GUI2 2023-07-24 11:23:28 +02:00
87df78d969 screenshot from 2nd canvas, update 2023-07-24 10:40:08 +02:00
e46f427ca2 screenshot from 2nd canvas 2023-07-24 10:35:16 +02:00
64570f55c8 utils.py 2023-07-17 09:54:41 +02:00
2105ba8e7d remove print statement in guiframe 2023-06-06 09:38:39 +02:00
bdad077350 logging levels modified; log interface for hipa added 2023-05-16 12:23:19 +02:00
051e361c9f base and guiframe 2023-05-01 09:48:14 +02:00
7cac3da421 overdue update 2023-04-05 14:17:18 +02:00
cb7a74d189 qtab composite widget update 2023-02-24 07:26:49 +01:00
9c2c5d7d37 qtab composite widget update 2023-02-23 08:33:44 +01:00
3640082b59 qtab composite widget added 2023-02-23 08:09:14 +01:00
9 changed files with 2532 additions and 855 deletions

834
base.py

File diff suppressed because it is too large Load Diff

View File

@@ -19,6 +19,8 @@ class Facility(IntEnum):
SwissFEL = 1
SLS = 2
HIPA = 3
PROSCAN = 4
ESS = 5
class MsgSeverity(IntEnum):
""" For use with message logger
@@ -36,6 +38,7 @@ class UserMode(IntEnum):
OPERATION = 1
EXPERT = 2
SIMULATION = 3

File diff suppressed because it is too large Load Diff

327
h5_storage.py Normal file
View File

@@ -0,0 +1,327 @@
import getpass
import time
import re
from functools import lru_cache

import h5py
import numpy as np

# Variable-length byte-string dtype; used for every string dataset and
# attribute written by this module.
dt = h5py.special_dtype(vlen=bytes)
# Floating/unsigned dtypes that loadH5Recursive converts straight to numpy
# arrays (signed ints and bools are handled by dedicated branches there).
numerical_types = (np.dtype('float64'), np.dtype('float32'), np.dtype('uint16'), np.dtype('uint64'), np.dtype('uint32'))
def stringDataset(group, name, data, system=None):
    """Create a length-1 variable-length string dataset *name* under *group*.

    If *system* is given, it is attached as the dataset's 'system' attribute.
    """
    new_dset = group.create_dataset(name, (1,), dtype=dt, data=data)
    if system:
        addSystemAttribute(new_dset, system)
def addStringAttribute(dset_or_group, name, data):
    """Attach *data* (a str) as a UTF-8 byte-string attribute named *name*.

    Works for both h5py datasets and groups, since both expose ``.attrs``.
    """
    # Earlier revision used attrs.create(name, np.string_(data)) instead.
    value = bytes(data, 'utf-8')
    dset_or_group.attrs[name] = value
def addSystemAttribute(dset_or_group, data):
    """Shorthand for addStringAttribute with the fixed attribute name 'system'."""
    addStringAttribute(dset_or_group, 'system', data)
def add_dataset(group, name, data, system=None, dtype=None):
    """Create a dataset *name* in *group* holding *data*.

    Strings are delegated to stringDataset(); everything else goes through
    h5py's normal dataset creation. A failing default-dtype creation is
    reported and swallowed so that one bad entry does not abort a whole save
    (best effort, matching the rest of this module).

    Args:
        group: h5py Group to create the dataset in.
        name: dataset name.
        data: payload; str, array-like or scalar.
        system: optional value stored as the 'system' attribute.
        dtype: optional explicit dtype passed to create_dataset (failures on
            this path propagate to the caller, as before).
    """
    # isinstance instead of `type(data) is str` so str subclasses are also
    # routed to the string path.
    if isinstance(data, str):
        stringDataset(group, name, data, system)
        return
    if dtype:
        dset = group.create_dataset(name, data=data, dtype=dtype)
    else:
        try:
            dset = group.create_dataset(name, data=data)
        except Exception as e:
            # Best effort: report and keep going rather than abort the save.
            dset = None
            print('Error for dataset %s' % name)
            print('Continuing')
            print(e)
    if dset is not None and system:
        addSystemAttribute(dset, system)
def saveH5Recursive(h5_filename, data_dict, dataH5=None):
    """Save a (possibly nested) dict into an HDF5 file, best effort.

    Sub-dicts become HDF5 groups, leaves become datasets. Most per-item
    failures are printed and skipped rather than raised.

    Args:
        h5_filename: path of the file to create (only used when dataH5 is None).
        data_dict: nested dict of data to store.
        dataH5: optional already-open h5py File/Group to write into instead
            of creating h5_filename.
    """
    def recurse_save(group, dict_or_data, dict_or_data_name, new_group=None):
        # None leaves are stored as the literal string 'None'.
        if dict_or_data is None:
            dict_or_data = 'None'
        if group is None:
            print("'recurse_save' has been called with None")
            raise ValueError
        if type(dict_or_data) is dict:
            # Dict -> new group, then recurse into each entry.
            try:
                new_group = group.create_group(dict_or_data_name)
            except Exception as e:
                print("Error in group.create_group", str(e))
                return
            if new_group is None:
                raise ValueError
            for key, val in dict_or_data.items():
                try:
                    recurse_save(new_group, val, key)
                except ValueError:
                    print('I called recurse_save with None')
                    #import pdb; pdb.set_trace()
        else:
            mydata = dict_or_data
            inner_key = dict_or_data_name
            if type(mydata) is str:
                # Plain strings are stored UTF-8 encoded.
                add_dataset(group, inner_key, mydata.encode('utf-8'), 'unknown')
            # NOTE(review): mydata[0] below raises IndexError on an empty
            # list — verify callers never pass one.
            elif (type(mydata) is list and type(mydata[0]) is str) or (hasattr(mydata, 'dtype') and mydata.dtype.type is np.str_):
                # For list of strings, we need this procedure
                if type(mydata[0]) is str:
                    mydata = np.array(mydata)
                    print("string to np.str", mydata)
                # NOTE(review): same condition as the branch above, so this
                # elif is unreachable dead code.
                elif type(mydata[0]) is str:
                    print("np.str")
                try:
                    # NOTE(review): np.str was removed in numpy >= 1.24; this
                    # attribute access would raise and land in the except
                    # below on modern numpy — verify against the numpy in use.
                    if hasattr(mydata, 'dtype') and \
                            (mydata.dtype.type is np.str or \
                             mydata.dtype.type is str) and len(mydata.shape) == 2:
                        mydata = mydata.flatten()
                    # Encode to fixed-size ascii byte strings, sized by the
                    # longest entry, shape-dependent.
                    if len(mydata.shape) == 2:
                        new_list = [[n.encode('ascii') for n in arr] for arr in mydata]
                        max_str_size = max(max(len(n) for n in arr) for arr in mydata)
                    elif len(mydata.shape) == 1:
                        new_list = [n.encode('ascii') for n in mydata]
                        max_str_size = max(len(n) for n in mydata)
                    elif len(mydata.shape) == 0:
                        new_list = [mydata.encode('ascii')]
                        max_str_size = len(new_list[0])
                    #print('Max len %i' % max_str_size)
                    dset = group.create_dataset(inner_key, mydata.shape, 'S%i' % max_str_size, new_list)
                    #print(np.array(dset))
                    dset.attrs.create('system', 'unknown', (1,), dtype=dt)
                except Exception as e:
                    # Diagnostic dump; the entry is skipped.
                    print('Exception:', e )
                    print('Error', inner_key)
                    print(type(mydata))
                    if type(mydata) is list:
                        print('type(mydata[0])')
                        print(type(mydata[0]))
                    print('len mydata shape=', len(mydata.shape))
                    print('mydata')
                    print(mydata)
            elif hasattr(mydata, 'dtype') and mydata.dtype == np.dtype('O'):
                # Object arrays: scalars and 1-D stored directly; 2-D stored
                # element-wise under '<key>_<i>_<j>'.
                if mydata.shape == ():
                    add_dataset(group, inner_key, mydata, 'unknown')
                elif len(mydata.shape) == 1:
                    add_dataset(group, inner_key, mydata, 'unknown')
                else:
                    for i in range(mydata.shape[0]):
                        for j in range(mydata.shape[1]):
                            try:
                                add_dataset(group, inner_key+'_%i_%i' % (i,j), mydata[i,j], 'unknown')
                            except:
                                print('Error')
                                print(group, inner_key, i, j)
            else:
                # Everything else: let add_dataset figure it out.
                try:
                    add_dataset(group, inner_key, mydata, 'unknown')
                except Exception as e:
                    print('Error', e)
                    print(inner_key, type(mydata))

    if dataH5 is None:
        # Create a fresh file and write everything into it.
        with h5py.File(h5_filename, 'w') as dataH5:
            for main_key, subdict in data_dict.items():
                recurse_save(dataH5, subdict, main_key, None)
            print("h5_storage.py SAVED TO FILE", h5_filename, flush=True)
    else:
        # Write into the caller-provided open handle/group.
        print("data_dict keys", data_dict.keys())
        for main_key, subdict in data_dict.items():
            recurse_save(dataH5, subdict, main_key, None)
        print("h5_storage.py SAVED TO dataH5", flush=True)
    #recurse_save(dataH5, data_dict, 'none', new_group=dataH5)
def loadH5Recursive(h5_file):
    """Inverse of saveH5Recursive: load an HDF5 file into a nested dict.

    Groups become dicts; datasets become numpy arrays, python scalars or
    decoded strings depending on dtype. Files whose payload is wrapped in a
    top-level 'none' group (legacy layout) are unwrapped transparently.

    Args:
        h5_file: path of the HDF5 file to read.

    Returns:
        dict: nested dict with the file contents.
    """
    def recurse_load(group_or_val, key, saved_dict_curr):
        type_ = type(group_or_val)
        if type_ is h5py._hl.files.File:
            # File level: merge entries directly into the current dict.
            for new_key, new_group_or_val in group_or_val.items():
                recurse_load(new_group_or_val, new_key, saved_dict_curr)
        elif type_ is h5py._hl.group.Group:
            # Group -> new nested dict.
            saved_dict_curr[key] = new_dict = {}
            for new_key, new_group_or_val in group_or_val.items():
                recurse_load(new_group_or_val, new_key, new_dict)
        # NOTE(review): type_ is a Python class here, so comparing it to a
        # numpy dtype probably never matches — verify this branch is live.
        elif type_ == np.dtype('O') and type(group_or_val[()]) is bytes:
            saved_dict_curr[key] = group_or_val[()].decode()
        elif type_ == h5py._hl.dataset.Dataset:
            dtype = group_or_val.dtype
            #if not hasattr(group_or_val, 'value'):
            #    print('Could not store key %s with type %s in dict' % (key, dtype))
            #    return
            if dtype in (np.dtype('int64'), np.dtype('int32'), np.dtype('int16'), np.dtype('int8')):
                saved_dict_curr[key] = np.array(group_or_val[()], int).squeeze()
            elif dtype == np.dtype('bool'):
                try:
                    saved_dict_curr[key] = bool(group_or_val[()])
                except:
                    print('Could not store key %s with type %s in dict (1)' % (key, dtype))
            elif dtype in numerical_types:
                # Float/unsigned types (module-level tuple) -> numpy array.
                saved_dict_curr[key] = np.array(group_or_val[()]).squeeze()
            elif dtype.str.startswith('|S'):
                # Fixed-size byte strings: decode back to str, shape-dependent.
                if group_or_val[()].shape == (1,1):
                    saved_dict_curr[key] = group_or_val[()][0,0].decode()
                elif group_or_val[()].shape == (1,):
                    saved_dict_curr[key] = group_or_val[()][0].decode()
                elif group_or_val[()].shape == ():
                    saved_dict_curr[key] = group_or_val[()].decode()
                else:
                    saved_dict_curr[key] = [x.decode() for x in group_or_val[()].squeeze()]
            elif dtype.str == '|O':
                # Object datasets are passed through as read.
                saved_dict_curr[key] = group_or_val[()]
            elif type(group_or_val[()]) is str:
                saved_dict_curr[key] = group_or_val[()]
            else:
                print('Could not store key %s with type %s in dict (2)' % (key, dtype))
        else:
            print('Could not store key %s with type %s in dict (3)' % (key, type_))

    saved_dict = {}
    with h5py.File(h5_file, 'r') as f:
        if 'none' in f:
            # Legacy layout: everything lives under a 'none' group; unwrap.
            recurse_load(f['none'], 'key', saved_dict)
            saved_dict = saved_dict['key']
        else:
            recurse_load(f, 'key', saved_dict)
    return saved_dict
def save_h5_new(saved_dict, h5_file):
    """Write an emittance-measurement dict to *h5_file* in the 'new' layout.

    Expects saved_dict with keys 'Raw_data' (incl. an 'image' array and the
    per-measurement 'gr_*' entries), 'Input', 'Meta_data' and optionally
    'Magnet_data'. Variable-length gr_* axis/fit arrays are NaN-padded into
    rectangular arrays before saving.

    Args:
        saved_dict: measurement data dict (structure as described above).
        h5_file: path of the HDF5 file to create.
    """
    def recurse_save(dict_, group, system):
        # Mirror the dict structure into HDF5 groups/datasets; every dataset
        # gets the same 'system' attribute. Unknown leaf types are an error.
        print('recurse', dict_.keys())
        for key, subdict_or_data in dict_.items():
            type_ = type(subdict_or_data)
            print(key, type_)
            if type_ is dict:
                new_group = group.create_group(key)
                recurse_save(subdict_or_data, new_group, system)
            elif type_ is np.ndarray:
                add_dataset(group, key, subdict_or_data, system)
            elif type_ is str:
                add_dataset(group, key, subdict_or_data, system, dtype=dt)
            else:
                raise ValueError(key, type_)

    # Compiled-regex factories, cached so each pattern is built only once.
    @lru_cache()
    def re_axis(x):
        return re.compile(r'gr_%s_axis_(\d+)_(\d+)' % x)

    @lru_cache()
    def re_gauss_function(x):
        return re.compile(r'gr_%s_fit_gauss_function_(\d+)_(\d+)' % x)

    n_measurements, n_images = saved_dict['Raw_data']['image'].shape[:2]
    # Create arrays for gr / slice values, that differ in size for different n_measurements, n_images
    gr_x_shape_max = -1
    gr_y_shape_max = -1
    for key, data in sorted(saved_dict['Raw_data'].items()):
        if key.startswith('gr_x_axis'):
            gr_x_shape_max = max(gr_x_shape_max, data.shape[0])
        elif key.startswith('gr_y_axis'):
            gr_y_shape_max = max(gr_y_shape_max, data.shape[0])
    # NaN-filled so shorter rows stay NaN-padded after being filled in below.
    gr_x_axis = np.zeros([n_measurements, n_images, gr_x_shape_max])*np.nan
    gr_y_axis = np.zeros([n_measurements, n_images, gr_y_shape_max])*np.nan
    gr_x_fit_gauss_function = gr_x_axis.copy()
    gr_y_fit_gauss_function = gr_y_axis.copy()
    for key, data in sorted(saved_dict['Raw_data'].items()):
        for arr, regex in [
                (gr_x_axis, re_axis('x')),
                (gr_y_axis, re_axis('y')),
                (gr_x_fit_gauss_function, re_gauss_function('x')),
                (gr_y_fit_gauss_function, re_gauss_function('y')),
                ]:
            match = regex.match(key)
            if match is not None:
                #print(key, 'matches', regex)
                n_measurement, n_image = map(int, match.groups())
                arr[n_measurement, n_image,:len(data)] = data
                # NOTE(review): this 'continue' only advances the inner
                # (arr, regex) loop, not the outer key loop — a no-op here;
                # 'break' was probably intended. Behavior is still correct
                # because at most one regex matches a given key.
                continue

    with h5py.File(h5_file, 'w') as f:
        # 'general' group: who/what/when created the file.
        general = f.create_group('general')
        stringDataset(general, 'user', getpass.getuser())
        stringDataset(general, 'application', 'EmittanceTool')
        stringDataset(general, 'author', 'Philipp Dijkstal and Eduard Prat')
        stringDataset(general, 'created', time.ctime())
        experiment = f.create_group('experiment')
        try:
            # Best effort: read the laser rep rate from EPICS; NaN on failure.
            from epics import caget
            lrr = float(caget('SIN-TIMAST-TMA:Beam-Exp-Freq-RB'))
        except Exception as e:
            print('Could not obtain Laser rep rate!')
            print(e)
            lrr = np.nan
        add_dataset(experiment, 'Laser rep rate', lrr, 'unknown')
        # TBD: save snapshot here
        scan1 = f.create_group('scan 1')
        method = scan1.create_group('method')
        method.create_dataset('records', data=[float(n_measurements)])
        method.create_dataset('samples', data=[float(n_images)])
        method.create_dataset('dimension', data=[1])
        stringDataset(method, 'type', 'Line scan')
        recurse_save(saved_dict['Input'], method, 'Application Input')
        data = scan1.create_group('data')
        # One data group per profile monitor (screen) used for the scan.
        screen = data.create_group(saved_dict['Input']['Profile monitor'])
        recurse_save(saved_dict['Meta_data'], screen, 'Emittance data')
        # Raw data: everything except the gr_* entries already collected into
        # the padded arrays above.
        for key, data_ in sorted(saved_dict['Raw_data'].items()):
            if not any([x.match(key) for x in [re_axis('x'), re_axis('y'), re_gauss_function('x'), re_gauss_function('y')]]):
                add_dataset(screen, key, data_, 'Camera')
                #print('Created %s' % key)
        # Only save padded arrays that actually received data.
        if not np.all(np.isnan(gr_x_axis)):
            add_dataset(screen, 'gr_x_axis', gr_x_axis, 'Camera')
        else:
            print('gr_x_axis is nan')
        if not np.all(np.isnan(gr_y_axis)):
            add_dataset(screen, 'gr_y_axis', gr_y_axis, 'Camera')
        else:
            print('gr_y_axis is nan')
        if not np.all(np.isnan(gr_x_fit_gauss_function)):
            add_dataset(screen, 'gr_x_fit_gauss_function', gr_x_fit_gauss_function, 'Camera')
        else:
            print('gr_x_fit_gauss_function is nan')
        if not np.all(np.isnan(gr_y_fit_gauss_function)):
            add_dataset(screen, 'gr_y_fit_gauss_function', gr_y_fit_gauss_function, 'Camera')
        else:
            print('gr_y_fit_gauss_function is nan')
        if 'Magnet_data' in saved_dict:
            # Per-magnet K value and current setpoint under method/actuators.
            for n_magnet, magnet in enumerate(saved_dict['Magnet_data']['Magnets']):
                mag_group = method.create_group('actuators/%s' % magnet)
                add_dataset(mag_group, 'K', saved_dict['Magnet_data']['K'][n_magnet], 'Magnet')
                add_dataset(mag_group, 'I-SET', saved_dict['Magnet_data']['I-SET'][n_magnet], 'Magnet')
        elif not saved_dict['Input']['Dry run'] in (np.array(False), False):
            print('Magnet data not saved.')
        else:
            raise ValueError('No magnet data')

View File

@@ -38,7 +38,6 @@ class QSaveHDF(QDialog):
self.user_dict = {}
self.user_dict['Comment'] = None
#self.file_name = None
#self.excluded_input = ['Year', 'Month', 'Date']
if 'Time in seconds' in input_options.keys():
@@ -220,7 +219,6 @@ class QSaveHDF(QDialog):
def get_data(self):
self.user_dict['Application'] = self.applicationLabel.text()
self.user_dict['User'] = self.author.text()
self.user_dict['Comment'] = self.comment.document().toPlainText()
@@ -245,6 +243,7 @@ class QSaveHDF(QDialog):
def save(self):
self.get_data()
self.parent.hdf_user_dict = self.user_dict
self.parent.hdf_filename = self.user_dict['Destination']
self.parent.save_to_hdf(from_dialog=self.from_dialog)
self.close()

View File

@@ -9,22 +9,13 @@ from qtpy.QtWidgets import (QComboBox, QDialog, QFileDialog, QHBoxLayout,
QVBoxLayout)
import elog # https://github.com/paulscherrerinstitute/py_elog
from pyqtacc.bdbase.enumkind import MsgSeverity
from apps4ops.bdbase.enumkind import MsgSeverity
from apps4ops.bdbase.utils import _line
_version = "1.0.0"
_pymodule = os.path.basename(__file__)
_appname, _appext = _pymodule.split(".")
def _line():
"""Macro to return the current line number.
The current line number within the file is used when
reporting messages to the message logging window.
Returns:
int: Current line number.
"""
return inspect.currentframe().f_back.f_lineno
class QSendToELOG(QDialog):
""" Graphical interface to elog

View File

@@ -4,27 +4,19 @@ import os
import time
from qtpy.QtCore import Qt
from qtpy.QtWidgets import (QComboBox, QDialog, QFileDialog, QHBoxLayout,
QLabel, QLineEdit, QPushButton, QTextEdit,
QVBoxLayout)
from qtpy.QtWidgets import (
QApplication, QComboBox, QDialog, QFileDialog, QHBoxLayout, QLabel,
QLineEdit, QPushButton, QTextEdit, QVBoxLayout)
import elog # https://github.com/paulscherrerinstitute/py_elog
from pyqtacc.bdbase.enumkind import MsgSeverity
from apps4ops.bdbase.enumkind import MsgSeverity
from apps4ops.bdbase.utils import _line
_version = "1.0.0"
_pymodule = os.path.basename(__file__)
_appname, _appext = _pymodule.split(".")
def _line():
"""Macro to return the current line number.
The current line number within the file is used when
reporting messages to the message logging window.
Returns:
int: Current line number.
"""
return inspect.currentframe().f_back.f_lineno
class QSendToELOGFrame(QDialog):
""" Graphical interface to elog
@@ -212,7 +204,7 @@ class QSendToELOGFrame(QDialog):
self.attributes['When'] = str(time.time())
self.attributes['Wann'] = str(time.time())
QApplication.processEvents()
if self.attachFile is not None:
_attachFile = []
@@ -227,15 +219,18 @@ class QSendToELOGFrame(QDialog):
self.files.append(str(_attachFile[i]))
elif "/sls/bd/data/" in _attachFile[i]:
self.files.append(str(_attachFile[i]))
elif "/sf/data/" in _attachFile[i]:
self.files.append(str(_attachFile[i]))
else:
self.files.append(self.destination + str(_attachFile[i]))
QApplication.processEvents()
el = self.elog_items.currentText()
url = self.parent.settings.data["ElogBooks"][el]["url"]
self.logbook = elog.open(url, user='robot', password='robot')
QApplication.processEvents()
try:
if self.files:
@@ -307,18 +302,24 @@ class QSendToELOGFrame(QDialog):
#find layout items
layout_items = []
layout_items_optional = []
print("logbook", logbook)
try:
layout_items = list(self.parent.settings.data[
"ElogBooks"][logbook]['Required'].keys())
except KeyError:
pass
print("logbook- items", layout_items)
try:
layout_items_optional = list(self.parent.settings.data[
"ElogBooks"][logbook]['Optional'].keys())
except KeyError:
pass
layout_items.extend(layout_items_optional)
print("logbook- optional", layout_items_optional)
if layout_items_optional:
layout_items.extend(layout_items_optional)
return layout_items

32
setup_version Normal file
View File

@@ -0,0 +1,32 @@
#!/bin/bash
# To be executed from the top directory.
# Creates a versioned directory tree and clones the pyqtacc repositories
# into it. The tree mirrors the runtime layout: apps4ops (aliased as
# pyqtacc), caqtwidgets and common side by side.
#
# Fixes vs. previous revision: shebang added; abort on any failed command
# (set -euo pipefail) instead of silently continuing after e.g. a failed
# clone; ln -sfn so a re-run does not fail on the existing symlink;
# duplicate 'mkdir -p common' removed; version factored into one variable.
set -euo pipefail

VERSION=v1.7.0

mkdir -p "$VERSION"
cd "$VERSION"
mkdir -p apps4ops caqtwidgets common
# Legacy import name: pyqtacc must resolve to apps4ops.
ln -sfn apps4ops pyqtacc
cd apps4ops
mkdir -p bdbase qrc_resources sf sls hipa
cd bdbase
git clone git@gitlab.psi.ch:pyqtacc/bdbase.git .
cd ../qrc_resources
git clone git@gitlab.psi.ch:pyqtacc/qrc_resources.git .
cd ../sf
git clone git@gitlab.psi.ch:pyqtacc/sf.git .
cd ../sls
git clone git@gitlab.psi.ch:pyqtacc/sls.git .
cd ../hipa
git clone git@gitlab.psi.ch:pyqtacc/hipa.git .
cd ../../
cd common
git clone git@gitlab.psi.ch:pyqtacc/common.git .
cd ../
cd caqtwidgets
git clone git@gitlab.psi.ch:cafe/caqtwidgets.git .
cd ../../

24
utils.py Normal file
View File

@@ -0,0 +1,24 @@
from inspect import currentframe


def line_no():
    """Macro to return the current line number.

    The current line number within the file is used when
    reporting messages to the message logging window.

    Returns:
        int: Current line number of the caller.
    """
    return currentframe().f_back.f_lineno


# Deprecated alias kept for backward compatibility: older modules still
# import _line; line_no() is the preferred name. Both report the line
# number of their direct caller (via f_back), so aliasing is behaviorally
# identical to the former duplicated definition.
_line = line_no