changes related to processed data

This commit is contained in:
2023-11-17 09:47:40 +01:00
parent 44d9971bd8
commit a0efa778cb
5 changed files with 497 additions and 118 deletions

base.py

@@ -26,7 +26,6 @@ from qtpy.QtWidgets import (QAction, QApplication, QDialog, QFrame, QLabel,
QProgressBar, QScrollArea, QSizePolicy,
QSplashScreen, QVBoxLayout, QWidget)
from pyqtacc.bdbase.utils import _line
from pyqtacc.bdbase.enumkind import Facility, MsgSeverity, UserMode
from pyqtacc.bdbase.helpbrowser import HelpBrowser
@@ -35,11 +34,9 @@ from pyqtacc.bdbase.savehdf import QSaveHDF
from pyqtacc.bdbase.hdf5filemenu import HDF5GroupBox
from pyqtacc.bdbase.sendelog import QSendToELOG
from pyqtacc.bdbase.screenshot import QScreenshot
from pyqtacc.bdbase.guiframe import GUIFrame
from caqtwidgets.pvwidgets import QNoDockWidget
import PyCafe
@@ -60,18 +57,6 @@ CENTRAL_WIDGET_MINIMUM_HEIGHT = 840
CENTRAL_WIDGET_MINIMUM_WIDTH = 1240
SLS_CENTRAL_WIDGET_MINIMUM_HEIGHT = 840
SLS_CENTRAL_WIDGET_MINIMUM_WIDTH = 940
'''
def _line():
"""Macro to return the current line number.
The current line number within the file is used when
reporting messages to the message logging window.
Returns:
int: Current line number.
"""
return inspect.currentframe().f_back.f_lineno
'''
class BaseWindow(QMainWindow):
""" BaseWindow
@@ -96,8 +81,9 @@ class BaseWindow(QMainWindow):
self.all_data = self.parent.all_data
self.all_data_2 = self.parent.all_data_2
def __del__(self):
self.wait()
#Causes QThread::wait: Thread tried to wait on itself
#def __del__(self):
# self.wait()
def run(self):
attach_files = []
@@ -110,6 +96,7 @@ class BaseWindow(QMainWindow):
def extract_and_attach(i, nfig, name, all_fig_data):
canvas = 'Canvas {0}'.format(i+1)
name_base = name.replace(' ', '_').lower()
write_message_fired = False
if all_fig_data[canvas] is not None:
nfig_canvas = len(all_fig_data[canvas])
@@ -143,7 +130,7 @@ class BaseWindow(QMainWindow):
attach_files.append(save_dest)
write_message_fired = False
try:
resultsSeq = self.settings.data["GUI"]["resultsSeq"]
titleSeq = self.settings.data["GUI"]["subResultsTabTitle"]
@@ -154,7 +141,6 @@ class BaseWindow(QMainWindow):
except KeyError as ex:
pass
try:
resultsSeq = self.settings.data["GUI2"]["resultsSeq"]
titleSeq = self.settings.data["GUI2"]["subResultsTabTitle"]
@@ -165,8 +151,6 @@ class BaseWindow(QMainWindow):
except KeyError as ex:
pass
#Not so nice.. send a signal instead?
if attach_files:
self.parent.attach_files = attach_files
@@ -185,61 +169,81 @@ class BaseWindow(QMainWindow):
self.parent = parent
self.from_dialog=from_dialog
def __del__(self):
self.wait()
#Only a precaution; the problem was never actually observed
#Causes QThread::wait: Thread tried to wait on itself
#def __del__(self):
# self.wait()
def run(self):
"""Run hdf thread
"""
QApplication.processEvents(QEventLoop.ExcludeUserInputEvents, 5000)
self.all_data = self.parent.all_data
#Reanalysis data
if self.all_data is not None:
ts_in_seconds = self.all_data['Ambient data']['Time in seconds']
try:
if 'Time in seconds' in self.all_data['Ambient data']:
ts_in_seconds = self.all_data['Ambient data'][
'Time in seconds']
else:
ts_in_seconds = None
except KeyError:
ts_in_seconds = None
now_in_seconds = None
from_hdf = False
if 'Reanalysis time in seconds' in self.all_data['Processed data']:
from_hdf = bool(self.all_data['Processed data']['Reanalysis time'])
if from_hdf:
now_in_seconds = self.all_data['Processed data'][
'Reanalysis time in seconds']
try:
if 'Reanalysis time in seconds' in self.all_data[
'Processed data']:
from_hdf = bool(
self.all_data['Processed data']['Reanalysis time'])
if from_hdf:
now_in_seconds = self.all_data['Processed data'][
'Reanalysis time in seconds']
#Double check
if not from_hdf:
if 'from_hdf' in self.all_data['Processed data']:
from_hdf = bool(self.all_data['Processed data'][
'from_hdf'])
except KeyError:
now_in_seconds = None
self.parent.from_hdf = from_hdf
print("t=========================>", ts_in_seconds, " // ", now_in_seconds)
print("from hdf5=========================>", from_hdf)
if self.parent.hdf_filename is None or not self.from_dialog:
self.parent.set_new_hdf_filename(ts_in_seconds,
now_in_seconds)
#else:
# self.parent.set_new_hdf_filename(ts_in_seconds,
# now_in_seconds)
#print("Parent Thread after ==>", self.parent.hdf_filename)
#Open hdf5file here and
#mess = "HDF save to file {0} proceeding...".format(
# self.parent.hdf_filename)
#self.parent.trigger_log_message.emit(MsgSeverity.INFO.name,
# _pymodule, _line(),
# mess, {})
try:
print("FILENAME ==", self.parent.hdf_filename, flush=True)
with h5py.File(self.parent.hdf_filename, 'w') as dataH5:
self.parent.add_pvs_to_hdf(
dataH5, pv_list=self.parent.pv_machine_list,
from_hdf=from_hdf)
self.parent.add_general_to_hdf(dataH5)
self.parent.add_to_hdf(dataH5, proc=True, raw=True)
self.parent.hdf_save_completed = True
#experiment
if not from_hdf:
self.parent.add_pvs_to_hdf(
dataH5, pv_list=self.parent.pv_machine_list,
from_hdf=from_hdf)
#general
#if not from_hdf:
self.parent.add_general_to_hdf(dataH5)
self.parent.add_to_hdf(dataH5, proc=True, raw=True)
self.parent.hdf_save_completed = True
_mess = "Processed data saved to {}".format(
self.parent.hdf_filename)
self.parent.trigger_log_message.emit(
MsgSeverity.INFO.name, _pymodule, _line(), _mess, {})
MsgSeverity.INFO.name, _pymodule, _line(), _mess,
{})
except OSError as e:
_mess = "OSError in saving to file {0}: {1}".format(
_mess = "OSError in saving to file {0}: \n{1}".format(
self.parent.hdf_filename, str(e))
self.parent.trigger_log_message.emit(
MsgSeverity.ERROR.name, _pymodule, _line(), _mess, {})
@@ -258,13 +262,14 @@ class BaseWindow(QMainWindow):
self.all_data = all_data
self.hdf_filename_loaded = self.parent.hdf_filename_loaded
def __del__(self):
self.wait()
#Only a precaution; the problem was never actually observed
#Causes QThread::wait: Thread tried to wait on itself
#def __del__(self):
# self.wait()
def run(self):
"""Run hdf thread
"""
if not hasattr(self.analysis_procedure, 'load_hdf_file'):
mess = ("Analysis not configured for HDF analysis! " +
@@ -277,8 +282,6 @@ class BaseWindow(QMainWindow):
self.all_data = self.analysis_procedure.load_hdf_file(
self.hdf_filename_loaded)
if not self.all_data:
self.parent.trigger_progressbar.emit(PROGRESS_THREAD_END)
return
@@ -310,6 +313,7 @@ class BaseWindow(QMainWindow):
# Emit results
if all_dict is not None:
self.parent.from_hdf = True
self.trigger_thread_event.emit(all_dict)
mess = "HDF file {} analysis succeeded".format(
self.hdf_filename_loaded)
@@ -391,7 +395,6 @@ class BaseWindow(QMainWindow):
self.settings = ReadJSON(self.appname)
#Read out current_logbook
self.cafe = PyCafe.CyCafe()
@@ -411,10 +414,10 @@ class BaseWindow(QMainWindow):
self.hdf_filename_loaded = "NONE" #For loading into hdf dockwidget
self.hdf_filename = None #For saving
self.hdf_dialog = None
self.from_hdf = False
self.daq_analysis_completed = False
self.setObjectName("MainWindow")
self.setWindowTitle(self.appname)
@@ -424,20 +427,7 @@ class BaseWindow(QMainWindow):
self.menu = self.menuBar()
'''
try:
dirname = self.settings.data["stdout"]["destination"]
except KeyError:
dirname = "/tmp/"
if not os.path.exists(dirname):
os.mkdir(dirname)
fname = dirname + self.appname + ".log"
file_obj = os.open(fname, os.O_RDWR|os.O_CREAT)
os.close(file_obj)
'''
self.elog_dest = self.settings.data["Elog"]["destination"]
self.screenshot_dest = self.settings.data["screenshot"]["destination"]
@@ -891,16 +881,21 @@ class BaseWindow(QMainWindow):
_mess, QMessageBox.Ok)
return False
if self.daq_analysis_completed and not self.hdf_save_completed:
if 'Reanalysis time' in self.all_data['Processed data']:
if not self.all_data['Processed data']['Reanalysis time']:
_mess = ("Are you sure you wish to exit " +
"without saving data to HDF?")
qm = QMessageBox()
reply = qm.warning(self, "Exit", _mess,
QMessageBox.Yes | QMessageBox.No)
if reply == QMessageBox.No:
return False
if self.daq_analysis_completed and not self.hdf_save_completed \
and not self.from_hdf:
if self.all_data is not None:
try:
if 'Reanalysis time' in self.all_data['Processed data']:
if not self.all_data['Processed data']['Reanalysis time']:
_mess = ("Are you sure you wish to exit " +
"without saving data to HDF?")
qm = QMessageBox()
reply = qm.warning(self, "Exit", _mess,
QMessageBox.Yes | QMessageBox.No)
if reply == QMessageBox.No:
return False
except KeyError:
pass
return True
@Slot()
@@ -1005,9 +1000,11 @@ class BaseWindow(QMainWindow):
QApplication.processEvents()
if self.autopost_epics:
self.save_to_epics()
QApplication.processEvents()
if self.autopost_hdf:
self.save_to_hdf()
QApplication.processEvents()
if self.autopost_elog:
self.send_to_elog()
@@ -1073,7 +1070,7 @@ class BaseWindow(QMainWindow):
if not self.daq_analysis_completed:
QMessageBox.information(self, "HDF", (
("No data to save to hdf5; no measurement undertaken!")),
("No data to save to hdf; no measurement undertaken!")),
QMessageBox.Ok)
QApplication.processEvents()
return False
@@ -1144,8 +1141,9 @@ class BaseWindow(QMainWindow):
print(_mess, flush=True)
self.trigger_log_message.emit(
MsgSeverity.WARN.name, _pymodule, _line(), _mess,
{})
{})
return isOK
if from_hdf:
return isOK
@@ -1193,8 +1191,11 @@ class BaseWindow(QMainWindow):
user_dict['User'] = getpass.getuser()
if self.all_data is not None:
time_in_seconds = self.all_data['Ambient data']['Time in seconds']
now = datetime.fromtimestamp(time_in_seconds)
if 'Time in seconds' in self.all_data['Ambient data']:
time_in_seconds = self.all_data['Ambient data']['Time in seconds']
now = datetime.fromtimestamp(time_in_seconds)
else:
now = datetime.now()
else:
now = datetime.now()
@@ -1222,19 +1223,14 @@ class BaseWindow(QMainWindow):
""" This uses the widget interface to allow the user to enter
additional meta-data
"""
if not self.verify_save_to_hdf():
return False
input_options = OrderedDict()
#print(self.all_data['Ambient data'])
#print(self.all_data['Processed data'])
#QCombobox if list
#input_options['QComboBox'] = ['one', 'two', 'three']
#input_options['Comment'] = 'Please enter a comment'
if self.all_data is not None:
ts_in_seconds = self.all_data['Ambient data']['Time in seconds']
now_in_seconds = None
if 'Reanalysis time in seconds' in self.all_data['Processed data']:
@@ -1249,9 +1245,7 @@ class BaseWindow(QMainWindow):
self.hdf_dialog = QSaveHDF(self, input_options=input_options,
from_dialog=True)
#user_dict = self.hdf_dialog.get_data()
#self.hdf_filename = user_dict['Destination']
#print("filename", self.hdf_filename)
def verify_send_to_elog(self):
@@ -1274,12 +1268,11 @@ class BaseWindow(QMainWindow):
if self.save_hdf_thread.isRunning():
return True
if not self.hdf_save_completed:
if not self.hdf_save_completed and not self.from_hdf:
_mess = ("Opening ELOG, but please note that data have not " +
"been saved to HDF. " +
"<br>Click on the HDF icon to do this if desired")
QMessageBox.information(self, "ELOG", _mess,
QMessageBox.Ok)
QMessageBox.information(self, "ELOG", _mess, QMessageBox.Ok)
return True
return True
@@ -1746,7 +1739,8 @@ class BaseWindow(QMainWindow):
@Slot()
def start_analysis_thread(self):
'''Slot for the self.start_wgt button trigger in guiframe.py
'''
if not self.analysis_procedure:
mess = "Analysis thread not configured for this application"
self.show_log_message(MsgSeverity.ERROR, _pymodule, _line(), mess)
@@ -1821,6 +1815,7 @@ class BaseWindow(QMainWindow):
@Slot(dict)
def receive_analysis_results(self, all_dict):
self.all_data = all_dict
print("self.all_data", self.all_data.keys(), flush=True)
self.gui_frame.canvas_update(all_dict['Figure data'])
@@ -1873,7 +1868,7 @@ class BaseWindow(QMainWindow):
else:
pass
#print("receive_analysis_results=========================>", flush=True)
print("receive_analysis_results=========================>", flush=True)
@Slot()
def receive_abort_analysis(self):
@@ -2121,3 +2116,59 @@ class BaseWindow(QMainWindow):
<br></p>
""".format(self.splash_appname), Qt.AlignCenter | Qt.AlignTop)
self.splash_screen.finish(myapp)
def check_status_list(self, pymodule: str = _pymodule,
operation: str = "channel access",
pv_list: list = None, status_list: list = None,
line: int = _line()):
if None in (pv_list, status_list):
return
brk = ("------------------------------------------------------" +
"------------------------------------------------------")
self.trigger_log_message.emit(
MsgSeverity.INFO.name, pymodule, line, brk, {})
options = {}
for i, (pv, stat) in enumerate(zip(pv_list, status_list)):
if stat != self.cyca.ICAFE_NORMAL:
mess = "Error in '{0}' for element [{1}], {2}.".format(
operation, i, pv)
options['statusCode'] = (
str(stat) + " " +
self.cafe.getStatusCodeAsString(stat))
options['statusInfo'] = self.cafe.getStatusInfo(stat)
self.trigger_log_message.emit(
MsgSeverity.WARN.name, pymodule, line, mess, options)
self.trigger_log_message.emit(
MsgSeverity.INFO.name, pymodule, line, brk, {})
mess = ("The following devices reported an error " +
"in channel access operation:")
self.trigger_log_message.emit(
MsgSeverity.INFO.name, pymodule, line, mess, {})
def check_status(self, pymodule: str = _pymodule,
operation: str = "channel access",
pv: str = None, stat: int = None,
line: int = _line()):
if None in (pv, stat):
return
if stat != self.cyca.ICAFE_NORMAL:
mess = "Error in '{0}' for {1}.".format(operation, pv)
options = {}
options['statusCode'] = (
str(stat) + " " +
self.cafe.getStatusCodeAsString(stat))
options['statusInfo'] = self.cafe.getStatusInfo(stat)
self.trigger_log_message.emit(
MsgSeverity.WARN.name, pymodule, line, mess, options)
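A hedged usage sketch for the two new status helpers, as they might be called from inside a BaseWindow method after a bulk channel-access operation. The channel names and the error code 97 are placeholders, not part of this commit:

# Illustrative only: in practice the status codes come from a PyCafe operation.
pv_list = ["EXAMPLE:PV1", "EXAMPLE:PV2"]            # placeholder channel names
status_list = [self.cyca.ICAFE_NORMAL, 97]          # 97 stands in for an error status
self.check_status_list(operation="caget",
                       pv_list=pv_list, status_list=status_list)
self.check_status(operation="caput", pv="EXAMPLE:PV1",
                  stat=self.cyca.ICAFE_NORMAL)      # no-op when the status is normal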

View File

@@ -169,7 +169,6 @@ class GUIFrame(QWidget):
self.results_tab_wgt_titles)
self.canvas_current_idx = [0] * len(self.results_tab_wgt_titles)
self.init_results_tab_wgt()
@@ -195,7 +194,6 @@ class GUIFrame(QWidget):
self.canvas_current_idx_2 = [0] * len(
self.results_tab_wgt_titles_2)
self.init_results_tab_wgt_2()
@@ -834,10 +832,6 @@ class GUIFrame(QWidget):
_tip = "Restore quadruples to their pre-measurement values"
self.restore_optics_btn.setToolTip(_tip)
#_text = """<p> <font style="color:{0}; background-color:'white'";>
# {1} <br> {2} </font>
# </p>
# """.format('gray', 'Deflector', 'PM')
self.target_optics = QLabel()
#self.target_optics.setText(_text)
self.target_optics.setFixedWidth(150)
@@ -904,6 +898,7 @@ class GUIFrame(QWidget):
self.start_wgt.setEnabled(False)
self.start_wgt.setText("HDF analysis...")
self.save_all_group_box.setEnabled(False)
def hdf_reset_procedure(self):
self.parent.h5_groupbox.analyze_h5_widget.setEnabled(True)
@@ -2002,6 +1997,8 @@ class GUIFrame(QWidget):
self.input_parameters[key] = 0
return
if hasattr(radio_buddy_text_dict[self.sender()], "pv_info"):
dt = radio_buddy_text_dict[self.sender()].pv_info.dataType
@@ -2009,6 +2006,10 @@ class GUIFrame(QWidget):
if dt not in [self.cyca.CY_DBR_STRING, self.cyca.CY_DBR_ENUM,
self.cyca.CY_DBR_CHAR]:
value_input = re.findall(r"-?\d+\.?\d*", value_str)
if dt in [self.cyca.CY_DBR_FLOAT, self.cyca.CY_DBR_DOUBLE]:
self.input_parameters[key] = float(value_input[0])
elif dt in [self.cyca.CY_DBR_SHORT, self.cyca.CY_DBR_LONG]:
@@ -2049,7 +2050,7 @@ class GUIFrame(QWidget):
if pvname in self.current_stacked_wgt_dict.values() and \
radiobutton_list[0].isChecked(): #
self.input_parameters[key] = pvdata.value[0]
self.input_parameters[key] = pvdata.value[0]
if monitor:
monitor_pv = CAQLabel(self, pv_name=pv, monitor_callback=mon_cb,

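As an aside, the numeric-extraction regex used in the hunk above can be exercised on its own; a minimal sketch with an illustrative input string:

import re

value_str = "3.5 mm (approx.)"                       # illustrative free-text input
value_input = re.findall(r"-?\d+\.?\d*", value_str)  # same pattern as in the hunk above
print(float(value_input[0]))                         # prints 3.5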
h5_storage.py (new file)

@@ -0,0 +1,327 @@
import getpass
import time
import re
from functools import lru_cache
import h5py
import numpy as np
dt = h5py.special_dtype(vlen=bytes)
numerical_types = (np.dtype('float64'), np.dtype('float32'), np.dtype('uint16'), np.dtype('uint64'), np.dtype('uint32'))
def stringDataset(group, name, data, system=None):
dset = group.create_dataset(name, (1,), dtype=dt, data=data)
if system:
addSystemAttribute(dset, system)
def addStringAttribute(dset_or_group, name, data):
#return dset_or_group.attrs.create(name, np.string_(data)) # , (1,), dtype=dt)
dset_or_group.attrs[name] = bytes(data, 'utf-8')
def addSystemAttribute(dset_or_group, data):
addStringAttribute(dset_or_group, 'system', data)
def add_dataset(group, name, data, system=None, dtype=None):
if type(data) is str:
stringDataset(group, name, data, system)
else:
if dtype:
dset = group.create_dataset(name, data=data, dtype=dtype)
else:
try:
dset = group.create_dataset(name, data=data)
except Exception as e:
dset = None
print('Error for dataset %s' % name)
print('Continuing')
print(e)
if dset is not None and system:
addSystemAttribute(dset, system)
def saveH5Recursive(h5_filename, data_dict, dataH5=None):
def recurse_save(group, dict_or_data, dict_or_data_name, new_group=None):
if dict_or_data is None:
dict_or_data = 'None'
if group is None:
print("'recurse_save' has been called with None")
raise ValueError
if type(dict_or_data) is dict:
try:
new_group = group.create_group(dict_or_data_name)
except Exception as e:
print("Error in group.create_group", str(e))
return
if new_group is None:
raise ValueError
for key, val in dict_or_data.items():
try:
recurse_save(new_group, val, key)
except ValueError:
print('I called recurse_save with None')
#import pdb; pdb.set_trace()
else:
mydata = dict_or_data
inner_key = dict_or_data_name
if type(mydata) is str:
add_dataset(group, inner_key, mydata.encode('utf-8'), 'unknown')
elif (type(mydata) is list and type(mydata[0]) is str) or (hasattr(mydata, 'dtype') and mydata.dtype.type is np.str_):
# For list of strings, we need this procedure
if type(mydata[0]) is str:
mydata = np.array(mydata)
print("string to np.str", mydata)
elif hasattr(mydata, 'dtype') and mydata.dtype.type is np.str_:
print("np.str_ array")
try:
if hasattr(mydata, 'dtype') and \
(mydata.dtype.type is np.str_ or \
mydata.dtype.type is str) and len(mydata.shape) == 2:
mydata = mydata.flatten()
if len(mydata.shape) == 2:
new_list = [[n.encode('ascii') for n in arr] for arr in mydata]
max_str_size = max(max(len(n) for n in arr) for arr in mydata)
elif len(mydata.shape) == 1:
new_list = [n.encode('ascii') for n in mydata]
max_str_size = max(len(n) for n in mydata)
elif len(mydata.shape) == 0:
new_list = [mydata.encode('ascii')]
max_str_size = len(new_list[0])
#print('Max len %i' % max_str_size)
dset = group.create_dataset(inner_key, mydata.shape, 'S%i' % max_str_size, new_list)
#print(np.array(dset))
dset.attrs.create('system', 'unknown', (1,), dtype=dt)
except Exception as e:
print('Exception:', e )
print('Error', inner_key)
print(type(mydata))
if type(mydata) is list:
print('type(mydata[0])')
print(type(mydata[0]))
print('len mydata shape=', len(mydata.shape))
print('mydata')
print(mydata)
elif hasattr(mydata, 'dtype') and mydata.dtype == np.dtype('O'):
if mydata.shape == ():
add_dataset(group, inner_key, mydata, 'unknown')
elif len(mydata.shape) == 1:
add_dataset(group, inner_key, mydata, 'unknown')
else:
for i in range(mydata.shape[0]):
for j in range(mydata.shape[1]):
try:
add_dataset(group, inner_key+'_%i_%i' % (i,j), mydata[i,j], 'unknown')
except Exception:
print('Error')
print(group, inner_key, i, j)
else:
try:
add_dataset(group, inner_key, mydata, 'unknown')
except Exception as e:
print('Error', e)
print(inner_key, type(mydata))
if dataH5 is None:
with h5py.File(h5_filename, 'w') as dataH5:
for main_key, subdict in data_dict.items():
recurse_save(dataH5, subdict, main_key, None)
print("h5_storage.py SAVED TO FILE", h5_filename, flush=True)
else:
print("data_dict keys", data_dict.keys())
for main_key, subdict in data_dict.items():
recurse_save(dataH5, subdict, main_key, None)
print("h5_storage.py SAVED TO dataH5", flush=True)
#recurse_save(dataH5, data_dict, 'none', new_group=dataH5)
def loadH5Recursive(h5_file):
def recurse_load(group_or_val, key, saved_dict_curr):
type_ = type(group_or_val)
if type_ is h5py._hl.files.File:
for new_key, new_group_or_val in group_or_val.items():
recurse_load(new_group_or_val, new_key, saved_dict_curr)
elif type_ is h5py._hl.group.Group:
saved_dict_curr[key] = new_dict = {}
for new_key, new_group_or_val in group_or_val.items():
recurse_load(new_group_or_val, new_key, new_dict)
elif type_ == np.dtype('O') and type(group_or_val[()]) is bytes:
saved_dict_curr[key] = group_or_val[()].decode()
elif type_ == h5py._hl.dataset.Dataset:
dtype = group_or_val.dtype
#if not hasattr(group_or_val, 'value'):
# print('Could not store key %s with type %s in dict' % (key, dtype))
# return
if dtype in (np.dtype('int64'), np.dtype('int32'), np.dtype('int16'), np.dtype('int8')):
saved_dict_curr[key] = np.array(group_or_val[()], int).squeeze()
elif dtype == np.dtype('bool'):
try:
saved_dict_curr[key] = bool(group_or_val[()])
except Exception:
print('Could not store key %s with type %s in dict (1)' % (key, dtype))
elif dtype in numerical_types:
saved_dict_curr[key] = np.array(group_or_val[()]).squeeze()
elif dtype.str.startswith('|S'):
if group_or_val[()].shape == (1,1):
saved_dict_curr[key] = group_or_val[()][0,0].decode()
elif group_or_val[()].shape == (1,):
saved_dict_curr[key] = group_or_val[()][0].decode()
elif group_or_val[()].shape == ():
saved_dict_curr[key] = group_or_val[()].decode()
else:
saved_dict_curr[key] = [x.decode() for x in group_or_val[()].squeeze()]
elif dtype.str == '|O':
saved_dict_curr[key] = group_or_val[()]
elif type(group_or_val[()]) is str:
saved_dict_curr[key] = group_or_val[()]
else:
print('Could not store key %s with type %s in dict (2)' % (key, dtype))
else:
print('Could not store key %s with type %s in dict (3)' % (key, type_))
saved_dict = {}
with h5py.File(h5_file, 'r') as f:
if 'none' in f:
recurse_load(f['none'], 'key', saved_dict)
saved_dict = saved_dict['key']
else:
recurse_load(f, 'key', saved_dict)
return saved_dict
def save_h5_new(saved_dict, h5_file):
def recurse_save(dict_, group, system):
print('recurse', dict_.keys())
for key, subdict_or_data in dict_.items():
type_ = type(subdict_or_data)
print(key, type_)
if type_ is dict:
new_group = group.create_group(key)
recurse_save(subdict_or_data, new_group, system)
elif type_ is np.ndarray:
add_dataset(group, key, subdict_or_data, system)
elif type_ is str:
add_dataset(group, key, subdict_or_data, system, dtype=dt)
else:
raise ValueError(key, type_)
@lru_cache()
def re_axis(x):
return re.compile(r'gr_%s_axis_(\d+)_(\d+)' % x)
@lru_cache()
def re_gauss_function(x):
return re.compile(r'gr_%s_fit_gauss_function_(\d+)_(\d+)' % x)
n_measurements, n_images = saved_dict['Raw_data']['image'].shape[:2]
# Create arrays for gr / slice values, which differ in size across measurements and images
gr_x_shape_max = -1
gr_y_shape_max = -1
for key, data in sorted(saved_dict['Raw_data'].items()):
if key.startswith('gr_x_axis'):
gr_x_shape_max = max(gr_x_shape_max, data.shape[0])
elif key.startswith('gr_y_axis'):
gr_y_shape_max = max(gr_y_shape_max, data.shape[0])
gr_x_axis = np.zeros([n_measurements, n_images, gr_x_shape_max])*np.nan
gr_y_axis = np.zeros([n_measurements, n_images, gr_y_shape_max])*np.nan
gr_x_fit_gauss_function = gr_x_axis.copy()
gr_y_fit_gauss_function = gr_y_axis.copy()
for key, data in sorted(saved_dict['Raw_data'].items()):
for arr, regex in [
(gr_x_axis, re_axis('x')),
(gr_y_axis, re_axis('y')),
(gr_x_fit_gauss_function, re_gauss_function('x')),
(gr_y_fit_gauss_function, re_gauss_function('y')),
]:
match = regex.match(key)
if match is not None:
#print(key, 'matches', regex)
n_measurement, n_image = map(int, match.groups())
arr[n_measurement, n_image,:len(data)] = data
continue
with h5py.File(h5_file, 'w') as f:
general = f.create_group('general')
stringDataset(general, 'user', getpass.getuser())
stringDataset(general, 'application', 'EmittanceTool')
stringDataset(general, 'author', 'Philipp Dijkstal and Eduard Prat')
stringDataset(general, 'created', time.ctime())
experiment = f.create_group('experiment')
try:
from epics import caget
lrr = float(caget('SIN-TIMAST-TMA:Beam-Exp-Freq-RB'))
except Exception as e:
print('Could not obtain Laser rep rate!')
print(e)
lrr = np.nan
add_dataset(experiment, 'Laser rep rate', lrr, 'unknown')
# TBD: save snapshot here
scan1 = f.create_group('scan 1')
method = scan1.create_group('method')
method.create_dataset('records', data=[float(n_measurements)])
method.create_dataset('samples', data=[float(n_images)])
method.create_dataset('dimension', data=[1])
stringDataset(method, 'type', 'Line scan')
recurse_save(saved_dict['Input'], method, 'Application Input')
data = scan1.create_group('data')
screen = data.create_group(saved_dict['Input']['Profile monitor'])
recurse_save(saved_dict['Meta_data'], screen, 'Emittance data')
for key, data_ in sorted(saved_dict['Raw_data'].items()):
if not any([x.match(key) for x in [re_axis('x'), re_axis('y'), re_gauss_function('x'), re_gauss_function('y')]]):
add_dataset(screen, key, data_, 'Camera')
#print('Created %s' % key)
if not np.all(np.isnan(gr_x_axis)):
add_dataset(screen, 'gr_x_axis', gr_x_axis, 'Camera')
else:
print('gr_x_axis is nan')
if not np.all(np.isnan(gr_y_axis)):
add_dataset(screen, 'gr_y_axis', gr_y_axis, 'Camera')
else:
print('gr_y_axis is nan')
if not np.all(np.isnan(gr_x_fit_gauss_function)):
add_dataset(screen, 'gr_x_fit_gauss_function', gr_x_fit_gauss_function, 'Camera')
else:
print('gr_x_fit_gauss_function is nan')
if not np.all(np.isnan(gr_y_fit_gauss_function)):
add_dataset(screen, 'gr_y_fit_gauss_function', gr_y_fit_gauss_function, 'Camera')
else:
print('gr_y_fit_gauss_function is nan')
if 'Magnet_data' in saved_dict:
for n_magnet, magnet in enumerate(saved_dict['Magnet_data']['Magnets']):
mag_group = method.create_group('actuators/%s' % magnet)
add_dataset(mag_group, 'K', saved_dict['Magnet_data']['K'][n_magnet], 'Magnet')
add_dataset(mag_group, 'I-SET', saved_dict['Magnet_data']['I-SET'][n_magnet], 'Magnet')
elif saved_dict['Input']['Dry run'] not in (np.array(False), False):
raise ValueError('No magnet data')
else:
print('Magnet data not saved.')
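A minimal round-trip sketch for the recursive save/load helpers in this new file; the import path and the nested dictionary content are illustrative assumptions, not taken from the commit:

import numpy as np
from h5_storage import saveH5Recursive, loadH5Recursive  # assumed import path

# Nested dict shaped the way the recursion expects (keys are illustrative)
data = {
    'Ambient data': {'Time in seconds': 1700211460.0},
    'Processed data': {'emittance': np.array([1.1e-9, 1.2e-9]), 'labels': ['x', 'y']},
}
saveH5Recursive('/tmp/example.h5', data)        # opens and writes the file when dataH5 is None
restored = loadH5Recursive('/tmp/example.h5')   # rebuilds the nested dict from the file
print(restored['Processed data']['labels'])     # -> ['x', 'y']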

View File

@@ -220,7 +220,6 @@ class QSaveHDF(QDialog):
def get_data(self):
self.user_dict['Application'] = self.applicationLabel.text()
self.user_dict['User'] = self.author.text()
self.user_dict['Comment'] = self.comment.document().toPlainText()

View File

@@ -4,9 +4,9 @@ import os
import time
from qtpy.QtCore import Qt
from qtpy.QtWidgets import (QComboBox, QDialog, QFileDialog, QHBoxLayout,
QLabel, QLineEdit, QPushButton, QTextEdit,
QVBoxLayout)
from qtpy.QtWidgets import (
QApplication, QComboBox, QDialog, QFileDialog, QHBoxLayout, QLabel,
QLineEdit, QPushButton, QTextEdit, QVBoxLayout)
import elog # https://github.com/paulscherrerinstitute/py_elog
from pyqtacc.bdbase.enumkind import MsgSeverity
@@ -212,7 +212,7 @@ class QSendToELOGFrame(QDialog):
self.attributes['When'] = str(time.time())
self.attributes['Wann'] = str(time.time())
QApplication.processEvents()
if self.attachFile is not None:
_attachFile = []
@@ -231,13 +231,14 @@ class QSendToELOGFrame(QDialog):
self.files.append(str(_attachFile[i]))
else:
self.files.append(self.destination + str(_attachFile[i]))
QApplication.processEvents()
el = self.elog_items.currentText()
url = self.parent.settings.data["ElogBooks"][el]["url"]
self.logbook = elog.open(url, user='robot', password='robot')
QApplication.processEvents()
try:
if self.files: