Compare commits
43 Commits
| Author | SHA1 | Date | |
|---|---|---|---|
| a1359cca84 | |||
| 25eed8079b | |||
| 5d6a74ab9d | |||
| e830a52119 | |||
| a0efa778cb | |||
| 44d9971bd8 | |||
| e3eca94b0f | |||
| 8c26ec4c15 | |||
| 7c291bcbf8 | |||
| 1f4fe75c73 | |||
| f70404c773 | |||
| fcf6b46ec0 | |||
| 6148e37df2 | |||
| 7580203c2b | |||
| 3033e07152 | |||
| 655b5c66a7 | |||
| 0f6889b54c | |||
| a6b60c30ce | |||
| 8dea32e263 | |||
| 4e701863c3 | |||
| 1fdf7013ed | |||
| 18ca4760bc | |||
| 16990824a8 | |||
| 72e4ed4f7a | |||
| 909ad9a3f3 | |||
| 2daa9ad6a7 | |||
| 2246e71e53 | |||
| 016ca6cebf | |||
| 9c3177f100 | |||
| 389a126f28 | |||
| d41bba4233 | |||
| 7a237d0d12 | |||
| 67b3c3be17 | |||
| 87df78d969 | |||
| e46f427ca2 | |||
| 64570f55c8 | |||
| 2105ba8e7d | |||
| bdad077350 | |||
| 051e361c9f | |||
| 7cac3da421 | |||
| cb7a74d189 | |||
| 9c2c5d7d37 | |||
| 3640082b59 |
512
base.py
512
base.py
@@ -5,8 +5,9 @@ from collections import OrderedDict
|
||||
from datetime import datetime
|
||||
import getpass
|
||||
import h5py
|
||||
import logging
|
||||
import inspect
|
||||
import logging
|
||||
import numpy as np
|
||||
import platform
|
||||
import os
|
||||
import re
|
||||
@@ -25,6 +26,7 @@ from qtpy.QtWidgets import (QAction, QApplication, QDialog, QFrame, QLabel,
|
||||
QProgressBar, QScrollArea, QSizePolicy,
|
||||
QSplashScreen, QVBoxLayout, QWidget)
|
||||
|
||||
from pyqtacc.bdbase.utils import _line
|
||||
from pyqtacc.bdbase.enumkind import Facility, MsgSeverity, UserMode
|
||||
from pyqtacc.bdbase.helpbrowser import HelpBrowser
|
||||
from pyqtacc.bdbase.readjson import ReadJSON
|
||||
@@ -32,7 +34,6 @@ from pyqtacc.bdbase.savehdf import QSaveHDF
|
||||
from pyqtacc.bdbase.hdf5filemenu import HDF5GroupBox
|
||||
from pyqtacc.bdbase.sendelog import QSendToELOG
|
||||
|
||||
|
||||
from pyqtacc.bdbase.screenshot import QScreenshot
|
||||
from pyqtacc.bdbase.guiframe import GUIFrame
|
||||
|
||||
@@ -45,30 +46,18 @@ _appname, _appext = _pymodule.split(".")
|
||||
_appversion = "1.0.0"
|
||||
_author = "J. Chrin"
|
||||
|
||||
PROGRESS_BAR_THREAD_INIT = 0
|
||||
PROGRESS_BAR_THREAD_START = 1
|
||||
PROGRESS_BAR_THREAD_ABORTING = 2
|
||||
PROGRESS_BAR_THREAD_ABORTED = 3
|
||||
PROGRESS_BAR_THREAD_ERROR = 4
|
||||
PROGRESS_BAR_THREAD_END = 100
|
||||
PROGRESS_THREAD_INIT = 0
|
||||
PROGRESS_THREAD_START = 1
|
||||
PROGRESS_THREAD_ABORTING = 2
|
||||
PROGRESS_THREAD_ABORTED = 3
|
||||
PROGRESS_THREAD_ERROR = 4
|
||||
PROGRESS_THREAD_END = 100
|
||||
|
||||
CENTRAL_WIDGET_MINIMUM_HEIGHT = 840
|
||||
CENTRAL_WIDGET_MINIMUM_WIDTH = 1240
|
||||
SLS_CENTRAL_WIDGET_MINIMUM_HEIGHT = 840
|
||||
SLS_CENTRAL_WIDGET_MINIMUM_WIDTH = 940
|
||||
|
||||
def _line():
|
||||
"""Macro to return the current line number.
|
||||
|
||||
The current line number within the file is used when
|
||||
reporting messages to the message logging window.
|
||||
|
||||
Returns:
|
||||
int: Current line number.
|
||||
"""
|
||||
return inspect.currentframe().f_back.f_lineno
|
||||
|
||||
|
||||
class BaseWindow(QMainWindow):
|
||||
""" BaseWindow
|
||||
"""
|
||||
@@ -90,70 +79,100 @@ class BaseWindow(QMainWindow):
|
||||
self.time_in_seconds = time_in_seconds
|
||||
self.reanalysis_time = reanalysis_time
|
||||
self.all_data = self.parent.all_data
|
||||
self.all_data_2 = self.parent.all_data_2
|
||||
|
||||
#Causes QThread::wait: Thread tried to wait on itself
|
||||
#def __del__(self):
|
||||
# self.wait()
|
||||
|
||||
def run(self):
|
||||
attach_files = []
|
||||
folder_name = self.folder_name
|
||||
|
||||
print("Running SaveFigureThread, folder_name=", folder_name, flush=True)
|
||||
|
||||
|
||||
date_str = self.parent.add_date_to_path(
|
||||
time_in_seconds=self.time_in_seconds,
|
||||
reanalysis_time_in_seconds=self.reanalysis_time)
|
||||
|
||||
#print("date_str", date_str, flush=True)
|
||||
|
||||
write_message_fired = False
|
||||
for i, (nfig, name) in enumerate(
|
||||
zip(self.settings.data["GUI"]["resultsSeq"],
|
||||
self.settings.data["GUI"]["subResultsTabTitle"])):
|
||||
def extract_and_attach(i, nfig, name, all_fig_data):
|
||||
canvas = 'Canvas {0}'.format(i+1)
|
||||
name_base = name.replace(' ', '_').lower()
|
||||
write_message_fired = False
|
||||
|
||||
if self.all_data['Figure data'][canvas] is not None:
|
||||
nfig_canvas = len(self.all_data['Figure data'][canvas])
|
||||
if all_fig_data[canvas] is not None:
|
||||
nfig_canvas = len(all_fig_data[canvas])
|
||||
nfig_canvas = min(nfig_canvas, nfig)
|
||||
else:
|
||||
nfig_canvas = nfig
|
||||
|
||||
|
||||
|
||||
for idx in range(0, nfig_canvas):
|
||||
if self.all_data['Figure data'][canvas] is not None:
|
||||
if all_fig_data[canvas] is not None:
|
||||
|
||||
name = name_base + "_{0}".format(
|
||||
idx) if idx > 0 else name_base
|
||||
save_dest = (folder_name + date_str + '_' + name +
|
||||
'.png')
|
||||
|
||||
|
||||
|
||||
if not os.path.exists(save_dest):
|
||||
if self.all_data['Figure data'][canvas][
|
||||
idx] is not None:
|
||||
if all_fig_data[canvas][idx] is not None:
|
||||
_dirname = os.path.dirname(save_dest)
|
||||
|
||||
if os.access(_dirname, os.W_OK):
|
||||
self.all_data['Figure data'][canvas][
|
||||
idx].savefig(save_dest)
|
||||
elif not write_message_fired:
|
||||
|
||||
all_fig_data[canvas][idx].savefig(save_dest)
|
||||
elif not write_message_fired:
|
||||
|
||||
_mess = ("Do not have write permission " +
|
||||
"for directory {0} from this " +
|
||||
"host {1}. Images not saved and " +
|
||||
"cannot be sent to elog").format(
|
||||
_dirname, os.uname()[1])
|
||||
|
||||
self.parent.trigger_log_message.emit(
|
||||
MsgSeverity.WARN.name, _pymodule,
|
||||
_line(), _mess, {})
|
||||
write_message_fired = True
|
||||
|
||||
attach_files.append(save_dest)
|
||||
|
||||
|
||||
if not write_message_fired:
|
||||
attach_files.append(save_dest)
|
||||
|
||||
|
||||
|
||||
|
||||
try:
|
||||
resultsSeq = self.settings.data["GUI"]["resultsSeq"]
|
||||
titleSeq = self.settings.data["GUI"]["subResultsTabTitle"]
|
||||
if self.all_data:
|
||||
fig_data = self.all_data['Figure data']
|
||||
for i, (nfig, name) in enumerate(zip(resultsSeq, titleSeq)):
|
||||
print(i, nfig, name, flush=True)
|
||||
print(fig_data, flush=True)
|
||||
extract_and_attach(i, nfig, name, fig_data)
|
||||
except KeyError as ex:
|
||||
pass
|
||||
|
||||
|
||||
try:
|
||||
resultsSeq = self.settings.data["GUI2"]["resultsSeq"]
|
||||
titleSeq = self.settings.data["GUI2"]["subResultsTabTitle"]
|
||||
if self.all_data_2:
|
||||
fig_data = self.all_data_2['Figure data']
|
||||
for i, (nfig, name) in enumerate(zip(resultsSeq, titleSeq)):
|
||||
extract_and_attach(i, nfig, name, fig_data)
|
||||
except KeyError as ex:
|
||||
pass
|
||||
|
||||
|
||||
|
||||
#Not so nice.. send a signal instead?
|
||||
if attach_files:
|
||||
self.parent.attach_files = attach_files
|
||||
print(attach_files, flush=True)
|
||||
print("All files attached", flush=True)
|
||||
else:
|
||||
print("No files to attach", flush=True)
|
||||
time.sleep(0.2) #avoid race condition
|
||||
time.sleep(0.1) #avoid race condition
|
||||
|
||||
class HDFSave(QThread):
|
||||
"""Thread for hdf analysis
|
||||
@@ -164,57 +183,81 @@ class BaseWindow(QMainWindow):
|
||||
self.parent = parent
|
||||
self.from_dialog=from_dialog
|
||||
|
||||
def __del__(self):
|
||||
self.wait()
|
||||
#Only a precaution, not experienced
|
||||
#Causes QThread::wait: Thread tried to wait on itself
|
||||
#def __del__(self):
|
||||
# self.wait()
|
||||
|
||||
def run(self):
|
||||
"""Run hdf thread
|
||||
"""
|
||||
QApplication.processEvents(QEventLoop.ExcludeUserInputEvents, 5000)
|
||||
|
||||
|
||||
self.all_data = self.parent.all_data
|
||||
#Reanalysis data
|
||||
|
||||
|
||||
if self.all_data is not None:
|
||||
ts_in_seconds = self.all_data['Ambient data']['Time in seconds']
|
||||
now_in_seconds = self.all_data['Processed data'][
|
||||
'Reanalysis time in seconds'] if \
|
||||
self.all_data['Processed data']['Reanalysis time'] else None
|
||||
try:
|
||||
if 'Time in seconds' in self.all_data['Ambient data']:
|
||||
ts_in_seconds = self.all_data['Ambient data'][
|
||||
'Time in seconds']
|
||||
else:
|
||||
ts_in_seconds = None
|
||||
except KeyError:
|
||||
ts_in_seconds = None
|
||||
|
||||
from_hdf = bool(
|
||||
self.all_data['Processed data']['Reanalysis time'])
|
||||
now_in_seconds = None
|
||||
from_hdf = False
|
||||
|
||||
try:
|
||||
if 'Reanalysis time in seconds' in self.all_data[
|
||||
'Processed data']:
|
||||
from_hdf = bool(
|
||||
self.all_data['Processed data']['Reanalysis time'])
|
||||
if from_hdf:
|
||||
now_in_seconds = self.all_data['Processed data'][
|
||||
'Reanalysis time in seconds']
|
||||
|
||||
#Double check
|
||||
if not from_hdf:
|
||||
if 'from_hdf' in self.all_data['Processed data']:
|
||||
from_hdf = bool(self.all_data['Processed data'][
|
||||
'from_hdf'])
|
||||
|
||||
except KeyError:
|
||||
now_in_seconds = None
|
||||
|
||||
self.parent.from_hdf = from_hdf
|
||||
|
||||
print("t=========================>", ts_in_seconds, " // ", now_in_seconds)
|
||||
print("from hdf5=========================>", from_hdf)
|
||||
|
||||
#print("Parent Thread before ==>", self.parent.hdf_filename)
|
||||
if self.parent.hdf_filename is None or not self.from_dialog:
|
||||
self.parent.set_new_hdf_filename(ts_in_seconds,
|
||||
now_in_seconds)
|
||||
#else:
|
||||
# self.parent.set_new_hdf_filename(ts_in_seconds,
|
||||
# now_in_seconds)
|
||||
#print("Parent Thread after ==>", self.parent.hdf_filename)
|
||||
|
||||
#Open hdf5file here and
|
||||
#mess = "HDF save to file {0} proceeding...".format(
|
||||
# self.parent.hdf_filename)
|
||||
#self.parent.trigger_log_message.emit(MsgSeverity.INFO.name,
|
||||
# _pymodule, _line(),
|
||||
# mess, {})
|
||||
try:
|
||||
print("FILENAME ==", self.parent.hdf_filename, flush=True)
|
||||
with h5py.File(self.parent.hdf_filename, 'w') as dataH5:
|
||||
self.parent.add_pvs_to_hdf(
|
||||
dataH5, pv_list=self.parent.pv_machine_list,
|
||||
from_hdf=from_hdf)
|
||||
|
||||
#experiment
|
||||
if not from_hdf:
|
||||
self.parent.add_pvs_to_hdf(
|
||||
dataH5, pv_list=self.parent.pv_machine_list,
|
||||
from_hdf=from_hdf)
|
||||
|
||||
#general
|
||||
#if not from_hdf:
|
||||
self.parent.add_general_to_hdf(dataH5)
|
||||
self.parent.add_to_hdf(dataH5, proc=True, raw=True)
|
||||
|
||||
self.parent.hdf_save_completed = True
|
||||
|
||||
|
||||
_mess = "Processed data saved to {}".format(
|
||||
self.parent.hdf_filename)
|
||||
self.parent.trigger_log_message.emit(
|
||||
MsgSeverity.INFO.name, _pymodule, _line(), _mess, {})
|
||||
MsgSeverity.INFO.name, _pymodule, _line(), _mess,
|
||||
{})
|
||||
except OSError as e:
|
||||
_mess = "OSError in saving to file {0}: {1}".format(
|
||||
_mess = "OSError in saving to file {0}: \n{1}".format(
|
||||
self.parent.hdf_filename, str(e))
|
||||
self.parent.trigger_log_message.emit(
|
||||
MsgSeverity.ERROR.name, _pymodule, _line(), _mess, {})
|
||||
@@ -233,19 +276,37 @@ class BaseWindow(QMainWindow):
|
||||
self.all_data = all_data
|
||||
self.hdf_filename_loaded = self.parent.hdf_filename_loaded
|
||||
|
||||
def __del__(self):
|
||||
self.wait()
|
||||
#Only a precaution, not experienced
|
||||
#Causes QThread::wait: Thread tried to wait on itself
|
||||
#def __del__(self):
|
||||
# self.wait()
|
||||
|
||||
def run(self):
|
||||
"""Run hdf thread
|
||||
"""
|
||||
|
||||
if not hasattr(self.analysis_procedure, 'load_hdf_file'):
|
||||
mess = ("Analysis not configured for HDF analysis! " +
|
||||
"Missing method: load_hdf_file")
|
||||
self.parent.trigger_log_message.emit(
|
||||
MsgSeverity.ERROR.name,_pymodule, _line(), mess, {})
|
||||
self.parent.trigger_progressbar.emit(PROGRESS_THREAD_END)
|
||||
return
|
||||
|
||||
self.all_data = self.analysis_procedure.load_hdf_file(
|
||||
self.hdf_filename_loaded)
|
||||
|
||||
if not self.all_data:
|
||||
self.parent.trigger_progressbar.emit(PROGRESS_THREAD_END)
|
||||
return
|
||||
|
||||
if not hasattr(self.analysis_procedure, 'reanalyze'):
|
||||
mess = ("Analysis not configured for HDF analysis! " +
|
||||
"Missing method: reanalyze")
|
||||
self.parent.trigger_log_message.emit(
|
||||
MsgSeverity.ERROR.name, _pymodule, _line(), mess, {})
|
||||
self.parent.trigger_progressbar.emit(PROGRESS_THREAD_END)
|
||||
return
|
||||
try:
|
||||
expt_dict = self.all_data['experiment']
|
||||
except KeyError:
|
||||
@@ -266,6 +327,7 @@ class BaseWindow(QMainWindow):
|
||||
|
||||
# Emit results
|
||||
if all_dict is not None:
|
||||
self.parent.from_hdf = True
|
||||
self.trigger_thread_event.emit(all_dict)
|
||||
mess = "HDF file {} analysis succeeded".format(
|
||||
self.hdf_filename_loaded)
|
||||
@@ -283,14 +345,19 @@ class BaseWindow(QMainWindow):
|
||||
"""
|
||||
trigger_thread_event = Signal(dict)
|
||||
|
||||
def __init__(self, parent, analysis_procedure, input_parameters):
|
||||
def __init__(self, parent, analysis_procedure, input_parameters,
|
||||
messages: dict={
|
||||
"success": "Analysis completed", "fail":
|
||||
"No data returned from analysis procedure"}):
|
||||
|
||||
QThread.__init__(self)
|
||||
self.parent = parent
|
||||
self.analysis_procedure = analysis_procedure
|
||||
self.input_parameters = input_parameters
|
||||
self.messages = messages
|
||||
try:
|
||||
if input_parameters['debug']:
|
||||
print("AnalysisThread", self.input_parameters)
|
||||
print("AnalysisThread", self.input_parameters, flush=True)
|
||||
except KeyError:
|
||||
pass
|
||||
|
||||
@@ -300,20 +367,19 @@ class BaseWindow(QMainWindow):
|
||||
def run(self):
|
||||
"""Run thread
|
||||
"""
|
||||
|
||||
|
||||
print("RUN IN BASE CLASS", flush=True)
|
||||
|
||||
all_dict = self.analysis_procedure.measure_and_analyze(
|
||||
self.input_parameters)
|
||||
|
||||
# Emit results
|
||||
if all_dict:
|
||||
self.trigger_thread_event.emit(all_dict)
|
||||
|
||||
mess = "Analysis completed"
|
||||
mess = self.messages['success']
|
||||
self.parent.trigger_log_message.emit(
|
||||
MsgSeverity.INFO.name, _pymodule, _line(), mess, {})
|
||||
else:
|
||||
mess = "No data returned from analysis procedure."
|
||||
mess = self.messages['fail']
|
||||
self.parent.trigger_log_message.emit(
|
||||
MsgSeverity.WARN.name, _pymodule, _line(), mess, {})
|
||||
|
||||
@@ -343,7 +409,6 @@ class BaseWindow(QMainWindow):
|
||||
|
||||
self.settings = ReadJSON(self.appname)
|
||||
|
||||
|
||||
#Read out current_logbook
|
||||
|
||||
self.cafe = PyCafe.CyCafe()
|
||||
@@ -363,10 +428,10 @@ class BaseWindow(QMainWindow):
|
||||
self.hdf_filename_loaded = "NONE" #For loading into hdf dockwidget
|
||||
self.hdf_filename = None #For saving
|
||||
self.hdf_dialog = None
|
||||
self.from_hdf = False
|
||||
|
||||
self.daq_analysis_completed = False
|
||||
|
||||
|
||||
self.setObjectName("MainWindow")
|
||||
self.setWindowTitle(self.appname)
|
||||
|
||||
@@ -376,20 +441,7 @@ class BaseWindow(QMainWindow):
|
||||
|
||||
self.menu = self.menuBar()
|
||||
|
||||
|
||||
'''
|
||||
try:
|
||||
dirname = self.settings.data["stdout"]["destination"]
|
||||
except KeyError:
|
||||
dirname = "/tmp/"
|
||||
|
||||
if not os.path.exists(dirname):
|
||||
os.mkdir(dirname)
|
||||
|
||||
fname = dirname + self.appname + ".log"
|
||||
file_obj = os.open(fname, os.O_RDWR|os.O_CREAT)
|
||||
os.close(file_obj)
|
||||
'''
|
||||
|
||||
|
||||
self.elog_dest = self.settings.data["Elog"]["destination"]
|
||||
self.screenshot_dest = self.settings.data["screenshot"]["destination"]
|
||||
@@ -398,7 +450,7 @@ class BaseWindow(QMainWindow):
|
||||
|
||||
self.logging = logging
|
||||
#self.logging.basicConfig(filename=self.stdlog_dest, level=logging.DEBUG)
|
||||
self.logging.basicConfig(level=logging.DEBUG)
|
||||
self.logging.basicConfig(level=logging.NOTSET)
|
||||
self.logger = self.logging.getLogger(__name__)
|
||||
self.logger.info("Logging activated")
|
||||
|
||||
@@ -444,7 +496,7 @@ class BaseWindow(QMainWindow):
|
||||
|
||||
self.read_input_parameters()
|
||||
self.all_data = {}
|
||||
|
||||
self.all_data_2 = {}
|
||||
self.hdf_thread = None
|
||||
self.save_hdf_thread = None
|
||||
self.analysis_thread = None
|
||||
@@ -452,8 +504,11 @@ class BaseWindow(QMainWindow):
|
||||
try:
|
||||
from src.analysis import AnalysisProcedure
|
||||
self.analysis_procedure = AnalysisProcedure(self)
|
||||
print("Base class has user supplied AnalysisProcedure class.",
|
||||
flush=True)
|
||||
except ImportError as e:
|
||||
print("Import Error:", e)
|
||||
print(("Base class without user supplied AnalysisProcedure class."
|
||||
+ " import Error:"), e, flush=True)
|
||||
|
||||
##self.trigger_elog_entry.connect(self.receive_elog_notification)
|
||||
##self.trigger_hdf_save.connect(self.save_to_hdf)
|
||||
@@ -508,7 +563,9 @@ class BaseWindow(QMainWindow):
|
||||
elif self.facility == Facility.SLS:
|
||||
from pyqtacc.sls.guiheader import GUIHeader
|
||||
from pyqtacc.sls.sendelogsls import QSendToELOG
|
||||
|
||||
elif self.facility == Facility.HIPA:
|
||||
from pyqtacc.hipa.guiheader import GUIHeader
|
||||
from pyqtacc.hipa.sendeloghipa import QSendToELOG
|
||||
|
||||
self.gui_header = GUIHeader(self, user_mode=self.user_mode,
|
||||
extended=extended)
|
||||
@@ -533,7 +590,7 @@ class BaseWindow(QMainWindow):
|
||||
self.mainwindow.setMinimumWidth(SLS_CENTRAL_WIDGET_MINIMUM_WIDTH)
|
||||
|
||||
self.setCentralWidget(self.mainwindow)
|
||||
self.show_log_message(MsgSeverity.INFO, _pymodule, _line(),
|
||||
self.show_log_message(MsgSeverity.INFO.name, _pymodule, _line(),
|
||||
"Application configured")
|
||||
|
||||
|
||||
@@ -840,15 +897,21 @@ class BaseWindow(QMainWindow):
|
||||
_mess, QMessageBox.Ok)
|
||||
return False
|
||||
|
||||
if self.daq_analysis_completed and not self.hdf_save_completed:
|
||||
if not self.all_data['Processed data']['Reanalysis time']:
|
||||
_mess = ("Are you sure you wish to exit " +
|
||||
"without saving data to HDF?")
|
||||
qm = QMessageBox()
|
||||
reply = qm.warning(self, "Exit", _mess,
|
||||
QMessageBox.Yes | QMessageBox.No)
|
||||
if reply == QMessageBox.No:
|
||||
return False
|
||||
if self.daq_analysis_completed and not self.hdf_save_completed \
|
||||
and not self.from_hdf:
|
||||
if self.all_data is not None:
|
||||
try:
|
||||
if 'Reanalysis time' in self.all_data['Processed data']:
|
||||
if not self.all_data['Processed data']['Reanalysis time']:
|
||||
_mess = ("Are you sure you wish to exit " +
|
||||
"without saving data to HDF?")
|
||||
qm = QMessageBox()
|
||||
reply = qm.warning(self, "Exit", _mess,
|
||||
QMessageBox.Yes | QMessageBox.No)
|
||||
if reply == QMessageBox.No:
|
||||
return False
|
||||
except KeyError:
|
||||
pass
|
||||
return True
|
||||
|
||||
@Slot()
|
||||
@@ -858,8 +921,11 @@ class BaseWindow(QMainWindow):
|
||||
#Close all dock widgets
|
||||
#self.removeDockWidget(self.hdf_dock_widget)
|
||||
self.logger.info("Closing Application")
|
||||
print("Closing Application", flush=True)
|
||||
self.save_application_settings()
|
||||
QApplication.processEvents()
|
||||
#print( ("Stopping Monitors. This may on occasion lead to " +
|
||||
# "NO CYTHON CALLBACK MATCH FOUND notices"), flush=True)
|
||||
self.cafe.monitorStopAll()
|
||||
time.sleep(0.05)
|
||||
self.cafe.terminate()
|
||||
@@ -950,9 +1016,11 @@ class BaseWindow(QMainWindow):
|
||||
QApplication.processEvents()
|
||||
if self.autopost_epics:
|
||||
self.save_to_epics()
|
||||
|
||||
QApplication.processEvents()
|
||||
if self.autopost_hdf:
|
||||
self.save_to_hdf()
|
||||
|
||||
QApplication.processEvents()
|
||||
if self.autopost_elog:
|
||||
self.send_to_elog()
|
||||
@@ -1018,7 +1086,7 @@ class BaseWindow(QMainWindow):
|
||||
|
||||
if not self.daq_analysis_completed:
|
||||
QMessageBox.information(self, "HDF", (
|
||||
("No data to save to hdf5; no measurement undertaken!")),
|
||||
("No data to save to hdf; no measurement undertaken!")),
|
||||
QMessageBox.Ok)
|
||||
QApplication.processEvents()
|
||||
return False
|
||||
@@ -1035,9 +1103,11 @@ class BaseWindow(QMainWindow):
|
||||
return True
|
||||
|
||||
|
||||
def add_to_hdf(self): # dataH5=None, proc=True, raw=False):
|
||||
""" Abstract method to be overwritten by user
|
||||
def add_to_hdf(self, dataH5=None, proc=True, raw=False):
|
||||
""" Abstract method to be overwritten by user. Optional.
|
||||
"""
|
||||
|
||||
'''
|
||||
QM = QMessageBox()
|
||||
QM.setText(
|
||||
str(NotImplementedError("add_to_hdf method has not been " +
|
||||
@@ -1046,6 +1116,8 @@ class BaseWindow(QMainWindow):
|
||||
"icon from the application/config file."))
|
||||
)
|
||||
QM.exec()
|
||||
'''
|
||||
return
|
||||
|
||||
@Slot()
|
||||
def save_to_hdf(self):
|
||||
@@ -1085,8 +1157,9 @@ class BaseWindow(QMainWindow):
|
||||
print(_mess, flush=True)
|
||||
self.trigger_log_message.emit(
|
||||
MsgSeverity.WARN.name, _pymodule, _line(), _mess,
|
||||
{})
|
||||
{})
|
||||
return isOK
|
||||
|
||||
|
||||
if from_hdf:
|
||||
return isOK
|
||||
@@ -1134,8 +1207,11 @@ class BaseWindow(QMainWindow):
|
||||
user_dict['User'] = getpass.getuser()
|
||||
|
||||
if self.all_data is not None:
|
||||
time_in_seconds = self.all_data['Ambient data']['Time in seconds']
|
||||
now = datetime.fromtimestamp(time_in_seconds)
|
||||
if 'Time in seconds' in self.all_data['Ambient data']:
|
||||
time_in_seconds = self.all_data['Ambient data']['Time in seconds']
|
||||
now = datetime.fromtimestamp(time_in_seconds)
|
||||
else:
|
||||
now = datetime.now()
|
||||
else:
|
||||
now = datetime.now()
|
||||
|
||||
@@ -1163,23 +1239,20 @@ class BaseWindow(QMainWindow):
|
||||
""" This uses the widget interface to allow the user to enter
|
||||
additional meta-data
|
||||
"""
|
||||
|
||||
|
||||
if not self.verify_save_to_hdf():
|
||||
return False
|
||||
|
||||
input_options = OrderedDict()
|
||||
|
||||
#print(self.all_data['Ambient data'])
|
||||
#print(self.all_data['Processed data'])
|
||||
|
||||
#QCombobox if list
|
||||
#input_options['QComboBox'] = ['one', 'two', 'three']
|
||||
#input_options['Comment'] = 'Please enter a comment'
|
||||
if self.all_data is not None:
|
||||
|
||||
ts_in_seconds = self.all_data['Ambient data']['Time in seconds']
|
||||
now_in_seconds = self.all_data['Processed data'][
|
||||
'Reanalysis time in seconds'] if \
|
||||
self.all_data['Processed data']['Reanalysis time'] else None
|
||||
now_in_seconds = None
|
||||
if 'Reanalysis time in seconds' in self.all_data['Processed data']:
|
||||
if self.all_data['Processed data']['Reanalysis time']:
|
||||
now_in_seconds = self.all_data['Processed data'][
|
||||
'Reanalysis time in seconds']
|
||||
self.set_new_hdf_filename(ts_in_seconds, now_in_seconds)
|
||||
|
||||
input_options['Destination'] = self.hdf_filename
|
||||
@@ -1188,9 +1261,7 @@ class BaseWindow(QMainWindow):
|
||||
|
||||
self.hdf_dialog = QSaveHDF(self, input_options=input_options,
|
||||
from_dialog=True)
|
||||
#user_dict = self.hdf_dialog.get_data()
|
||||
#self.hdf_filename = user_dict['Destination']
|
||||
#print("filename", self.hdf_filename)
|
||||
|
||||
|
||||
def verify_send_to_elog(self):
|
||||
|
||||
@@ -1213,12 +1284,11 @@ class BaseWindow(QMainWindow):
|
||||
if self.save_hdf_thread.isRunning():
|
||||
return True
|
||||
|
||||
if not self.hdf_save_completed:
|
||||
if not self.hdf_save_completed and not self.from_hdf:
|
||||
_mess = ("Opening ELOG, but please note that data have not " +
|
||||
"been saved to HDF. " +
|
||||
"<br>Click on the HDF icon to do this if desired")
|
||||
QMessageBox.information(self, "ELOG", _mess,
|
||||
QMessageBox.Ok)
|
||||
QMessageBox.information(self, "ELOG", _mess, QMessageBox.Ok)
|
||||
return True
|
||||
|
||||
return True
|
||||
@@ -1227,7 +1297,7 @@ class BaseWindow(QMainWindow):
|
||||
def send_to_elog(self):
|
||||
""" Response to elog_action; normally overwritten
|
||||
"""
|
||||
if not self.verify_send_to_elog():
|
||||
if not self.verify_send_to_elog():
|
||||
return
|
||||
'''
|
||||
if self.analysis_thread is not None:
|
||||
@@ -1353,9 +1423,10 @@ class BaseWindow(QMainWindow):
|
||||
"""
|
||||
if self.all_data:
|
||||
#Data from hdf analysis - do not save to epics
|
||||
if self.all_data['Processed data']['Reanalysis time']:
|
||||
print("HDF RUN - data not written to epics")
|
||||
return False
|
||||
if 'Reanalysis time' in self.all_data['Processed data']:
|
||||
if self.all_data['Processed data']['Reanalysis time']:
|
||||
print("HDF RUN - data not written to epics")
|
||||
return False
|
||||
if self.all_data['Input data']['simulation']:
|
||||
return False
|
||||
else:
|
||||
@@ -1425,9 +1496,18 @@ class BaseWindow(QMainWindow):
|
||||
def send_to_epics(self, pv_dict: dict = None, pv_names: list = None,
|
||||
pv_values: list = None) -> (int, list):
|
||||
|
||||
|
||||
if pv_dict is not None:
|
||||
pv_values = []
|
||||
pv_names = list(pv_dict.keys())
|
||||
pv_values = list(pv_dict.values())
|
||||
for val in pv_dict.values():
|
||||
if isinstance(val, np.ndarray):
|
||||
pv_values.append(val.tolist())
|
||||
else:
|
||||
pv_values.append(val)
|
||||
print("SAVE TO EPICS", flush=True)
|
||||
print(pv_names, flush=True)
|
||||
print(pv_values, flush=True)
|
||||
else:
|
||||
if len(pv_names) != len(pv_values):
|
||||
_mess = ("len(pv_values)={0} does not match " +
|
||||
@@ -1443,7 +1523,17 @@ class BaseWindow(QMainWindow):
|
||||
self.cafe.open(pv_names)
|
||||
self.cafe.openNowAndWait(0.4)
|
||||
|
||||
status, status_list = self.cafe.setCompoundList(pv_names, pv_values)
|
||||
status = self.cyca.ICAFE_NORMAL
|
||||
status_list = []
|
||||
try:
|
||||
status, status_list = self.cafe.setCompoundList(pv_names, pv_values)
|
||||
except:
|
||||
print("Exception raised in cafe.setCompoundList", flush=True)
|
||||
status = self.cyca.ECAFE_BADTYPE
|
||||
for pv, val in zip(pv_names, pv_values):
|
||||
print("pv/val", pv, val, flush=True)
|
||||
status_list.append(self.ECAFE_BADTYPE)
|
||||
|
||||
|
||||
if status != self.cyca.ICAFE_NORMAL:
|
||||
ibad = 0
|
||||
@@ -1562,8 +1652,8 @@ class BaseWindow(QMainWindow):
|
||||
self.progressbar_abort = "abort"
|
||||
self.progressbar_color = self.progressbar_standard
|
||||
self.progressbar.setObjectName(self.progressbar_color)
|
||||
self.progressbar.setRange(PROGRESS_BAR_THREAD_START,
|
||||
PROGRESS_BAR_THREAD_END)
|
||||
self.progressbar.setRange(PROGRESS_THREAD_START,
|
||||
PROGRESS_THREAD_END)
|
||||
self.progressbar.setTextVisible(True)
|
||||
self.progressbar.setAlignment(Qt.AlignCenter)
|
||||
self.progressbar.setVisible(False)
|
||||
@@ -1635,7 +1725,7 @@ class BaseWindow(QMainWindow):
|
||||
_mess, QMessageBox.Ok)
|
||||
return
|
||||
|
||||
self.hdf_thread_started()
|
||||
#self.hdf_thread_started()
|
||||
self.statusbar.showMessage("Loading {0}".format(
|
||||
self.hdf_filename_loaded))
|
||||
self.trigger_progressbar_str.emit(
|
||||
@@ -1652,7 +1742,7 @@ class BaseWindow(QMainWindow):
|
||||
self.hdf_thread.trigger_thread_event.connect(
|
||||
self.receive_analysis_results)
|
||||
#procedure moved above
|
||||
#self.hdf_thread.started.connect(self.hdf_thread_started)
|
||||
self.hdf_thread.started.connect(self.hdf_thread_started)
|
||||
self.hdf_thread.finished.connect(self.hdf_thread_finished)
|
||||
|
||||
self.hdf_thread.start()
|
||||
@@ -1665,7 +1755,8 @@ class BaseWindow(QMainWindow):
|
||||
|
||||
@Slot()
|
||||
def start_analysis_thread(self):
|
||||
|
||||
'''Slot to self.start_wgt button trigger in guiframe.py
|
||||
'''
|
||||
if not self.analysis_procedure:
|
||||
mess = "Analysis thread not configured for this application"
|
||||
self.show_log_message(MsgSeverity.ERROR, _pymodule, _line(), mess)
|
||||
@@ -1699,10 +1790,11 @@ class BaseWindow(QMainWindow):
|
||||
self.analysis_thread.started.connect(self.analysis_thread_started)
|
||||
self.analysis_thread.finished.connect(self.analysis_thread_finished)
|
||||
|
||||
|
||||
|
||||
self.analysis_thread.start()
|
||||
QApplication.processEvents()
|
||||
|
||||
|
||||
@Slot()
|
||||
def analysis_thread_started(self):
|
||||
""" Change state of widgets when measuring
|
||||
@@ -1725,7 +1817,7 @@ class BaseWindow(QMainWindow):
|
||||
"""
|
||||
self.gui_frame.in_hdf_measurement_procedure()
|
||||
QApplication.processEvents()
|
||||
#print("Thread Started")
|
||||
|
||||
|
||||
@Slot()
|
||||
def hdf_thread_finished(self):
|
||||
@@ -1739,6 +1831,8 @@ class BaseWindow(QMainWindow):
|
||||
@Slot(dict)
|
||||
def receive_analysis_results(self, all_dict):
|
||||
self.all_data = all_dict
|
||||
print("self.all_data", self.all_data.keys(), flush=True)
|
||||
|
||||
self.gui_frame.canvas_update(all_dict['Figure data'])
|
||||
|
||||
if self.gui_frame.results_output_wgt_dict:
|
||||
@@ -1750,9 +1844,47 @@ class BaseWindow(QMainWindow):
|
||||
self.gui_frame.send_to_results_output_wgt(results_data)
|
||||
except:
|
||||
pass
|
||||
|
||||
#print("IDX+++", self.gui_frame.central_tab_widget.indexOf('Emittance'), flush=True)
|
||||
#print("IDX+++", self.gui_frame.level2_tab_wgt[0].indexOf('Plots'))
|
||||
#self.gui_frame.central_tab_widget.setCurrentIndex(1)
|
||||
|
||||
self.gui_frame.central_tab_widget.setCurrentIndex(1)
|
||||
self.gui_frame.results_tab_wgt.setCurrentIndex(0)
|
||||
self.gui_frame.results_tab_wgt.setCurrentIndex(0)
|
||||
|
||||
if "GUITree" in self.settings.data:
|
||||
|
||||
#for j in range(len(self.gui_frame.level1_tab_wgt)):
|
||||
j = self.gui_frame.central_tab_widget.currentIndex()
|
||||
for i in range(self.gui_frame.level1_tab_wgt[j].count()):
|
||||
print(j, i, self.gui_frame.level1_tab_wgt[j].tabText(i), flush=True)
|
||||
if self.gui_frame.level1_tab_wgt[j].tabText(i) == "Plots":
|
||||
self.gui_frame.level1_tab_wgt[j].setCurrentIndex(i)
|
||||
else:
|
||||
pass
|
||||
else:
|
||||
|
||||
for i in range(self.gui_frame.central_tab_widget.count()):
|
||||
print(i, self.gui_frame.central_tab_widget.tabText(i), flush=True)
|
||||
if self.gui_frame.central_tab_widget.tabText(i) == "Plots":
|
||||
self.gui_frame.central_tab_widget.setCurrentIndex(i)
|
||||
else:
|
||||
pass
|
||||
|
||||
for i in range(self.gui_frame.measurement_tab_wgt.count()):
|
||||
print(i, self.gui_frame.measurement_tab_wgt.tabText(i), flush=True)
|
||||
if self.gui_frame.measurement_tab_wgt.tabText(i) == "Plots":
|
||||
self.gui_frame.measurement_tab_wgt.setCurrentIndex(i)
|
||||
else:
|
||||
pass
|
||||
|
||||
for i in range(self.gui_frame.results_tab_wgt.count()):
|
||||
print(i, self.gui_frame.results_tab_wgt.tabText(i), flush=True)
|
||||
if self.gui_frame.results_tab_wgt.tabText(i) == "Plots":
|
||||
self.gui_frame.results_tab_wgt.setCurrentIndex(i)
|
||||
else:
|
||||
pass
|
||||
|
||||
print("receive_analysis_results=========================>", flush=True)
|
||||
|
||||
@Slot()
|
||||
def receive_abort_analysis(self):
|
||||
@@ -1763,7 +1895,7 @@ class BaseWindow(QMainWindow):
|
||||
self.gui_frame.in_abort_procedure()
|
||||
# Trigger abort signal to the analysis thread
|
||||
self.analysis_procedure.trigger_abort.emit()
|
||||
#self.trigger_progressbar.emit(PROGRESS_BAR_THREAD_ABORTING)
|
||||
#self.trigger_progressbar.emit(PROGRESS_THREAD_ABORTING)
|
||||
QApplication.processEvents()
|
||||
|
||||
|
||||
@@ -1786,19 +1918,19 @@ class BaseWindow(QMainWindow):
|
||||
except KeyError:
|
||||
pass
|
||||
|
||||
if value == PROGRESS_BAR_THREAD_INIT:
|
||||
if value == PROGRESS_THREAD_INIT:
|
||||
self.progressbar.setVisible(False)
|
||||
self.progressbar.setFormat("")
|
||||
self.progressbar.reset()
|
||||
self.progressbar.setObjectName(self.progressbar_color)
|
||||
self.statusbar.clearMessage()
|
||||
elif value == PROGRESS_BAR_THREAD_START:
|
||||
elif value == PROGRESS_THREAD_START:
|
||||
self.statusbar.clearMessage()
|
||||
self.progressbar.setFormat("Measurement started")
|
||||
self.progressbar.setValue(value)
|
||||
self.progressbar.setObjectName(self.progressbar_color)
|
||||
self.daq_analysis_completed = False
|
||||
elif value == PROGRESS_BAR_THREAD_ABORTING:
|
||||
elif value == PROGRESS_THREAD_ABORTING:
|
||||
self.progressbar.setFormat(
|
||||
"Aborting procedure at the next available break point")
|
||||
self.progressbar.setObjectName(self.progressbar_abort)
|
||||
@@ -1811,7 +1943,7 @@ class BaseWindow(QMainWindow):
|
||||
self.show_log_message(
|
||||
MsgSeverity.WARN.name, _pymodule, _line(), mess)
|
||||
#self.statusbar.showMessage(mess)
|
||||
elif value == PROGRESS_BAR_THREAD_ABORTED:
|
||||
elif value == PROGRESS_THREAD_ABORTED:
|
||||
self.progressbar.setFormat("Procedure aborted")
|
||||
self.progressbar.setObjectName(self.progressbar_abort)
|
||||
mess = "Measurement procedure aborted"
|
||||
@@ -1820,15 +1952,15 @@ class BaseWindow(QMainWindow):
|
||||
self.statusbar.showMessage(mess)
|
||||
self.daq_analysis_completed = False
|
||||
QTimer.singleShot(2000, lambda: self.trigger_progressbar.emit(
|
||||
PROGRESS_BAR_THREAD_INIT))
|
||||
elif value == PROGRESS_BAR_THREAD_ERROR:
|
||||
PROGRESS_THREAD_INIT))
|
||||
elif value == PROGRESS_THREAD_ERROR:
|
||||
mess = "Error in Thread. No data returned! See Log window"
|
||||
self.progressbar.setFormat(mess)
|
||||
self.progressbar.setObjectName(self.progressbar_abort)
|
||||
self.statusbar.showMessage(mess)
|
||||
QTimer.singleShot(10000, lambda: self.trigger_progressbar.emit(
|
||||
PROGRESS_BAR_THREAD_INIT))
|
||||
elif value == PROGRESS_BAR_THREAD_END:
|
||||
PROGRESS_THREAD_INIT))
|
||||
elif value == PROGRESS_THREAD_END:
|
||||
self.progressbar.setFormat("Measurement completed")
|
||||
self.progressbar.setValue(value)
|
||||
self.progressbar.setObjectName(self.progressbar_color)
|
||||
@@ -1864,6 +1996,9 @@ class BaseWindow(QMainWindow):
|
||||
elif facility == Facility.SLS:
|
||||
from pyqtacc.qrc_resources.facility.sls.pyrcc5 import qrc_resources
|
||||
print("FACILITY SLS")
|
||||
elif facility == Facility.HIPA:
|
||||
from pyqtacc.qrc_resources.facility.hipa.pyrcc5 import qrc_resources
|
||||
print("FACILITY HIPA")
|
||||
else:
|
||||
print("Unknown Facility; assuming SLS")
|
||||
from pyqtacc.qrc_resources.facility.sls.pyrcc5 import qrc_resources
|
||||
@@ -1919,6 +2054,12 @@ class BaseWindow(QMainWindow):
|
||||
width = 860 + (len(appname)-10)*15
|
||||
height = 220
|
||||
self.splash_screen.resize(width, height)
|
||||
|
||||
#Maybe useful at some point
|
||||
#pSplashNotice = QCheckBox(self.splash_screen);
|
||||
#pSplashNotice.setChecked(Qt.Checked)
|
||||
|
||||
|
||||
self.splash_progressbar = QProgressBar(self.splash_screen)
|
||||
self.splash_timer = QTimer()
|
||||
self.splash_screen.show()
|
||||
@@ -1951,6 +2092,7 @@ class BaseWindow(QMainWindow):
|
||||
seconds_remaining = '{:2d}'.format(int_seconds_remaining)
|
||||
self.splash_progressbar.setValue(val)
|
||||
self.processEvents()
|
||||
self.flush()
|
||||
sec_str = "s" if abs(int_seconds_remaining) != 1 else ""
|
||||
mess = """
|
||||
<br><p style='color:black; font-weight:bold;
|
||||
@@ -1990,3 +2132,59 @@ class BaseWindow(QMainWindow):
|
||||
<br></p>
|
||||
""".format(self.splash_appname), Qt.AlignCenter | Qt.AlignTop)
|
||||
self.splash_screen.finish(myapp)
|
||||
|
||||
|
||||
|
||||
def check_status_list(self, pymodule: str = _pymodule,
                      operation: str = "channel access",
                      pv_list: list = None, status_list: list = None,
                      line: int = _line()):
    """Log every channel-access error found in a list of PV statuses.

    Emits a framed section in the message log: a separator, a summary
    header, one WARN entry per PV whose status is not ICAFE_NORMAL
    (with decoded status code and status info), and a closing separator.

    Args:
        pymodule: Name of the reporting module (defaults to this module).
        operation: Description of the operation being checked; quoted in
            each per-device error message.
        pv_list: Process-variable names, parallel to ``status_list``.
        status_list: Status codes returned for each PV.
        line: Source line to report.  NOTE(review): the ``_line()``
            default is evaluated once at import time, so callers should
            pass their own line number explicitly.
    """
    if None in (pv_list, status_list):
        return

    brk = ("------------------------------------------------------" +
           "------------------------------------------------------")
    self.trigger_log_message.emit(
        MsgSeverity.INFO.name, pymodule, line, brk, {})

    # Announce the section *before* listing the failing devices (the
    # original emitted this header after the list, which read oddly).
    mess = ("The following devices reported an error " +
            "in channel access operation:")
    self.trigger_log_message.emit(
        MsgSeverity.INFO.name, pymodule, line, mess, {})

    for i, (pv, stat) in enumerate(zip(pv_list, status_list)):
        # BUG FIX: the original tested ``stat == self.cyca.ICAFE_NORMAL``
        # and therefore reported *healthy* channels as errors.  Only
        # abnormal statuses are errors (cf. check_status()).
        if stat != self.cyca.ICAFE_NORMAL:
            mess = "Error in '{0}' for element [{1}], {2}.".format(
                operation, i, pv)
            options = {}
            options['statusCode'] = (
                str(stat) + " " +
                self.cafe.getStatusCodeAsString(stat))
            options['statusInfo'] = self.cafe.getStatusInfo(stat)

            self.trigger_log_message.emit(
                MsgSeverity.WARN.name, pymodule, line, mess, options)

    self.trigger_log_message.emit(
        MsgSeverity.INFO.name, pymodule, line, brk, {})
|
||||
|
||||
|
||||
def check_status(self, pymodule: str = _pymodule,
                 operation: str = "channel access",
                 pv: str = None, stat: int = None,
                 line: int = _line()):
    """Log a channel-access error for a single PV, if its status is bad.

    Args:
        pymodule: Name of the reporting module (defaults to this module).
        operation: Description of the operation being checked; quoted in
            the error message.
        pv: Process-variable name.
        stat: Status code returned for the PV.
        line: Source line to report.  NOTE(review): the ``_line()``
            default is evaluated once at import time, so callers should
            pass their own line number explicitly.
    """
    # BUG FIX: the original guard read ``None in (pv, status)`` but the
    # parameter is named ``stat`` -- that raised NameError whenever this
    # method was called.
    if None in (pv, stat):
        return

    if stat != self.cyca.ICAFE_NORMAL:
        mess = "Error in '{0}' for {1}.".format(operation, pv)
        options = {}
        options['statusCode'] = (
            str(stat) + " " +
            self.cafe.getStatusCodeAsString(stat))
        options['statusInfo'] = self.cafe.getStatusInfo(stat)
        self.trigger_log_message.emit(
            MsgSeverity.WARN.name, pymodule, line, mess, options)
|
||||
|
||||
@@ -36,6 +36,7 @@ class UserMode(IntEnum):
|
||||
OPERATION = 1
|
||||
EXPERT = 2
|
||||
SIMULATION = 3
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
1547
guiframe.py
1547
guiframe.py
File diff suppressed because it is too large
Load Diff
327
h5_storage.py
Normal file
327
h5_storage.py
Normal file
@@ -0,0 +1,327 @@
|
||||
import getpass
|
||||
import time
|
||||
import re
|
||||
from functools import lru_cache
|
||||
import h5py
|
||||
import numpy as np
|
||||
|
||||
dt = h5py.special_dtype(vlen=bytes)
|
||||
numerical_types = (np.dtype('float64'), np.dtype('float32'), np.dtype('uint16'), np.dtype('uint64'), np.dtype('uint32'))
|
||||
|
||||
def stringDataset(group, name, data, system=None):
    """Create a length-1 variable-length-string dataset under *group*.

    Args:
        group: Open h5py group (or file) to create the dataset in.
        name: Dataset name.
        data: String content to store.
        system: Optional value for the dataset's 'system' attribute.

    Returns:
        The created h5py dataset.  (The original discarded the handle;
        returning it is backward compatible and lets callers attach
        further attributes.)
    """
    dset = group.create_dataset(name, (1,), dtype=dt, data=data)
    if system:
        addSystemAttribute(dset, system)
    return dset
|
||||
|
||||
def addStringAttribute(dset_or_group, name, data):
    """Attach the string *data* to *dset_or_group* as a UTF-8 bytes attribute."""
    encoded = data.encode('utf-8')
    dset_or_group.attrs[name] = encoded
|
||||
|
||||
def addSystemAttribute(dset_or_group, data):
    """Store *data* under the fixed attribute name 'system' on *dset_or_group*."""
    addStringAttribute(dset_or_group, 'system', data)
|
||||
|
||||
def add_dataset(group, name, data, system=None, dtype=None):
    """Store *data* as a dataset named *name* under *group*.

    Plain ``str`` values are delegated to :func:`stringDataset`.  For
    everything else the dataset is created directly; creation failures
    without an explicit *dtype* are reported on stdout and skipped.
    When creation succeeds and *system* is truthy, a 'system' attribute
    is attached.
    """
    if type(data) is str:
        # Strings need the variable-length string dtype.
        stringDataset(group, name, data, system)
        return

    dset = None
    if dtype:
        dset = group.create_dataset(name, data=data, dtype=dtype)
    else:
        try:
            dset = group.create_dataset(name, data=data)
        except Exception as e:
            # Best-effort: report and continue with the remaining data.
            print('Error for dataset %s' % name)
            print('Continuing')
            print(e)

    if dset is not None and system:
        addSystemAttribute(dset, system)
|
||||
|
||||
def saveH5Recursive(h5_filename, data_dict, dataH5=None):
    """Write a (possibly nested) dict of data to an HDF5 file.

    Dicts become HDF5 groups; leaves become datasets.  If *dataH5* is
    given it must be an already-open h5py handle and *h5_filename* is
    ignored; otherwise the file is created with mode 'w'.

    Args:
        h5_filename: Target file path (used only when ``dataH5 is None``).
        data_dict: Mapping of top-level group names to sub-dicts / data.
        dataH5: Optional open h5py File/Group to write into instead.
    """

    def recurse_save(group, dict_or_data, dict_or_data_name, new_group=None):
        # None leaves are stored as the literal string 'None'.
        if dict_or_data is None:
            dict_or_data = 'None'
        if group is None:
            print("'recurse_save' has been called with None")
            raise ValueError

        if type(dict_or_data) is dict:
            try:
                new_group = group.create_group(dict_or_data_name)
            except Exception as e:
                print("Error in group.create_group", str(e))
                return
            if new_group is None:
                raise ValueError
            for key, val in dict_or_data.items():
                try:
                    recurse_save(new_group, val, key)
                except ValueError:
                    print('I called recurse_save with None')
                    #import pdb; pdb.set_trace()

        else:
            mydata = dict_or_data
            inner_key = dict_or_data_name

            if type(mydata) is str:
                add_dataset(group, inner_key, mydata.encode('utf-8'), 'unknown')
            # BUG FIX: guard against an empty list (``mydata[0]`` raised
            # IndexError); empty lists fall through to the generic branch.
            elif (type(mydata) is list and len(mydata) > 0 and
                  type(mydata[0]) is str) or \
                    (hasattr(mydata, 'dtype') and mydata.dtype.type is np.str_):
                # For list of strings, we need this procedure
                if type(mydata[0]) is str:
                    mydata = np.array(mydata)
                    print("string to np.str", mydata)
                # (the original had a second, unreachable duplicate of the
                # condition above; it has been removed)
                try:
                    # BUG FIX: ``np.str`` was removed in NumPy 1.24; use
                    # ``np.str_`` so this check does not raise (the
                    # AttributeError was silently caught below).
                    if hasattr(mydata, 'dtype') and \
                            (mydata.dtype.type is np.str_ or
                             mydata.dtype.type is str) and len(mydata.shape) == 2:
                        mydata = mydata.flatten()
                    # Encode to fixed-width ASCII bytes; width is the longest
                    # string found.
                    if len(mydata.shape) == 2:
                        new_list = [[n.encode('ascii') for n in arr] for arr in mydata]
                        max_str_size = max(max(len(n) for n in arr) for arr in mydata)
                    elif len(mydata.shape) == 1:
                        new_list = [n.encode('ascii') for n in mydata]
                        max_str_size = max(len(n) for n in mydata)
                    elif len(mydata.shape) == 0:
                        new_list = [mydata.encode('ascii')]
                        max_str_size = len(new_list[0])
                    #print('Max len %i' % max_str_size)
                    dset = group.create_dataset(inner_key, mydata.shape, 'S%i' % max_str_size, new_list)
                    #print(np.array(dset))
                    dset.attrs.create('system', 'unknown', (1,), dtype=dt)

                except Exception as e:
                    # Best-effort diagnostics; the entry is skipped.
                    print('Exception:', e)
                    print('Error', inner_key)
                    print(type(mydata))
                    if type(mydata) is list:
                        print('type(mydata[0])')
                        print(type(mydata[0]))
                    print('len mydata shape=', len(mydata.shape))
                    print('mydata')
                    print(mydata)

            elif hasattr(mydata, 'dtype') and mydata.dtype == np.dtype('O'):
                # Object arrays: scalars and 1-D arrays go in directly;
                # 2-D arrays are stored element-wise as name_i_j datasets.
                if mydata.shape == ():
                    add_dataset(group, inner_key, mydata, 'unknown')
                elif len(mydata.shape) == 1:
                    add_dataset(group, inner_key, mydata, 'unknown')
                else:
                    for i in range(mydata.shape[0]):
                        for j in range(mydata.shape[1]):
                            try:
                                add_dataset(group, inner_key+'_%i_%i' % (i, j), mydata[i, j], 'unknown')
                            except Exception:
                                print('Error')
                                print(group, inner_key, i, j)
            else:
                try:
                    add_dataset(group, inner_key, mydata, 'unknown')
                except Exception as e:
                    print('Error', e)
                    print(inner_key, type(mydata))

    if dataH5 is None:
        with h5py.File(h5_filename, 'w') as dataH5:
            for main_key, subdict in data_dict.items():
                recurse_save(dataH5, subdict, main_key, None)
            print("h5_storage.py SAVED TO FILE", h5_filename, flush=True)
    else:
        print("data_dict keys", data_dict.keys())
        for main_key, subdict in data_dict.items():
            recurse_save(dataH5, subdict, main_key, None)
        print("h5_storage.py SAVED TO dataH5", flush=True)
        #recurse_save(dataH5, data_dict, 'none', new_group=dataH5)
|
||||
|
||||
|
||||
def loadH5Recursive(h5_file):
    """Load an HDF5 file (as written by :func:`saveH5Recursive`) into nested dicts.

    Groups become dicts; datasets are converted back to Python / NumPy
    values by dtype.  Entries whose dtype cannot be handled are reported
    on stdout and skipped.

    Args:
        h5_file: Path of the HDF5 file to read.

    Returns:
        dict: Nested dictionary mirroring the file's group structure.
        If the file has a top-level 'none' group (legacy layout), its
        contents are returned directly.
    """
    def recurse_load(group_or_val, key, saved_dict_curr):
        type_ = type(group_or_val)
        if type_ is h5py._hl.files.File:
            for new_key, new_group_or_val in group_or_val.items():
                recurse_load(new_group_or_val, new_key, saved_dict_curr)
        elif type_ is h5py._hl.group.Group:
            saved_dict_curr[key] = new_dict = {}
            for new_key, new_group_or_val in group_or_val.items():
                recurse_load(new_group_or_val, new_key, new_dict)
        elif type_ == np.dtype('O') and type(group_or_val[()]) is bytes:
            saved_dict_curr[key] = group_or_val[()].decode()
        elif type_ == h5py._hl.dataset.Dataset:
            dtype = group_or_val.dtype
            #if not hasattr(group_or_val, 'value'):
            #    print('Could not store key %s with type %s in dict' % (key, dtype))
            #    return
            if dtype in (np.dtype('int64'), np.dtype('int32'), np.dtype('int16'), np.dtype('int8')):
                saved_dict_curr[key] = np.array(group_or_val[()], int).squeeze()
            elif dtype == np.dtype('bool'):
                # BUG FIX: was a bare ``except:`` which also swallowed
                # KeyboardInterrupt/SystemExit.
                try:
                    saved_dict_curr[key] = bool(group_or_val[()])
                except Exception:
                    print('Could not store key %s with type %s in dict (1)' % (key, dtype))
            elif dtype in numerical_types:
                saved_dict_curr[key] = np.array(group_or_val[()]).squeeze()
            elif dtype.str.startswith('|S'):
                # Fixed-width byte strings: decode scalars / singletons to
                # str, everything else to a list of str.
                if group_or_val[()].shape == (1, 1):
                    saved_dict_curr[key] = group_or_val[()][0, 0].decode()
                elif group_or_val[()].shape == (1,):
                    saved_dict_curr[key] = group_or_val[()][0].decode()
                elif group_or_val[()].shape == ():
                    saved_dict_curr[key] = group_or_val[()].decode()
                else:
                    saved_dict_curr[key] = [x.decode() for x in group_or_val[()].squeeze()]
            elif dtype.str == '|O':
                saved_dict_curr[key] = group_or_val[()]
            elif type(group_or_val[()]) is str:
                saved_dict_curr[key] = group_or_val[()]
            else:
                print('Could not store key %s with type %s in dict (2)' % (key, dtype))
        else:
            print('Could not store key %s with type %s in dict (3)' % (key, type_))

    saved_dict = {}
    with h5py.File(h5_file, 'r') as f:
        if 'none' in f:
            recurse_load(f['none'], 'key', saved_dict)
            saved_dict = saved_dict['key']
        else:
            recurse_load(f, 'key', saved_dict)
    return saved_dict
|
||||
|
||||
def save_h5_new(saved_dict, h5_file):
    """Write an emittance-measurement result dict to *h5_file* in the
    'scan 1' HDF5 layout (general / experiment / scan 1 groups).

    Expects *saved_dict* to contain at least 'Raw_data' (with an 'image'
    array of shape (n_measurements, n_images, ...)), 'Input' and
    'Meta_data'; 'Magnet_data' is optional.  -- assumes this schema from
    the keys accessed below; TODO confirm against the caller.
    """

    def recurse_save(dict_, group, system):
        # Recursively mirror dict_ into HDF5: dicts -> groups,
        # ndarrays/str -> datasets tagged with *system*.
        print('recurse', dict_.keys())
        for key, subdict_or_data in dict_.items():
            type_ = type(subdict_or_data)
            print(key, type_)
            if type_ is dict:
                new_group = group.create_group(key)
                recurse_save(subdict_or_data, new_group, system)
            elif type_ is np.ndarray:
                add_dataset(group, key, subdict_or_data, system)
            elif type_ is str:
                add_dataset(group, key, subdict_or_data, system, dtype=dt)
            else:
                raise ValueError(key, type_)

    # NOTE(review): these lru_cache'd closures are recreated on every call
    # to save_h5_new, so the cache only spans one invocation.
    @lru_cache()
    def re_axis(x):
        # Pattern for per-(measurement, image) axis keys, e.g. gr_x_axis_0_1.
        return re.compile(r'gr_%s_axis_(\d+)_(\d+)' % x)

    @lru_cache()
    def re_gauss_function(x):
        # Pattern for per-(measurement, image) Gaussian-fit keys.
        return re.compile(r'gr_%s_fit_gauss_function_(\d+)_(\d+)' % x)

    n_measurements, n_images = saved_dict['Raw_data']['image'].shape[:2]

    # Create arrays for gr / slice values, that differ in size for different n_measurements, n_images
    gr_x_shape_max = -1
    gr_y_shape_max = -1
    for key, data in sorted(saved_dict['Raw_data'].items()):
        if key.startswith('gr_x_axis'):
            gr_x_shape_max = max(gr_x_shape_max, data.shape[0])
        elif key.startswith('gr_y_axis'):
            gr_y_shape_max = max(gr_y_shape_max, data.shape[0])

    # NaN-padded containers sized to the longest axis seen above.
    gr_x_axis = np.zeros([n_measurements, n_images, gr_x_shape_max])*np.nan
    gr_y_axis = np.zeros([n_measurements, n_images, gr_y_shape_max])*np.nan
    gr_x_fit_gauss_function = gr_x_axis.copy()
    gr_y_fit_gauss_function = gr_y_axis.copy()

    # Scatter each per-(measurement, image) key into its container.
    for key, data in sorted(saved_dict['Raw_data'].items()):
        for arr, regex in [
                (gr_x_axis, re_axis('x')),
                (gr_y_axis, re_axis('y')),
                (gr_x_fit_gauss_function, re_gauss_function('x')),
                (gr_y_fit_gauss_function, re_gauss_function('y')),
                ]:
            match = regex.match(key)
            if match is not None:
                #print(key, 'matches', regex)
                n_measurement, n_image = map(int, match.groups())
                arr[n_measurement, n_image, :len(data)] = data
                # NOTE(review): ``continue`` here only moves to the next
                # regex, not the next key; presumably ``break`` was
                # intended -- confirm (results are unaffected because at
                # most one pattern matches a given key).
                continue

    with h5py.File(h5_file, 'w') as f:
        # /general: provenance metadata.
        general = f.create_group('general')
        stringDataset(general, 'user', getpass.getuser())
        stringDataset(general, 'application', 'EmittanceTool')
        stringDataset(general, 'author', 'Philipp Dijkstal and Eduard Prat')
        stringDataset(general, 'created', time.ctime())

        # /experiment: machine conditions (laser rep rate via EPICS,
        # NaN if channel access is unavailable).
        experiment = f.create_group('experiment')
        try:
            from epics import caget
            lrr = float(caget('SIN-TIMAST-TMA:Beam-Exp-Freq-RB'))
        except Exception as e:
            print('Could not obtain Laser rep rate!')
            print(e)
            lrr = np.nan
        add_dataset(experiment, 'Laser rep rate', lrr, 'unknown')
        # TBD: save snapshot here

        scan1 = f.create_group('scan 1')

        # /scan 1/method: scan bookkeeping plus the application input.
        method = scan1.create_group('method')
        method.create_dataset('records', data=[float(n_measurements)])
        method.create_dataset('samples', data=[float(n_images)])
        method.create_dataset('dimension', data=[1])
        stringDataset(method, 'type', 'Line scan')
        recurse_save(saved_dict['Input'], method, 'Application Input')

        # /scan 1/data/<profile monitor>: measurement data.
        data = scan1.create_group('data')

        screen = data.create_group(saved_dict['Input']['Profile monitor'])
        recurse_save(saved_dict['Meta_data'], screen, 'Emittance data')

        # Raw data not covered by the per-(measurement, image) patterns
        # is written through as-is.
        for key, data_ in sorted(saved_dict['Raw_data'].items()):
            if not any([x.match(key) for x in [re_axis('x'), re_axis('y'), re_gauss_function('x'), re_gauss_function('y')]]):
                add_dataset(screen, key, data_, 'Camera')
                #print('Created %s' % key)

        # The aggregated containers are only written if at least one
        # entry was filled (all-NaN means no matching keys existed).
        if not np.all(np.isnan(gr_x_axis)):
            add_dataset(screen, 'gr_x_axis', gr_x_axis, 'Camera')
        else:
            print('gr_x_axis is nan')
        if not np.all(np.isnan(gr_y_axis)):
            add_dataset(screen, 'gr_y_axis', gr_y_axis, 'Camera')
        else:
            print('gr_y_axis is nan')
        if not np.all(np.isnan(gr_x_fit_gauss_function)):
            add_dataset(screen, 'gr_x_fit_gauss_function', gr_x_fit_gauss_function, 'Camera')
        else:
            print('gr_x_fit_gauss_function is nan')
        if not np.all(np.isnan(gr_y_fit_gauss_function)):
            add_dataset(screen, 'gr_y_fit_gauss_function', gr_y_fit_gauss_function, 'Camera')
        else:
            print('gr_y_fit_gauss_function is nan')

        # Magnet settings per actuator; required unless this was a dry run.
        if 'Magnet_data' in saved_dict:
            for n_magnet, magnet in enumerate(saved_dict['Magnet_data']['Magnets']):
                mag_group = method.create_group('actuators/%s' % magnet)
                add_dataset(mag_group, 'K', saved_dict['Magnet_data']['K'][n_magnet], 'Magnet')
                add_dataset(mag_group, 'I-SET', saved_dict['Magnet_data']['I-SET'][n_magnet], 'Magnet')
        elif not saved_dict['Input']['Dry run'] in (np.array(False), False):
            raise ValueError('No magnet data')
        else:
            print('Magnet data not saved.')
|
||||
|
||||
@@ -220,7 +220,6 @@ class QSaveHDF(QDialog):
|
||||
|
||||
|
||||
def get_data(self):
    """Collect the dialog's current field values into ``self.user_dict``.

    Copies the application label text, the author line edit and the
    plain text of the comment box into the 'Application', 'User' and
    'Comment' keys of ``self.user_dict``.
    """
    self.user_dict['Application'] = self.applicationLabel.text()
    self.user_dict['User'] = self.author.text()
    self.user_dict['Comment'] = self.comment.document().toPlainText()
|
||||
|
||||
@@ -4,9 +4,9 @@ import os
|
||||
import time
|
||||
|
||||
from qtpy.QtCore import Qt
|
||||
from qtpy.QtWidgets import (QComboBox, QDialog, QFileDialog, QHBoxLayout,
|
||||
QLabel, QLineEdit, QPushButton, QTextEdit,
|
||||
QVBoxLayout)
|
||||
from qtpy.QtWidgets import (
|
||||
QApplication, QComboBox, QDialog, QFileDialog, QHBoxLayout, QLabel,
|
||||
QLineEdit, QPushButton, QTextEdit, QVBoxLayout)
|
||||
|
||||
import elog # https://github.com/paulscherrerinstitute/py_elog
|
||||
from pyqtacc.bdbase.enumkind import MsgSeverity
|
||||
@@ -212,7 +212,7 @@ class QSendToELOGFrame(QDialog):
|
||||
self.attributes['When'] = str(time.time())
|
||||
self.attributes['Wann'] = str(time.time())
|
||||
|
||||
|
||||
QApplication.processEvents()
|
||||
|
||||
if self.attachFile is not None:
|
||||
_attachFile = []
|
||||
@@ -227,15 +227,18 @@ class QSendToELOGFrame(QDialog):
|
||||
self.files.append(str(_attachFile[i]))
|
||||
elif "/sls/bd/data/" in _attachFile[i]:
|
||||
self.files.append(str(_attachFile[i]))
|
||||
elif "/sf/data/" in _attachFile[i]:
|
||||
self.files.append(str(_attachFile[i]))
|
||||
else:
|
||||
self.files.append(self.destination + str(_attachFile[i]))
|
||||
QApplication.processEvents()
|
||||
|
||||
el = self.elog_items.currentText()
|
||||
|
||||
url = self.parent.settings.data["ElogBooks"][el]["url"]
|
||||
|
||||
self.logbook = elog.open(url, user='robot', password='robot')
|
||||
|
||||
QApplication.processEvents()
|
||||
|
||||
try:
|
||||
if self.files:
|
||||
@@ -307,18 +310,24 @@ class QSendToELOGFrame(QDialog):
|
||||
#find layout items
|
||||
layout_items = []
|
||||
layout_items_optional = []
|
||||
print("logbook", logbook)
|
||||
try:
|
||||
layout_items = list(self.parent.settings.data[
|
||||
"ElogBooks"][logbook]['Required'].keys())
|
||||
except KeyError:
|
||||
pass
|
||||
|
||||
print("logbook- items", layout_items)
|
||||
|
||||
try:
|
||||
layout_items_optional = list(self.parent.settings.data[
|
||||
"ElogBooks"][logbook]['Optional'].keys())
|
||||
except KeyError:
|
||||
pass
|
||||
|
||||
layout_items.extend(layout_items_optional)
|
||||
print("logbook- optional", layout_items_optional)
|
||||
if layout_items_optional:
|
||||
layout_items.extend(layout_items_optional)
|
||||
|
||||
return layout_items
|
||||
|
||||
|
||||
12
utils.py
Normal file
12
utils.py
Normal file
@@ -0,0 +1,12 @@
|
||||
from inspect import currentframe
|
||||
|
||||
def _line():
|
||||
"""Macro to return the current line number.
|
||||
|
||||
The current line number within the file is used when
|
||||
reporting messages to the message logging window.
|
||||
|
||||
Returns:
|
||||
int: Current line number.
|
||||
"""
|
||||
return currentframe().f_back.f_lineno
|
||||
Reference in New Issue
Block a user