start incorporating correlation analysis

commit 92c0d889cc
parent 9a62dc88c6
2024-09-14 19:34:24 +02:00
4 changed files with 231 additions and 78 deletions

View File

@@ -92,8 +92,8 @@ print(y1[0:50], y2[0:50] )
]
y2= [ i*5 for i in y2]
'''
'''
f1 = hdf.File('/hipa/bd/data/measurements/Tina_2024-08-07_10:41:04.h5','a')
f1 = hdf.File('/hipa/bd/data/measurements/Tina_2024-07-03_15:44:14.h5','a')  # previously Tina_2024-08-07_10:41:04.h5
grp = f1.require_group('Raw_data')
if 't' in grp:
del grp['t']
@@ -109,8 +109,6 @@ dset_t = grp.create_dataset("t", data=t)
dset_t_idx = grp.create_dataset("t_idx", data=idx)
print(dset_y1.name)
f1.close()
'''
idx = np.array(idx) - t_offset*t_stepsize
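The commented-out block above stores the raw traces with h5py. A minimal, self-contained sketch of the same round trip, assuming hdf is h5py (the file name and data here are illustrative):

import h5py as hdf
import numpy as np

t = np.arange(10) * 1.9750043e-8                       # illustrative time axis (s)
with hdf.File('example_measurement.h5', 'a') as f1:    # hypothetical file name
    grp = f1.require_group('Raw_data')
    if 't' in grp:                                     # replace an existing dataset cleanly
        del grp['t']
    dset_t = grp.create_dataset('t', data=t)
    print(dset_t.name)                                 # -> /Raw_data/t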

View File

@@ -3,6 +3,7 @@ Analysis class
"""
from datetime import datetime
import inspect
import logging
import os
from statistics import mean
import time
@@ -47,20 +48,35 @@ class AnalysisProcedure(QObject):
self.logger.debug("Logging activated in analysis procedure")
self.abort = False
#Package the data as so:
self.all_data = {}
self.all_data['Input data'] = {}
self.all_data['Ambient data'] = {}
self.all_data['Application Raw data'] = {}
self.all_data['Processed data'] = {}
self.all_data['Figure data'] = {}
self.all_data['Raw data'] = {}
self.raw_data = {}
self.trigger_abort.connect(self.receive_abort)
#Declare input parameters
self.input_data = None
self.debug = False
self.log_level = logging.INFO
self.simulation = True
self.facility = None
self.maxmin = None
self.maximize = None
self.N_events = None
self.N_points = None
self.checkbox = None
self.accelerator = 'Cyclotron'
self.harmonic_no = 6
self.N_turns = None
#self.t_stepsize = 0.000000019750043  # earlier hard-coded values (also 0.00000002)
self.rf_freq = 50.6328  # RF frequency in MHz; multiplied by 10**6 where Hz are needed
self.t_stepsize = 1/(50.6328*10**6)  # seconds per sample (one RF period)
self.dTcable = 44  # cable delay in ns
self.dNpickup = 1  # pickup offset in turns
#Turn off DEBUG for MATLAB
mat_logger = logging.getLogger('matplotlib')
mat_logger.setLevel(logging.ERROR)
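A quick numerical check of the defaults above: t_stepsize is one RF period, which reproduces the previously hard-coded value kept in the comment.

rf_freq = 50.6328                    # MHz, default used in __init__
t_stepsize = 1 / (rf_freq * 10**6)   # seconds per sample
print(t_stepsize)                    # ~1.9750043e-08, i.e. 0.000000019750043 s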
@Slot()
def receive_abort(self):
@@ -72,7 +88,7 @@ class AnalysisProcedure(QObject):
def aborting(self, line_no):
self.abort = False
mess = "Measurement aborted"
#mess = "Measurement aborted"
self.parent.trigger_progressbar.emit(PROGRESS_BAR_THREAD_ABORTED)
@@ -88,39 +104,53 @@ class AnalysisProcedure(QObject):
if self.debug:
self.logger.debug("INPUT DATA to LOG:{0}".format(self.input_data))
self.simulation = bool(self.input_data['simulation'])
print("INPUT:", self.input_data)
#self.facility = self.input_data['facility']
try:
try:
self.accelerator = self.input_data['accelerator']
print("self.accelerator -1 ", self.accelerator, flush=True)
except KeyError as ex:
self.logger.debug("KeyError {0}".format(ex))
print("self.accelerator-2", self.accelerator, flush=True)
try:
self.accelerator = self.input_data['qtabdata']
print("self.accelerator-3", self.accelerator, flush=True)
except KeyError as ex:
self.logger.debug("KeyError {0}".format(ex))
try:
self.accelerator_selected = self.input_data['qtabdata']
print("self.accelerator-F", self.accelerator, flush=True)
self.harmonic_no = float(
self.input_data[self.accelerator_selected]['harmonic'])
self.rf_fref = float(
self.input_data[self.accelerator_selected]['freqrf'])
self.input_data[self.accelerator]['harmonic'])
self.rf_freq = float(
self.input_data[self.accelerator]['freqrf'])
self.dTcable = int(
self.input_data[self.accelerator_selected]['deltaTcable'])
self.input_data[self.accelerator]['deltaTcable'])
self.dNpickup = int(
self.input_data[self.accelerator_selected]['deltaNpickup'])
self.input_data[self.accelerator]['deltaNpickup'])
print("logging info level==>", self.logger.getEffectiveLevel(),
flush=True)
self.loglevel = self.input_data['loggingLevel']
self.logger.setLevel(logging.getLevelName(self.loglevel))
print("logging info level==>", self.logger.getEffectiveLevel(),
flush=True)
self.logger.debug("INPUT PARAMETERS")
self.logger.info("INPUT PARAMETERS")
self.logger.debug("Accelerator: {0}".format(
self.accelerator_selected))
self.logger.debug("Simulation {0}".format(self.simulation))
self.logger.debug("Harmonic No. {0}".format(self.harmonic_no))
self.logger.debug("RF Frequency (Hz) {0}".format(self.rf_freq))
self.logger.debug("dT Cable {0}".format(self.dTcable))
self.logger.debug("dN Pickup {0}".format(self.dNpickup))
self.logger.info("Accelerator: {0}".format(self.accelerator))
self.logger.info("Simulation {0}".format(self.simulation))
self.logger.info("Harmonic No. {0}".format(self.harmonic_no))
self.logger.info("RF Frequency (Hz) {0}".format(self.rf_freq))
self.logger.info("dT Cable {0}".format(self.dTcable))
self.logger.info("dN Pickup {0}".format(self.dNpickup))
except KeyError as ex:
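The nested try/except above falls back from the new 'accelerator' key to the legacy 'qtabdata' key (renamed in the settings file in this commit). The same lookup as a compact sketch with dict.get:

input_data = {'qtabdata': 'Cyclotron'}   # example payload carrying only the legacy key
accelerator = input_data.get('accelerator',
                             input_data.get('qtabdata', 'Cyclotron'))
print(accelerator)                       # -> 'Cyclotron'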
@@ -140,14 +170,12 @@ class AnalysisProcedure(QObject):
self.parent.trigger_log_message.emit(
MsgSeverity.INFO.name, _pymodule, utils.line_no(), mess, {})
return None
#Read the input parameters from the GUI
self.initialize_input_parameters(input_data)
#Step 1 - Collect ambient data relate to the machine
ambient_data = self.collect_ambient_data()
self.all_data['Ambient data'] = self.collect_ambient_data()
self.parent.trigger_progressbar.emit(PROGRESS_BAR_THREAD_START)
#Step 2 - Perform measurement and return data for processing
@@ -157,39 +185,56 @@ class AnalysisProcedure(QObject):
if self.raw_data is None:
self.parent.trigger_progressbar.emit(PROGRESS_BAR_THREAD_ERROR)
return None
self.all_data['Application Raw data'] = self.raw_data
#Step 3 - Process the raw data
proc_data = self.process(ambient_data)
self.all_data['Processed data'] = self.process()
#Step 4 - Provide plots
fig_data = self.make_figs(ambient_data, proc_data)
self.all_data['Figure data'] = self.make_figs()
#Step 5 - Package to all_data dictionary
all_data = self.combine_data(ambient_data, proc_data, fig_data)
print("all data", all_data)
#all_data = self.combine_data(self.all_data['Ambient data'], proc_data, fig_data)
#print("all data", all_data)
self.parent.trigger_progressbar.emit(PROGRESS_BAR_THREAD_END)
return all_data
return self.all_data
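After a successful run() the results live in a single dictionary, self.all_data, instead of the separate ambient_data/proc_data/fig_data variables used before. A sketch of its layout as assembled above (the sub-dictionaries are placeholders):

all_data = {
    'Input data':           {},   # GUI parameters handed to initialize_input_parameters()
    'Ambient data':         {},   # timestamp information from collect_ambient_data()
    'Application Raw data': {},   # raw signals returned by measure()
    'Processed data':       {},   # e.g. {'nturns': ...} from process()
    'Figure data':          {},   # matplotlib output of make_figs()
    'Raw data':             {},   # legacy container (see the commented-out packaging in tina.py)
}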
def load_hdf_file(self, hdf_filename_loaded):
print("raw_data==>", flush=True)
print("raw_data==>", hdf_filename_loaded, flush=True)
raw_data = h5_storage.loadH5Recursive(hdf_filename_loaded)
self.raw_data = raw_data
print("loadH5Recursive", self.raw_data)
return self.raw_data
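loadH5Recursive comes from the h5_storage helper module. As a rough idea of what a recursive load produces, here is a generic h5py sketch (not the actual h5_storage implementation; the file name is hypothetical):

import h5py

def load_h5_recursive(node):
    # Walk groups depth-first and return nested dicts of dataset contents.
    out = {}
    for key, item in node.items():
        if isinstance(item, h5py.Group):
            out[key] = load_h5_recursive(item)
        else:                                  # h5py.Dataset
            out[key] = item[()]                # read the full dataset into memory
    return out

with h5py.File('example_measurement.h5', 'r') as fh:
    raw_data = load_h5_recursive(fh)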
def reanalyze(self, all_data):
print("reanalyze", flush=True)
print(all_data)
input_data = all_data['Input data']
ambient_data = all_data['Ambient data']
self.raw_data = all_data['Raw data']
proc_data = self.process(ambient_data, from_hdf5=True)
fig_data = self.make_figs(ambient_data, proc_data)
input_data = all_data['Input_data']
#Read the input parameters
self.initialize_input_parameters(input_data)
print("initiliaze", flush=True)
ambient_data = all_data['Ambient_data']
self.raw_data = all_data['Raw_data']
ambient_data['Time in seconds'] = int(ambient_data['Time in seconds'])
self.parent.from_hdf = True
proc_data = self.process(from_hdf5=True)
fig_data = self.make_figs()
all_data_new = self.combine_data(ambient_data, proc_data,
fig_data)
self.parent.trigger_progressbar.emit(PROGRESS_BAR_THREAD_END)
return(all_data_new)
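reanalyze() expects the HDF-loaded dictionary to use the underscore key names written at save time ('Input_data', 'Ambient_data', 'Raw_data'), unlike the space-separated keys used in memory. A sketch of the minimum structure it reads (values are illustrative):

all_data_from_hdf = {
    'Input_data':   {'simulation': 1},                       # parameters for initialize_input_parameters()
    'Ambient_data': {'Time in seconds': 1723020064,          # cast back to int above
                     'Time stamp': '2024-08-07 10:41:04'},
    'Raw_data':     {'y1': [0.0, 0.1], 'y2': [0.0, 0.1]},    # signals consumed by unpack_hdf_data()
}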
@@ -222,8 +267,9 @@ class AnalysisProcedure(QObject):
self.parent.trigger_progressbar.emit(PROGRESS_BAR_THREAD_ERROR)
return {}
ambient_data = {
'Time in seconds': time_in_seconds,
'Time in seconds': int(time_in_seconds),
'Time stamp': time_stamp,
}
@@ -233,8 +279,10 @@ class AnalysisProcedure(QObject):
return ambient_data
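collect_ambient_data() returns the timestamp pair shown above; the construction of time_in_seconds and time_stamp sits outside this hunk, so the following is only a standard-library sketch of plausible values:

import time
from datetime import datetime

time_in_seconds = int(time.time())                            # epoch seconds, stored as int above
time_stamp = datetime.now().strftime('%Y-%m-%d %H:%M:%S')     # format is an assumption
ambient_data = {'Time in seconds': time_in_seconds, 'Time stamp': time_stamp}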
def measure(self):
for i in range (1, 100):
self.parent.from_hdf = False
for i in range(1, 90):
if i%10 == 0:
self.parent.trigger_progressbar.emit(i)
time.sleep(0.1)
@@ -250,10 +298,47 @@ class AnalysisProcedure(QObject):
return raw_data
def process(self, ambient_data, from_hdf5=False):
def unpack_hdf_data(self):
self.analytic_signal_1 = self.raw_data['y1']
self.analytic_signal_2 = self.raw_data['y2']
def process(self, from_hdf5=False):
self.parent.trigger_progressbar.emit(PROGRESS_BAR_THREAD_START)
if from_hdf5:
self.unpack_hdf_data()
self.mean_amplitude_envelope_1 = np.mean(self.analytic_signal_1,
keepdims=True)
self.mean_amplitude_envelope_2 = np.mean(self.analytic_signal_2,
keepdims=True)
self.normalized_amplitude_envelope_1 = (
self.analytic_signal_1 - self.mean_amplitude_envelope_1)
self.normalized_amplitude_envelope_2 = (
self.analytic_signal_2 - self.mean_amplitude_envelope_2)
corr_full = signal.correlate(
self.normalized_amplitude_envelope_2,
self.normalized_amplitude_envelope_1, mode='full', method='auto')
lags_full_array = signal.correlation_lags(
len(self.normalized_amplitude_envelope_2),
len(self.normalized_amplitude_envelope_1), mode='full')
self.lag_full = lags_full_array[np.argmax(corr_full)]
self.delay = self.lag_full * self.t_stepsize
print(self.delay, flush=True)
print(self.dTcable, flush=True)
print(self.rf_freq, flush=True)
print(self.harmonic_no, flush=True)
print(self.dNpickup, flush=True)
self.N_turns = ((self.delay - self.dTcable*1e-9)
                * self.rf_freq*1e6 / self.harmonic_no) - self.dNpickup
print("lag = {0}, delay = {1}, nturns={2}".format(
self.lag_full, self.delay, self.N_turns))
####envelope
duration = 1.0
fs = 400.0 #400.0
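The correlation step above finds the lag that maximizes the cross-correlation of the two mean-subtracted signals, converts it to a time delay with t_stepsize, and then to a number of turns. A self-contained sketch of the same recipe on synthetic data (the 300-sample shift is arbitrary; the constants mirror the defaults in __init__):

import numpy as np
from scipy import signal

rng = np.random.default_rng(0)
y1 = rng.normal(size=5000)
y2 = np.roll(y1, 300) + 0.1*rng.normal(size=5000)   # delayed, slightly noisier copy

env1 = y1 - np.mean(y1)                             # mean removal, as above
env2 = y2 - np.mean(y2)

corr = signal.correlate(env2, env1, mode='full', method='auto')
lags = signal.correlation_lags(len(env2), len(env1), mode='full')
lag = lags[np.argmax(corr)]                         # expected: 300 samples

t_stepsize = 1/(50.6328*10**6)                      # seconds per sample
dTcable, rf_freq, harmonic_no, dNpickup = 44, 50.6328, 6, 1
delay = lag*t_stepsize
N_turns = ((delay - dTcable*1e-9)*rf_freq*1e6/harmonic_no) - dNpickup
print("lag = {0}, delay = {1}, nturns = {2}".format(lag, delay, N_turns))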
@@ -289,11 +374,11 @@ class AnalysisProcedure(QObject):
self.clock = np.arange(64, len(self.sig), 128)
nturns = 180
proc_data = {"nturns": nturns}
proc_data = {"nturns": self.N_turns}
return proc_data
def make_figs(self, ambient_data, proc_data):
def make_figs(self):
fig, (ax_orig, ax_envelope, ax_noise, ax_corr) = plt.subplots(4, 1, sharex=True) #True
ax_orig.plot(self.sig)
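make_figs() now takes no arguments and builds a four-panel figure. A minimal layout sketch with synthetic data (the per-panel content here is a placeholder, chosen only to exercise the shared x-axis):

import numpy as np
import matplotlib.pyplot as plt

sig = np.sin(np.linspace(0, 20, 400))        # placeholder signal
fig, (ax_orig, ax_envelope, ax_noise, ax_corr) = plt.subplots(4, 1, sharex=True)
ax_orig.plot(sig)
ax_envelope.plot(np.abs(sig))
ax_noise.plot(sig - sig.mean())
ax_corr.plot(np.correlate(sig, sig, mode='full'))
fig.tight_layout()
plt.show()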

View File

@@ -34,7 +34,9 @@
"Parameters":{
"facility": {"flag": 0, "data" : {"widget": "QComboBox", "text" : "Facility:",
"link": ["HIPA"],"layout" : "Horizontal"}},
"qtabdata" : {"flag" : 1, "data":{ "widget": "QTabWidget", "text" : "Accelerator: ", "link" : "QTabAccelerator", "value" : 1, "color" : ["#0080aa", "#0000ff"]}}
"accelerator" : {"flag" : 1, "data":{ "widget": "QTabWidget", "text" : "Accelerator: ",
"link" : "QTabAccelerator", "value" : 1,
"color" : ["#0080aa", "#0000ff"]}}
},
"Expert":{

112
tina.py
View File

@@ -44,8 +44,35 @@ class StartMain(BaseWindow):
self.appname = _appname
self.source_file = _abspath #required for HDF
self.elog_enum = ElogHIPA()
self.accelerator = 'Cyclotron'
#self.from_hdf = False in base class
self.message = ""
self.gui = AppGui(self)
def prepare_results_message(self):
"""Prepare results message
"""
self.no_turns = self.all_data["Processed data"]["nturns"]
try:
self.accelerator = self.all_data["Input data"]["accelerator"]
except KeyError as ex:
self.logger.debug("KeyError {0}".format(ex))
try:
self.accelerator = self.all_data["Input data"]['qtabdata']
except KeyError as ex:
self.logger.debug("KeyError {0}".format(ex))
_mess = "Reanalysis from HDF5. " if self.from_hdf else ""
self.message = (_mess +
"The number of turns measured in the {0} = {1} ({2:.2f})".format(
self.accelerator, int(self.no_turns), self.no_turns))
def prepare_elog_message(self):
"""Define elog parameters and define message
"""
@@ -54,7 +81,8 @@ class StartMain(BaseWindow):
self.system_idx = self.elog_enum.system.BEAMDYNAMICS
self.eintrag_idx = self.elog_enum.eintrag.INFO
self.ort_idx = self.elog_enum.ort.RING_CYCLOTRON #else INJECTOR2
self.ort_idx = self.elog_enum.ort.RING_CYCLOTRON if \
'Cyclotron' in self.accelerator else self.elog_enum.ort.INJECTOR2
self.status_idx = self.elog_enum.status.NONE
self.effekt_idx = self.elog_enum.effekt.NO
@@ -73,22 +101,51 @@ class StartMain(BaseWindow):
self.logbook = "Sandkasten" if simulation else "HIPA"
self.title = _title
self.no_turns = 180
self.message = "The number of turns measured in the ring = {0}".format(
self.no_turns)
@Slot()
def analysis_thread_finished(self):
BaseWindow.analysis_thread_finished(self)
self.gui_frame.central_tab_widget.setCurrentIndex(1)
BaseWindow.analysis_thread_finished(self)
if self.all_data is not None:
if self.all_data['Figure data'] is not None:
self.gui_frame.central_tab_widget.setCurrentIndex(1)
self.prepare_results_message()
@Slot()
def hdf_thread_finished(self):
BaseWindow.hdf_thread_finished(self)
self.prepare_results_message()
self.show_log_message(MsgSeverity.INFO, _pymodule, utils.line_no(),
self.message)
@Slot()
def save_to_hdf_dialog(self):
if self.from_hdf:
_mess = ("This is a repeat analysis from HDF. \n" +
"Saving duplicate data to HDF is declined.")
QMessageBox.information(self, "HDF", _mess, QMessageBox.Ok)
QApplication.processEvents()
return False
BaseWindow.save_to_hdf_dialog(self)
@Slot()
def save_to_hdf(self, from_dialog=False):
if not self.verify_save_to_hdf():
return False
if self.from_hdf:
_mess = ("This is a repeat analysis from HDF. \n" +
"Saving duplicate data to HDF is declined.")
QMessageBox.information(self, "HDF", _mess, QMessageBox.Ok)
QApplication.processEvents()
return False
if self.all_data is not None:
self.save_hdf_thread = self.HDFSave(self, from_dialog)
# self.save_hdf_thread.started.connect(self.save_to_hdf_started)
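Both save_to_hdf_dialog and save_to_hdf above repeat the same refusal block when the data originated from an HDF reload. A sketch of factoring that guard into one helper, assuming PySide6 (use the PySide2 import if that is what the application ships with):

from PySide6.QtWidgets import QApplication, QMessageBox

def refuse_duplicate_hdf_save(parent, from_hdf):
    # Return True and inform the user when the data came from an HDF reload.
    if not from_hdf:
        return False
    mess = ("This is a repeat analysis from HDF.\n"
            "Saving duplicate data to HDF is declined.")
    QMessageBox.information(parent, "HDF", mess, QMessageBox.Ok)
    QApplication.processEvents()
    return True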
@@ -103,16 +160,19 @@ class StartMain(BaseWindow):
"""User supplied hdf data
"""
if self.all_data is not None:
self.all_data['Raw data']['Input_data'] = self.all_data[
'Input data']
self.all_data['Raw data']['Ambient_data'] = self.all_data[
'Ambient data']
self.all_data['Raw data']['Processed_data'] = self.all_data[
'Processed data']
self.all_data['Raw data']['Raw_data'] = self.all_data[
'Application Raw data']
#All but 'Figure data'
#self.all_data['Raw data']['Input_data'] = self.all_data[
# 'Input data']
#self.all_data['Raw data']['Ambient_data'] = self.all_data[
# 'Ambient data']
#self.all_data['Raw data']['Processed_data'] = self.all_data[
# 'Processed data']
#self.all_data['Raw data']['Raw_data'] = self.all_data[
# 'Application Raw data']
self.all_data['Raw_data'] = self.all_data['Application Raw data']
del self.all_data['Figure data']
h5_storage.saveH5Recursive(
self.hdf_filename, self.all_data['Raw data'], dataH5)
self.hdf_filename, self.all_data, dataH5)
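user_supplied_hdf_data now mirrors the application raw data into 'Raw_data', drops 'Figure data' and saves the whole dictionary. The same packaging done on a copy, as a sketch (it assumes the figure entries are the only non-serializable part):

def package_for_hdf(all_data):
    payload = dict(all_data)                     # shallow copy; keep the GUI dictionary intact
    payload['Raw_data'] = payload.get('Application Raw data', {})
    payload.pop('Figure data', None)             # figures assumed not HDF-serializable
    return payload

# h5_storage.saveH5Recursive(hdf_filename, package_for_hdf(all_data), dataH5)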
@Slot()
def send_to_elog(self):
@@ -183,19 +243,27 @@ class StartMain(BaseWindow):
print(self.all_data)
if not BaseWindow.verify_save_to_epics(self):
return False
if self.from_hdf:
_mess = ("This is a repeat analysis from HDF. \n" +
"Analysis results are not saved to EPICS")
QMessageBox.information(self, "EPICS", _mess, QMessageBox.Ok)
QApplication.processEvents()
return False
dict_bunch = {}
nturns = 0
debug = True
dry_run = False
nturns = int(self.no_turns)
pv = self.settings.data["PVnturns"]["Cyclotron"]
dict_bunch[pv] = nturns
if not dry_run:
status, status_list = self.send_to_epics(dict_bunch)
if status == self.cyca.ICAFE_NORMAL:
message = "Saved data to EPICS - No of turns = ".format(nturns)
message = "Saved data to EPICS; No turns = {0}".format(nturns)
sev = MsgSeverity.INFO
else:
message = "Value (nturns) not saved to epics"
message = "Value (nturns={0}) not saved to epics".format(nturns)
sev = MsgSeverity.ERROR
self.show_log_message(sev.name, _pymodule, utils.line_no(), message)
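verify_save_to_epics packages the turn count into a channel-to-value dictionary before handing it to send_to_epics from the base framework; note that the PV is currently looked up under 'Cyclotron' regardless of the selected accelerator. A sketch of the packaging only, with a hypothetical PV name:

def build_nturns_update(settings_data, no_turns, accelerator='Cyclotron'):
    pv = settings_data['PVnturns'][accelerator]      # channel name from the settings file
    return {pv: int(no_turns)}                       # dict_bunch as used above

dict_bunch = build_nturns_update({'PVnturns': {'Cyclotron': 'EXAMPLE:NTURNS'}}, 186.4)
print("Saved data to EPICS; No turns = {0}".format(list(dict_bunch.values())[0]))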
@@ -222,10 +290,10 @@ class StartMain(BaseWindow):
"""<b>{0}</b> v {1}
<p>Copyright &copy; Paul Scherrer Institut (PSI).
All rights reserved.</p>
<p>Authors: P.-A. Duperrex, W. Koprek, J. Chrin, A. Facchetti </p>
<p>Authors: P.-A. Duperrex, J. Chrin, A. Facchetti, W. Koprek</p>
<p>A python implementation of the LabVIEW measurement developed by P.-A. Duperrex <br>
Ref: P.-A. Duperrex and A. Facchetti <br>
Number of Turn Measurements on the HIPA Cyclotrons at PSI <br>
'Number of Turn Measurements on the HIPA Cyclotrons at PSI' <br>
doi:10.18429/JACoW-IPAC2018-WEPAL067 </p>
<p>Responsible: W. Koprek, WBBA/315, Tel. x3765,
waldemar.koprek@psi.ch </p>