adapted to new ADC hardware

2024-12-06 13:58:30 +01:00
parent 9dd9f096ec
commit 934bb9f1ed
8 changed files with 515 additions and 3497 deletions

.gitignore (vendored): 1 line changed

@@ -1,3 +1,4 @@
out
test.json
*.*~
*.*-*

measurements (symbolic link): 1 line changed

@@ -0,0 +1 @@
/hipa/bd/data/measurements

out: 3002 lines changed (diff suppressed because the file is too large)

(analysis procedure module)

@@ -3,22 +3,19 @@ Analysis class
"""
from collections import OrderedDict
from datetime import datetime
import inspect
import logging
import math
import os
import pandas as pd
from statistics import mean
import time
import timeit
import matplotlib
import matplotlib.pyplot as plt
from matplotlib import ticker
from matplotlib import ticker
import numpy as np
from scipy import signal
from scipy.signal import chirp, hilbert
#from scipy.signal import hilbert
from qtpy.QtCore import QObject, Signal, Slot
from apps4ops.bdbase import h5_storage, utils
@@ -41,8 +38,9 @@ class AnalysisProcedure(QObject):
Analysis procedure
"""
trigger_abort = Signal()
def __init__(self, parent=None):
super(AnalysisProcedure, self).__init__(parent)
super().__init__(parent)
self.parent = parent
self.settings = self.parent.settings
self.cafe = self.parent.cafe
@@ -50,19 +48,19 @@ class AnalysisProcedure(QObject):
self.check_status = self.parent.check_status
self.check_status_list = self.parent.check_status_list
self.trigger_progressbar = self.parent.trigger_progressbar
self.daq_timeout = 10
self.daq_counter = 0
self.daq_timeout = 30
self.daq_ready = 0
self.logging = self.parent.logging
self.logger = self.logging.getLogger(__name__)
self.logger.debug("Logging activated in analysis procedure")
self.logger.debug('Logging activated in analysis procedure')
self.abort = False
self.trigger_abort.connect(self.receive_abort)
#Hold PV names and their values
# Hold PV names and their values
self.pv_value_dict = OrderedDict()
self.pv_dict = {}
#Package the data as so:
# Package the data as so:
self.all_data = {}
self.all_data['Input data'] = {}
self.all_data['Ambient data'] = {}
@@ -74,8 +72,8 @@ class AnalysisProcedure(QObject):
self.injector_2 = self.parent.injector_2
self.ring_cyclotron = self.parent.ring_cyclotron
#Declare input parameters
# Declare input parameters
self.input_data = None
self.debug = False
@@ -83,25 +81,24 @@ class AnalysisProcedure(QObject):
self.simulation = False
self.accelerator = self.ring_cyclotron
self.harmonic_no = 6
self.N_turns = None
#self.t_stepsize = 0.000000019750043 #0.00000002
self.rf_freq = 50.6328 #10**6
self.rf_sample =3.0 #10**6
self.injector2_current = 0
self.n_turns = None
# self.t_stepsize = 0.000000019750043 #0.00000002
self.rf_freq = 50.6328 # 10**6
self.rf_sample = 3.0 # 10**6
self.pulse_stepsize = 1/(self.rf_freq*10**6)
self.t_stepsize = 1/(self.rf_sample*10**9)
self.t_interval = math.ceil(self.pulse_stepsize/self.t_stepsize)
self.dTcable = 44 #ns
self.dNpickup = -1
self.mod_freq = 500 #GHz
self.duty_cycle = 1 # percentage
#Turn off DEBUG for MATLAB
self.dt_cable = 44 # ns
self.dn_pickup = -1
self.mod_freq = 500 # GHz
self.duty_cycle = 1 # percentage
# Turn off DEBUG for MATLAB
mat_logger = logging.getLogger('matplotlib')
mat_logger.setLevel(logging.ERROR)
@Slot()
def receive_abort(self):
"""
@@ -109,187 +106,190 @@ class AnalysisProcedure(QObject):
"""
self.abort = True
self.trigger_progressbar.emit(int(PROGRESS_THREAD_ABORTING))
print("RECEIVE ABORT...", flush=True)
def aborting(self, line_no):
self.abort = False
#mess = "Measurement aborted"
self.trigger_progressbar.emit(int(PROGRESS_THREAD_ABORTED))
self.parent.trigger_log_message.emit(
MsgSeverity.WARN.name, _pymodule, line_no,
("Measurement procedure aborted in analysis thread"), {} )
#########INITIALIZE THE INPUTS FROM THE GUI#######################
self.parent.trigger_log_message.emit(
MsgSeverity.WARN.name, _pymodule, line_no,
('Measurement procedure aborted in analysis thread'), {})
print("ABORTING...", flush=True)
#########INITIALIZE THE INPUTS FROM THE GUI#######################
def initialize_input_parameters(self, input_data: dict):
self.input_data = input_data
self.all_data['Input data'] = self.input_data
print('==>Initialize Input Parameters')
print(self.input_data)
print('==>Initialized Input Parameters')
if 'debug' in self.input_data.keys():
self.debug = self.input_data['debug']
if self.debug:
self.logger.debug("INPUT DATA to LOG:{0}".format(self.input_data))
self.logger.debug(f'INPUT DATA to LOG:{self.input_data}')
self.simulation = bool(self.input_data['simulation'])
self.rf_freq = float(self.input_data['freqrf'])
#2.5 MHz if oscilloscope
# 2.5 MHz if oscilloscope
if self.simulation:
self.rf_sample = 2.5
mess = 'Sampling rate changed to 2.5 MHz for oscilloscope data'
self.parent.trigger_log_message.emit(
MsgSeverity.INFO.name, _pymodule, utils.line_no(), mess, {})
else:
self.rf_sample = float(self.input_data['freqsampling'])
try:
self.accelerator = self.input_data['accelerator']
else:
self.rf_sample = float(self.input_data['freqsampling'])
try:
self.accelerator = self.input_data['accelerator']
self.harmonic_no = float(
self.input_data[self.accelerator]['harmonic'])
self.dTcable = float(
self.dt_cable = float(
self.input_data[self.accelerator]['deltaTcable'])
self.dNpickup = int(
self.dn_pickup = int(
self.input_data[self.accelerator]['deltaNpickup'])
if self.injector_2 in self.accelerator:
self.mod_freq = float(
self.input_data[self.accelerator]['freqmod']) #* 10**9 #GHz
self.input_data[self.accelerator]['freqmod']) # * 10**9 GHz
self.duty_cycle = float(
self.input_data[self.accelerator]['dutycycle']) # * 0.01
self.input_data[self.accelerator]['dutycycle']) # * 0.01
self.loglevel = self.input_data['loggingLevel']
self.logger.setLevel(self.logging.getLevelName(self.loglevel))
self.logger.info("INPUT PARAMETERS")
self.logger.info("Accelerator: {0}".format(self.accelerator))
self.logger.info("Simulation {0}".format(self.simulation))
self.logger.info("RF Frequency (10**6 Hz) {0}".format(self.rf_freq))
self.logger.info("RF Sampling (10**9 Hz) {0}".format(self.rf_sample))
self.logger.info("Harmonic No. {0}".format(self.harmonic_no))
self.logger.info("dT Cable {0}".format(self.dTcable))
self.logger.info("dN Pickup {0}".format(self.dNpickup))
self.logger.info('INPUT PARAMETERS')
self.logger.info(f'Accelerator: {self.accelerator}')
self.logger.info(f'Simulation {self.simulation}')
self.logger.info(
f'RF Frequency (10**6 Hz) {self.rf_freq}')
self.logger.info(
f'RF Sampling (10**9 Hz) {self.rf_sample}')
self.logger.info(f'Harmonic No. {self.harmonic_no}')
self.logger.info(f'dT Cable {self.dt_cable}')
self.logger.info(f'dN Pickup {self.dn_pickup}')
except KeyError as ex:
self.logger.error("KeyError {0}".format(ex))
self.logger.error(f'KeyError {ex}')
except ValueError as ex:
self.logger.error("ValueError {0}".format(ex))
except Exception as ex:
self.logger.error("Exception {0}".format(ex))
self.logger.error(f'ValueError {ex}')
def measure_and_analyze(self, input_data=None):
'''This method is initiated by the START button in the Procedure panel
'''
if input_data is None:
mess = "No input parameters given; no measurement performed"
mess = 'No input parameters given; no measurement performed'
self.parent.trigger_log_message.emit(
MsgSeverity.INFO.name, _pymodule, utils.line_no(), mess, {})
return None
#Read the input parameters from the GUI
return None
# Read the input parameters from the GUI
self.initialize_input_parameters(input_data)
#Step 1 - Collect ambient data related to the machine
# Step 1 - Collect ambient data related to the machine
self.all_data['Ambient data'] = self.collect_ambient_data()
self.trigger_progressbar.emit(int(PROGRESS_THREAD_START))
#Step 2 - Perform measurement and return data for processing
# Step 2 - Perform measurement and return data for processing
self.all_data['Raw data'] = self.measure()
if self.all_data['Raw data'] is None:
self.trigger_progressbar.emit(int(PROGRESS_THREAD_ERROR))
return None
#Step 3 - Process the raw data
self.all_data['Processed data'] = self.process()
#Step 4 - Provide plots
self.all_data['Figure data'] = self.make_figs()
self.trigger_progressbar.emit(int(PROGRESS_THREAD_END))
return self.all_data
# Step 3 - Process the raw data
self.all_data['Processed data'] = self.process()
# Step 4 - Provide plots
self.all_data['Figure data'] = self.make_figs()
self.trigger_progressbar.emit(int(PROGRESS_THREAD_END))
return self.all_data
def load_hdf_file(self, hdf_filename_loaded):
print("load_hdf_file==>", hdf_filename_loaded, flush=True)
print(f'load_hdf_file==> {hdf_filename_loaded}', flush=True)
raw_data = h5_storage.loadH5Recursive(hdf_filename_loaded)
self.raw_data = raw_data
print("loadH5Recursive", raw_data, flush=True)
return raw_data
def reanalyze(self, all_data):
print("reanalyze", flush=True)
'''Reanalysis
'''
print('Reanalyze', flush=True)
print(all_data)
input_data = all_data['Input_data']
#Read the input parameters
# Read the input parameters
self.initialize_input_parameters(input_data)
ambient_data = all_data['Ambient_data']
self.raw_data = all_data['Raw_data']
self.all_data['Raw data'] = self.raw_data
ambient_data['Time in seconds'] = int(ambient_data['Time in seconds'])
self.time_stamp = ambient_data['Time stamp']
self.all_data['Ambient data'] = ambient_data
try:
ambient_data['I_Inj2'] = float(ambient_data['I_Inj2'])
self.injector2_current = ambient_data['I_inj2']
except KeyError:
self.injector2_current = 0.0
self.parent.from_hdf = True
self.time_stamp = ambient_data['Time stamp']
self.all_data['Ambient data'] = ambient_data
self.all_data['Processed data'] = self.process(from_hdf5=True)
self.all_data['Figure data'] = self.make_figs()
self.trigger_progressbar.emit(PROGRESS_THREAD_END)
return(self.all_data)
self.trigger_progressbar.emit(PROGRESS_THREAD_END)
return self.all_data
def collect_ambient_data(self):
"""Collect ambient data and return it as a dictionary
"""
'''Collect ambient data and return it as a dictionary.
Also opens PV channels for DAQ
'''
# Time in seconds is an integer and can be stored in hdf5
time_in_seconds = time.time()
self.time_stamp = datetime.fromtimestamp(
time_in_seconds).strftime('%a %d-%m-%Y %H:%M:%S')
ambient_data = {
'Time in seconds': int(time_in_seconds),
'Time stamp': self.time_stamp,
'I_Inj2': 0,
}
self.logger.debug("{0}".format(ambient_data))
self.logger.debug(f'Ambient data = {ambient_data}')
#if self.simulation:
# if self.simulation:
# return ambient_data
#EPICS...
# EPICS...
# Attach context, open DAQ PV channels
handles = self.cafe.getHandles()[0]
status = self.cafe.attachContext(handles[0])
if status == self.cyca.ECAFE_NULLCONTEXT:
options = {}
options['statusCode'] = (str(status) + " " +
self.cafe.getStatusCodeAsString(status))
options['statusCode'] = (str(status) + ' ' +
self.cafe.getStatusCodeAsString(status))
options['statusInfo'] = self.cafe.getStatusInfo(status)
self.parent.trigger_log_message.emit(
MsgSeverity.ERROR.name, _pymodule, utils.line_no(),
("Cannot attach CA context in thread " +
"Measurement will not be initiated!"), _options)
('Cannot attach CA context in thread. ' +
'Measurement will not be initiated!'), options)
if self.abort:
self.aborting(utils.line_no())
@@ -298,244 +298,291 @@ class AnalysisProcedure(QObject):
self.trigger_progressbar.emit(PROGRESS_THREAD_ERROR)
return {}
# Retrieve I_INJ2
mwc2_ist_2 = self.cafe.getCache('MWC2:IST:2')
if mwc2_ist_2 is not None:
ambient_data['I_Inj2'] = mwc2_ist_2
#mA
self.injector2_current = ambient_data['I_Inj2']
pv_list = []
for key, value in self.settings.data['PV'][self.accelerator].items():
self.pv_value_dict[key] = OrderedDict()
self.pv_value_dict[key][value] =0
self.pv_value_dict[key][value] = 0
self.pv_dict[key] = value
pv_list.append(value)
self.cafe.openPrepare()
handle_list = self.cafe.open(pv_list)
self.cafe.openNowAndWait(1.0)
self.cafe.setGetActionWhenMonitorPolicyAllHandles(
self.cyca.GET_FROM_CACHE)
self.cyca.GET_FROM_CACHE)
value_list, status, status_list = self.cafe.getScalarList(handle_list)
if self.debug:
for pv, val, stat in zip(pv_list, value_list, status_list):
print(pv, val, stat)
if status != self.cyca.ICAFE_NORMAL:
self.check_status_list(_pymodule, "getScalarList",
self.check_status_list(_pymodule, 'getScalarList',
pv_list, status_list, utils.line_no())
pv_daq_counter = self.pv_dict['daqCounter']
#self.cafe.monitor(pv_daq_counter)
self.daq_counter = self.cafe.getCache(pv_daq_counter)
if self.daq_counter is None:
stat = self.cafe.getStatus(pv_daq_counter)
self.check_status(_pymodule, "getCache", pv_daq_counter, stat,
utils.line_no())
#Put values in dictionary for inspection
pv_daq_ready = self.pv_dict['daqReady']
self.daq_ready = self.cafe.getCache(pv_daq_ready)
if self.daq_ready is None:
stat = self.cafe.getStatus(pv_daq_ready)
self.check_status(_pymodule, 'getCache', pv_daq_ready, stat,
utils.line_no())
pv_daq_error_count = self.pv_dict['daqErrorCount']
daq_error_count = self.cafe.getCache(pv_daq_error_count)
if daq_error_count is None:
stat = self.cafe.getStatus(pv_daq_error_count)
self.check_status(_pymodule, 'getCache', pv_daq_error_count, stat,
utils.line_no())
# Put values in dictionary for inspection
for i, (dict_key) in enumerate(self.pv_value_dict.keys()):
self.pv_value_dict[dict_key] = value_list[i]
if self.debug:
print ("EPICS PVS==>", self.pv_value_dict, flush=True)
print ("No of turns", self.pv_value_dict['nturns'])
print(f'EPICS PVS==> {self.pv_value_dict}', flush=True)
#In GUI
#self.cafe.monitor(pv_daq_ready)
#Not in GUI
self.cafe.monitor(pv_daq_error_count)
return ambient_data
def extract_peak_data(self):
y1_peaks_pre = signal.find_peaks(self.y1_sample, height=0.005)
y1_peaks_avg = np.average(y1_peaks_pre[1]['peak_heights'])
y1_height = y1_peaks_avg * 0.726667
y2_peaks_pre = signal.find_peaks(self.y2_sample, height=0.005)
y2_peaks_avg = np.average(y2_peaks_pre[1]['peak_heights'])
y2_height = y2_peaks_avg * 0.566667
print("AVG = ", y1_height, y2_height, flush=True)
y1_peaks = signal.find_peaks(self.y1_sample, height=y1_height)
y2_peaks = signal.find_peaks(self.y2_sample, height=y2_height)
print("PEAKS==>", y1_peaks, y2_peaks, len(y1_peaks[0]), len(y2_peaks[0]), flush=True)
print(y1_peaks[1]['peak_heights'] ,flush=True)
''' Using signal package for peak search
'''
if not self.simulation:
height = 50.0
else:
height = 0.005
y1_peaks_pre = signal.find_peaks(self.y1_sample, height=height,
distance=10)
##y1_peaks_avg = np.average(y1_peaks_pre[1]['peak_heights'])
min_y1_p = np.min(y1_peaks_pre[1]['peak_heights'])
max_y1_p = np.max(y1_peaks_pre[1]['peak_heights'])
print(f'min and max value of peak {min_y1_p}, {max_y1_p}')
y1_height = min_y1_p * 0.9 # y1_peaks_avg * 0.726667
y2_peaks_pre = signal.find_peaks(self.y2_sample, height=height,
distance=10)
##y2_peaks_avg = np.average(y2_peaks_pre[1]['peak_heights'])
min_y2_p = np.min(y2_peaks_pre[1]['peak_heights'])
max_y2_p = np.max(y2_peaks_pre[1]['peak_heights'])
print(f'min and max value of peak {min_y2_p}, {max_y2_p}')
y2_height = min_y2_p * 0.9 # y2_peaks_avg * 0.566667
print(f'AVG = {y1_height}, {y2_height}', flush=True)
y1_peaks = signal.find_peaks(
self.y1_sample, height=y1_height, distance=5)
y2_peaks = signal.find_peaks(
self.y2_sample, height=y2_height, distance=5)
print((f'PEAKS==> {y1_peaks}, {y2_peaks},' +
f'{len(y1_peaks[0])}, {len(y2_peaks[0])}'), flush=True)
print(y1_peaks[1]['peak_heights'], flush=True)
#import sys
#sys.exit()
# sys.exit()
self.y1_pulse = (y1_peaks[1]['peak_heights'])
self.y2_pulse = (y2_peaks[1]['peak_heights'])
def measure(self):
''' Enable DAQ and read in the collected data from EPICS
'''
if self.abort:
self.aborting(utils.line_no())
return None
self.aborting(utils.line_no())
return None
self.parent.from_hdf = False
#Start and Stop Run
#Collect Data and put into numpy array
#Read Data file if simulation
#raw data
# Start and Stop Run
# Collect Data and put into numpy array
# Read Data file if simulation
# raw data
self.y1_sample = []
self.y2_sample = []
self.t_sample = []
#filtered raw data corresponding to max amplitude of pulse
# filtered raw data corresponding to max amplitude of pulse
self.y1_pulse = []
self.y2_pulse = []
self.t_pulse = []
def extract_raw_data():
'''Oscilloscope data
'''
t_inc = 0
for count, entry in enumerate(self.content[5:]):
entry=entry.replace('\n','')
val=entry.split('\t')
for entry in self.content[5:]:
entry = entry.replace('\n', '')
val = entry.split('\t')
self.t_sample.append(float(t_inc))
self.y1_sample.append(float(val[1])*(-1))
self.y1_sample.append(float(val[1])*(-1))
self.y2_sample.append(float(val[2]))
t_inc += self.t_stepsize
if not self.simulation:
#start DAQ
#Set
# start DAQ
# Set
pv_daq_trigger = self.pv_dict['daqTrigger']
pv_daq_counter = self.pv_dict['daqCounter']
pv_daq_ready = self.pv_dict['daqReady']
pv_daq_error_count = self.pv_dict['daqErrorCount']
pv_wf_entry = self.pv_dict['wfEntry']
pv_wf_exit = self.pv_dict['wfExit']
pv_wf = [pv_wf_entry, pv_wf_exit]
self.daq_counter = self.cafe.getCache(pv_daq_counter)
print("original cnt", self.daq_counter, flush=True)
stat = self.cafe.set(pv_daq_trigger, 24)
self.check_status(_pymodule, "set", pv_daq_trigger, stat,
self.daq_ready = self.cafe.getCache(pv_daq_ready)
stat = self.cafe.set(pv_daq_trigger, 8)
self.check_status(_pymodule, 'set', pv_daq_trigger, stat,
utils.line_no())
time.sleep(0.2)
stat = self.cafe.set(pv_daq_trigger, 0)
self.check_status(_pymodule, "set", pv_daq_trigger, stat,
self.check_status(_pymodule, 'set', pv_daq_trigger, stat,
utils.line_no())
#Monitor DAQ State
# Monitor DAQ State
start = time.time()
finished = False
icount = 0
value = 0
while (time.time() - start) < self.daq_timeout:
if self.abort:
self.aborting(utils.line_no())
self.aborting(utils.line_no())
return None
value = self.cafe.getCache(pv_daq_counter)
print("present cnt", value, flush=True)
value = self.cafe.getCache(pv_daq_ready)
print('present cnt', value, flush=True)
if value is None:
stat = self.cafe.getStatus(pv_daq_counter)
self.check_status(_pymodule, "getCache", pv_daq_counter, stat,
utils.line_no())
elif value == (self.daq_counter+1):
stat = self.cafe.getStatus(pv_daq_ready)
self.check_status(_pymodule, 'getCache', pv_daq_ready,
stat, utils.line_no())
elif value != 0:
finished = True
break
break
time.sleep(1.0)
icount += 1
progress = int(100*icount/self.daq_timeout)
print("progress", progress, flush=True)
self.trigger_progressbar.emit(progress)
if progress > PROGRESS_THREAD_ERROR:
self.trigger_progressbar.emit(progress)
if not finished:
mess = ("DAQ not completed. Exceeded allowed " +
"time limit of {0}s".format(self.daq_timeout))
mess = ('DAQ not completed. Exceeded allowed ' +
f'time limit of {self.daq_timeout}s')
self.parent.trigger_log_message.emit(
MsgSeverity.ERROR.name, _pymodule, utils.line_no(),
mess, {})
MsgSeverity.ERROR.name, _pymodule, utils.line_no(),
mess, {})
return None
daq_error_count = self.cafe.getCache(pv_daq_error_count)
if daq_error_count is None:
stat = self.cafe.getStatus(pv_daq_error_count)
self.check_status(_pymodule, 'getCache', pv_daq_error_count,
stat, utils.line_no())
elif daq_error_count:
mess = ('Results discarded as DAQ reports ' +
f'{daq_error_count} errors')
self.parent.trigger_log_message.emit(
MsgSeverity.ERROR.name, _pymodule, utils.line_no(),
mess, {})
return None
#Read WF from EPICS and fill sample y1_sample, y2_sample
# Read WF from EPICS and fill sample y1_sample, y2_sample
(self.y1_sample, self.y2_sample), status, status_list = \
self.cafe.getCompoundList(pv_wf, cacheFlag=False)
if status != self.cyca.ICAFE_NORMAL:
self.check_status_list(_pymodule, "getCompoundList",
pv_wf, status_list, utils.line_no())
self.check_status_list(_pymodule, 'getCompoundList',
pv_wf, status_list, utils.line_no())
return None
print(f'y1 sample length = {len(self.y1_sample)}')
print(f'y2 sample length = {len(self.y2_sample)}', flush=True)
print("y1 sample length ", len(self.y1_sample))
print("y2 sample length ", len(self.y2_sample), flush=True)
#series = pd.Series(self.y1_sample)
series = pd.Series(self.y1_sample)
#self.y1_sample = (series * (-1)).tolist()
self.y1_sample = (series).tolist()
self.t_sample = [None] * len(self.y1_sample)
self.t_sample[0] = 0
t_inc = 0
for i in range(1, len(self.y1_sample)):
t_inc += self.t_stepsize
self.t_sample[i] = t_inc
else:
self.trigger_progressbar.emit(20)
print("open File", flush=True)
file = open('/hipa/bd/data/measurements/tina/20240710-223007_2000.txt','r')
self.trigger_progressbar.emit(40)
self.trigger_progressbar.emit(30)
with open(
'/hipa/bd/data/measurements/tina/20240710-223007_2000.txt',
'r', encoding='utf-8') as file:
self.content = file.readlines()
if self.abort:
file.close()
self.aborting(utils.line_no())
return None
self.content = file.readlines()
file.close()
if self.abort:
self.aborting(utils.line_no())
return None
print("close File", flush=True)
self.aborting(utils.line_no())
return None
self.trigger_progressbar.emit(60)
extract_raw_data()
self.extract_peak_data()
if self.abort:
self.aborting(utils.line_no())
return None
self.trigger_progressbar.emit(70)
#Fill Raw data here
self.aborting(utils.line_no())
return None
self.trigger_progressbar.emit(70)
# Fill Raw data here
rawdata = {
'y1': list(self.y1_sample),
'y2': list(self.y2_sample),
't': list(self.t_sample),
}
return rawdata
def unpack_hdf_data(self):
self.y1_sample = self.raw_data['y1']
self.y2_sample = self.raw_data['y2']
self.t_sample = self.raw_data['t']
self.extract_peak_data()
def process(self, from_hdf5=False):
''' Process the collected data
'''
if self.abort:
self.aborting(utils.line_no())
return None
self.aborting(utils.line_no())
return None
self.trigger_progressbar.emit(95)
if from_hdf5:
self.unpack_hdf_data()
self.mean_amplitude_y1 = np.mean(self.y1_pulse, keepdims=True)
self.mean_amplitude_y2 = np.mean(self.y2_pulse, keepdims=True)
self.std_amplitude_y1 = np.std(self.y1_pulse, keepdims=True)
self.std_amplitude_y2 = np.std(self.y2_pulse, keepdims=True)
self.normalized_amplitude_envelope_1 = (
self.y1_pulse - self.mean_amplitude_y1)
self.y1_pulse - self.mean_amplitude_y1)/self.std_amplitude_y1
self.normalized_amplitude_envelope_2 = (
self.y2_pulse - self.mean_amplitude_y2)
self.y2_pulse - self.mean_amplitude_y2)/(self.std_amplitude_y2*len(self.y2_pulse))
self.corr_full = signal.correlate(
self.normalized_amplitude_envelope_2,
self.normalized_amplitude_envelope_1, mode='full', method='auto')
@@ -543,65 +590,66 @@ class AnalysisProcedure(QObject):
len(self.normalized_amplitude_envelope_2),
len(self.normalized_amplitude_envelope_1), mode='full')
self.lag_full = int( self.lags_full_array[np.argmax(self.corr_full)])
self.lag_full = int(self.lags_full_array[np.argmax(self.corr_full)])
#self.delay = self.lag_full * self.t_stepsize*self.t_interval
self.delay = float (self.lag_full * self.pulse_stepsize)
print("lag", self.lag_full)
print("delay", self.delay, flush=True)
print("dTcable", self.dTcable, flush=True)
print("rf freq", self.rf_freq, flush=True)
print("harmonic", self.harmonic_no, flush=True)
print("dN pickup", self.dNpickup, flush=True)
self.N_turns = (
((self.delay-self.dTcable*10**(-9))*self.rf_freq*10**6) \
/self.harmonic_no) + self.dNpickup
print("lag = {0}, delay = {1}, nturns={2}".format(
self.lag_full, self.delay, self.N_turns))
self.delay = float(self.lag_full * self.pulse_stepsize)
print('lag', self.lag_full)
print('delay', self.delay, flush=True)
print('dTcable', self.dt_cable, flush=True)
print('rf freq', self.rf_freq, flush=True)
print('harmonic', self.harmonic_no, flush=True)
print('dN pickup', self.dn_pickup, flush=True)
self.n_turns = (
((self.delay-self.dt_cable*10**(-9))*self.rf_freq*10**6)
/ self.harmonic_no) + self.dn_pickup
print((f'lag = {self.lag_full}, ' +
f'delay = {self.delay*10**6:.3f} \u00B5s ' +
f'nturns = {self.n_turns:.4f}'))
if self.abort:
self.aborting(utils.line_no())
return None
#Fill Processed data here
self.aborting(utils.line_no())
return None
# Fill Processed data here
proc_data = {
'y1': self.y1_pulse.tolist(),
'y2': self.y2_pulse.tolist(),
't_stepsize': self.pulse_stepsize,
'lag': self.lag_full,
'delay': self.delay,
'nturns': self.N_turns
'delay': self.delay,
'nturns': self.n_turns
}
return proc_data
def make_figs(self):
''' Figure construction with matplotlib
'''
fig, (ax) = plt.subplots(nrows=2, ncols=1, figsize=(18,9), layout='tight')
fig2, (ax2) = plt.subplots(nrows=1, ncols=1, figsize=(18,9))
fig, (ax) = plt.subplots(nrows=2, ncols=1,
figsize=(18, 9), layout='tight')
fig2, (ax2) = plt.subplots(nrows=1, ncols=1, figsize=(18, 9))
fig.patch.set_facecolor('#FAF9F6')
fig2.patch.set_facecolor('#FAF9F6')
ln=500
off = 10000
ln = 500 # 500
off = 0 # 10000
s = off
e = off +ln
e = off + ln
#ax[0].ticklabel_format(useOffset=False, style='plain')
ax[0].plot(self.t_sample[s:e], self.y1_sample[s:e], '.r-', label='')
ax[1].plot(self.t_sample[s:e], self.y2_sample[s:e], '.r-', label='' )
ax[1].plot(self.t_sample[s:e], self.y2_sample[s:e], '.r-', label='')
ax[0].xaxis.set_major_locator(
ticker.MultipleLocator(self.t_stepsize*self.t_interval))
ax[0].set_xlabel('Time [s]')
ax[0].set_ylabel('Amplitude')
ax[0].set_title('Pulse at Entry')
ax[0].set_facecolor("lightgrey")
#ax[0].legend()
ax[0].set_facecolor('lightgrey')
# ax[0].legend()
ax[0].grid(visible=True, which='major', axis='both',
linestyle='--', linewidth=0.8)
ax[1].xaxis.set_major_locator(
@@ -609,38 +657,41 @@ class AnalysisProcedure(QObject):
ax[1].set_xlabel('Time [s]')
ax[1].set_ylabel('Amplitude')
ax[1].set_title('Pulse at Exit')
ax[1].set_facecolor("lightgray")
ax[1].set_facecolor('lightgray')
ax[1].grid(visible=True, which='major', axis='both',
linestyle='--', linewidth=0.8)
#ax[1].legend()
ax2.set_title('Cross-correlation between {0} Entrance and Exit'.format(
self.accelerator), fontsize=16)
linestyle='--', linewidth=0.8)
# ax[1].legend()
ax2.set_title(
f'Cross-correlation between {self.accelerator} Entrance and Exit',
fontsize=16)
ax2.set_ylabel('x-corr', fontsize=14)
ax2.set_xlabel('t (units of {0:.2f} ns)'.format(
self.pulse_stepsize*10**9), fontsize=14)
ax2.set_xlabel(f't (units of {self.pulse_stepsize*10**9:.2f} ns)',
fontsize=14)
ax2.grid(visible=True, which='major', axis='both', linestyle='--',
linewidth=0.8)
ax2.set_facecolor("#F5F5F5")
ax2.set_facecolor('#F5F5F5')
ax2.plot(self.lags_full_array[:], self.corr_full[:])
xmin, xmax, ymin, ymax = ax2.axis()
lineStart = ymin #self.corr_full.min()
lineEnd = ymax #self.corr_full.max()
print("start end ", lineStart, lineEnd)
ax2.plot([self.lag_full, self.lag_full], [lineStart, lineEnd],
':', color = 'r')
ax2.set_ylim(lineStart, lineEnd)
#ax2[1].plot(self.lags_full_array[:], self.corr_full[:], 'yo')
text = "No of Turns = {0}".format(int(self.N_turns))
_, _, ymin, ymax = ax2.axis()
line_start = ymin # self.corr_full.min()
line_end = ymax # self.corr_full.max()
ax2.plot([self.lag_full, self.lag_full], [line_start, line_end],
':', color='r')
ax2.set_ylim(line_start, line_end)
text = f'No of Turns = {self.n_turns:0.0f}'
plt.figtext(0.65, 0.82, self.accelerator, weight='bold', fontsize=16)
plt.figtext(0.65, 0.77, text, weight='bold', fontsize=16)
plt.figtext(0.7, 0.72, "lag = {0}".format(self.lag_full), weight='normal',
if self.injector2_current != 0:
inj_current_text = f'I Inj2 = {self.injector2_current:.3f} mA'
plt.figtext(0.80, 0.85, inj_current_text, weight='normal',
fontsize=10)
plt.figtext(0.7, 0.72, f'lag = {self.lag_full}', weight='normal',
fontsize=14)
text = 'delay = {0:.3f} (\u00B5s)'.format(self.delay*10**6)
text = f'delay = {self.delay*10**6:.3f} \u00B5s'
plt.figtext(0.7, 0.67, text, weight='normal', fontsize=14)
if self.settings.data["GUI"]["showDate"]:
if self.settings.data['GUI']['showDate'] == 1:
plt.figtext(0.75, 0.12, self.time_stamp, size='small')
fig_data = {'Canvas 1': [fig2], 'Canvas 2': [fig]}
return fig_data
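
A minimal standalone sketch of the lag-to-turns calculation performed in process() above: the cross-correlation of the normalized entry and exit pulse trains gives a lag in units of one RF period, which is converted to a delay and then to a turn number. The pulse arrays below are synthetic stand-ins for the extracted peak heights, and the constants are the defaults shown above, not measured values.

# Illustrative sketch of the turn-number calculation (synthetic data).
import numpy as np
from scipy import signal

rf_freq = 50.6328                        # MHz (default above)
harmonic_no = 6
dt_cable = 44.0                          # ns, cable delay correction
dn_pickup = -1                           # pickup offset in turns
pulse_stepsize = 1.0 / (rf_freq * 1e6)   # one RF period, ~19.75 ns

# Stand-ins for y1_pulse / y2_pulse (peak heights at entry and exit);
# the exit train is the entry train delayed by 700 RF periods plus noise.
rng = np.random.default_rng(0)
y1_pulse = rng.random(2000)
y2_pulse = np.roll(y1_pulse, 700) + 0.05 * rng.random(2000)

env1 = (y1_pulse - y1_pulse.mean()) / y1_pulse.std()
env2 = (y2_pulse - y2_pulse.mean()) / (y2_pulse.std() * len(y2_pulse))

corr = signal.correlate(env2, env1, mode='full', method='auto')
lags = signal.correlation_lags(len(env2), len(env1), mode='full')
lag = int(lags[np.argmax(corr)])         # lag in units of one RF period

delay = lag * pulse_stepsize             # seconds
n_turns = ((delay - dt_cable * 1e-9) * rf_freq * 1e6) / harmonic_no + dn_pickup
print(f'lag = {lag}, delay = {delay*1e6:.3f} us, nturns = {n_turns:.4f}')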

(main panel GUI module)

@@ -1,26 +1,17 @@
'''Main panel gui interface
'''
import os
#from qtpy import QtCore, QtGui
from qtpy.QtGui import QColor, QPixmap
from qtpy.QtGui import QColor
from qtpy.QtCore import __version__ as QT_VERSION_STR
from qtpy.QtCore import QEventLoop, Qt, QTimer, Slot
from qtpy.QtCore import Qt
from qtpy.QtWidgets import (
QApplication, QGridLayout, QGroupBox, QHBoxLayout, QLabel, QMessageBox,
QPushButton, QStackedWidget, QTabBar, QTabWidget, QTableWidgetItem,
QTextEdit, QVBoxLayout, QWidget)
QGridLayout, QGroupBox, QLabel, QStackedWidget, QTabBar, QTabWidget, QWidget)
from common.packages import elog
from apps4ops.bdbase.utils import _line
from apps4ops.bdbase.enumkind import MsgSeverity
from caqtwidgets.pvwidgets import (
CAQLabel, CAQLineEdit, CAQMessageButton, CAQTableWidget, CAQTextEntry,
QHLine)
from caqtwidgets.pvwidgets import CAQLabel
_pymodule = os.path.basename(__file__)
class AppGui(QWidget):
''' Main GUI class
'''
@@ -37,7 +28,7 @@ class AppGui(QWidget):
self.accelerator_list = [self.parent.injector_2,
self.parent.ring_cyclotron]
self.send_to_log_window = self.parent.send_to_log_window
self.show_log_message = self.parent.show_log_message
self.statusbar = self.parent.statusbar
@@ -57,68 +48,67 @@ class AppGui(QWidget):
self.gui_frame.operator_wgt.setFixedHeight(640)
self.gui_frame.expert_wgt.setFixedHeight(240)
self.daq_wgt = self.daq_group_qtabwidget(widget_type="QStackedWidget")
self.daq_wgt = self.daq_group_qtabwidget(widget_type='QStackedWidget')
self.gui_frame.measurement_layout.addWidget(
self.daq_wgt, 0, 1, 1, 1, alignment=Qt.AlignTop)
self.daq_wgt.setCurrentIndex(self.parent.default_idx)
self.daq_wgt.currentChanged.emit(self.parent.default_idx)
self.gui_frame.line_sender_dict[
'accelerator'].currentChanged.connect(self.cb_accelerator)
def cb_accelerator(self, idx):
self.daq_wgt.setCurrentIndex(idx)
def daq_group_qtabwidget(self, widget_type="QStackedWidget"):
def daq_group_qtabwidget(self, widget_type='QStackedWidget'):
accel_wgt_dict = {}
if "QTabWidget" in widget_type:
if 'QTabWidget' in widget_type:
accel_tab_widget = QTabWidget()
accel_tab_widget.setFont(self.font_gui)
accel_tab_widget.setStyleSheet("QTabBar {font-size: 10pt;}")
accel_tab_widget.setStyleSheet('QTabBar {font-size: 10pt;}')
accel_tab_widget.tabBar().setShape(QTabBar.TriangularNorth)
for i, accel in enumerate(self.accelerator_list):
accel_wgt_dict[accel] = self.daq_group(accel)
accel_tab_widget.addTab(accel_wgt_dict[accel], accel)
color = self.settings.data['Parameters']['accelerator'][data][
color = self.settings.data['Parameters']['accelerator']['data'][
'color'][i]
accel_tab_widget.tabBar().setTabTextColor(i, QColor(color))
else:
accel_tab_widget = QStackedWidget()
for i, accel in enumerate(self.accelerator_list):
accel_wgt_dict[accel] = self.daq_group(accel)
accel_wgt_dict[accel] = self.daq_group(accel)
accel_tab_widget.addWidget(accel_wgt_dict[accel])
accel_tab_widget.setFixedWidth(300)
accel_tab_widget.setFixedHeight(160)
return accel_tab_widget
def daq_group(self, accel):
group_box = QGroupBox("{0} DAQ".format(accel))
obj_name = "CYCLOTRON" if self.parent.ring_cyclotron in accel else \
"INJECTOR"
group_box = QGroupBox(f'{accel} DAQ')
obj_name = 'CYCLOTRON' if self.parent.ring_cyclotron in accel else \
'INJECTOR'
group_box.setObjectName(obj_name)
vbox = QGridLayout()
pv_daq = []
pv_daq.append(self.settings.data['PV'][accel]['daqTrigger'])
pv_daq.append(self.settings.data['PV'][accel]['daqCounter'])
pv_daq.append(self.settings.data['PV'][accel]['daqReady'])
self.cafe.openPrepare()
self.cafe.open(pv_daq)
self.cafe.openNowAndWait(1.0)
pv1 = CAQLabel(self, pv_name=pv_daq[0])
pv2 = CAQLabel(self, pv_name=pv_daq[1])
pv1.setFixedWidth(40)
pv2.setFixedWidth(40)
vbox.addWidget(QLabel('DAQ Trigger:'), 0, 0)
vbox.addWidget(QLabel('Event Counter:'), 1, 0)
vbox.addWidget(QLabel('DAQ Ready:'), 1, 0)
vbox.addWidget(pv1, 0, 1)
vbox.addWidget(pv2, 1, 1)
vbox.setContentsMargins(9, 19, 9, 9)
@@ -134,5 +124,5 @@ class AppGui(QWidget):
grid = QGridLayout()
grid.addWidget(group_box, 0, 0)
qw.setLayout(grid)
return qw
return qw
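
A minimal runnable sketch of the per-accelerator QStackedWidget pattern used in daq_group_qtabwidget() and cb_accelerator() above: one DAQ group page per accelerator, with the visible page following a selector index. In the real GUI the index comes from the 'accelerator' line sender and the labels are PV-bound CAQLabel widgets, both omitted here.

# Standalone sketch: a stacked widget whose page follows an accelerator selector.
import sys
from qtpy.QtWidgets import (QApplication, QComboBox, QGroupBox, QLabel,
                            QStackedWidget, QVBoxLayout, QWidget)

app = QApplication(sys.argv)
accelerators = ['Injector', 'Cyclotron']

stacked = QStackedWidget()
for accel in accelerators:
    page = QGroupBox(f'{accel} DAQ')
    layout = QVBoxLayout(page)
    layout.addWidget(QLabel('DAQ Trigger:'))
    layout.addWidget(QLabel('DAQ Ready:'))
    stacked.addWidget(page)

selector = QComboBox()
selector.addItems(accelerators)
selector.currentIndexChanged.connect(stacked.setCurrentIndex)  # like cb_accelerator

window = QWidget()
outer = QVBoxLayout(window)
outer.addWidget(selector)
outer.addWidget(stacked)
window.show()
sys.exit(app.exec_())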

(application settings JSON with PV definitions)

@@ -16,15 +16,17 @@
"header" : ["SHIFT", "INJ2", "IP2", "IW2", "PK1", "PK2", "SINQ", "UCN"],
"PV" : {"Injector": {"nturns": "PV-INJ:NTURNS",
"daqTrigger": "PV-INJ:TRG",
"daqCounter": "PV-INJ:CNT",
"daqReady": "PV-INJ:READY",
"daqErrorCount": "PV-INJ:ERR_CNT",
"wfEntry": "PV-INJ:WF-ENTRY",
"wfExit": "PV-INJ:WF-EXIT"
},
"Cyclotron": {"nturns": "PV-CYC:NTURNS",
"daqTrigger": "ZTEST-CPSI-TCRING:FW-SIS0DAQ-TRG",
"daqCounter": "ZTEST-CPSI-TCRING:FW-SIS0DAQ-EVENT-CNT",
"wfEntry": "ZTEST-CPSI-TCRING:FW-SIS0DAQ-CH0-AMPLT-WF",
"wfExit": "ZTEST-CPSI-TCRING:FW-SIS1DAQ-CH0-AMPLT-WF"
"Cyclotron": {"nturns": "ZTEST-CPSI-TCRING:TURN-NUM",
"daqTrigger": "ZTEST-CPSI-TCRING:SCOPE-TRG",
"daqReady": "ZTEST-CPSI-TCRING:SCOPE-READY",
"daqErrorCount": "ZTEST-CPSI-TCRING:ERR-CNT",
"wfEntry": "ZTEST-CPSI-TCRING:SCOPE-CH0",
"wfExit": "ZTEST-CPSI-TCRING:SCOPE-CH1"
}
},
"HIPA": ["Injector", "Ring"],

tina.py: 269 lines changed

@@ -1,14 +1,13 @@
"""Tina.py module for measuring the number of turns
"""
'''Tina.py module for measuring the number of turns
'''
import inspect
import os
import platform
import sys
import time
from qtpy.QtCore import __version__ as QT_VERSION_STR
from qtpy.QtCore import PYQT_VERSION_STR, Signal, Slot
from qtpy.QtCore import PYQT_VERSION_STR, Slot
from qtpy.QtWidgets import QApplication, QMessageBox
from apps4ops.bdbase.base import BaseWindow
@@ -23,121 +22,113 @@ from pyrcc5 import tina_resources
_pymodule = os.path.basename(__file__)
_appname, _appext = _pymodule.split(".")
_appname, _appext = _pymodule.split('.')
_abspath = os.path.dirname(os.path.abspath(__file__))
_appversion = "0.0.1"
_title = "No of Turns Measurement"
_appversion = '0.0.1'
_title = 'No of Turns Measurement'
_appname = "Tina"
_appname = 'Tina'
class StartMain(BaseWindow):
trigger_log_message = Signal(str, str, int, str, dict)
trigger_progressbar = Signal(int)
trigger_progressbar_str = Signal(int, str)
''' Application to measure the no of turns in Injector 2 and the
Ring Cyclotron
'''
ring_cyclotron = 'Cyclotron'
injector_2 = 'Injector'
def __init__(self, parent=None):
def __init__(self, parent=None):
super().__init__(
parent=parent, pymodule=_pymodule, appversion=_appversion,
title=_title, user_mode=UserMode.OPERATION, facility=Facility.HIPA,
has_optics=False, has_procedure=True)
self.appname = _appname
self.source_file = _abspath #required for HDF
self.source_file = _abspath # required for HDF
self.elog_enum = ElogHIPA()
self.message_elog = None
self.default_idx = self.settings.data['Parameters']['accelerator'][
'data']['value']
self.accelerator = self.ring_cyclotron if self.default_idx else \
self.injector_2
#self.from_hdf = False in base class
self.message = ""
# self.from_hdf = False in base class
self.message = ''
self.gui = AppGui(self)
def prepare_results_message(self):
"""Prepare results message
"""
"""
try:
self.no_turns = self.all_data["Processed data"]["nturns"]
lag_full = self.all_data["Processed data"]["lag"]
delay = self.all_data["Processed data"]["delay"]
self.no_turns = self.all_data['Processed data']['nturns']
lag_full = self.all_data['Processed data']['lag']
delay = self.all_data['Processed data']['delay']
except KeyError:
self.message = ""
self.message_elog = ""
self.message = ''
self.message_elog = ''
return
try:
self.accelerator = self.all_data["Input data"]["accelerator"]
self.accelerator = self.all_data['Input data']['accelerator']
except KeyError as ex:
self.logger.debug("KeyError {0}".format(ex))
try:
self.accelerator = self.all_data["Input data"]['qtabdata']
self.logger.debug(f'KeyError {ex}')
try:
self.accelerator = self.all_data['Input data']['qtabdata']
except KeyError as ex:
self.logger.debug("KeyError {0}".format(ex))
self.logger.debug(f'KeyError {ex}')
_mess = "Reanalysis from HDF5. " if self.from_hdf else ""
mess = 'Reanalysis from HDF5. ' if self.from_hdf else ''
self.message_elog = (
_mess +
'''
No. turns measured in the {0} = {1} ({2:.2f}) <br>
mess +
'''No. turns measured in the {0} = {1:0.0f} ({2:.2f}) <br>
lag = {3}, delay = {4:.3f} \u00B5s<br>'''.format(
self.accelerator, int(self.no_turns), self.no_turns,
self.accelerator, (self.no_turns), self.no_turns,
lag_full, delay*10**6))
self.message = (
_mess +
mess +
'''
No. turns measured in the {0} = {1} ({2:.2f})
No. turns measured in the {0} = {1:0.0f} ({2:.2f})
lag = {3}, delay = {4:.3f} \u00B5s'''.format(
self.accelerator, int(self.no_turns), self.no_turns,
self.accelerator, self.no_turns, self.no_turns,
lag_full, delay*10**6))
def prepare_elog_message(self):
"""Define elog parameters and define message
"""
'''Define elog parameters and define message
'''
self.projekt_idx = self.elog_enum.projekt.NONE
self.system_idx = self.elog_enum.system.BEAMDYNAMICS
self.eintrag_idx = self.elog_enum.eintrag.INFO
self.ort_idx = self.elog_enum.ort.RING_CYCLOTRON if \
'Cyclotron' in self.accelerator else self.elog_enum.ort.INJECTOR2
'Cyclotron' in self.accelerator else self.elog_enum.ort.INJECTOR2
self.status_idx = self.elog_enum.status.NONE
self.effekt_idx = self.elog_enum.effekt.NO
self.attach_files = []
simulation = self.input_parameters["simulation"]
simulation = self.input_parameters['simulation']
if self.all_data:
if self.all_data["Input data"] is not None:
if self.all_data['Input data'] is not None:
try:
simulation = self.all_data["Input data"]["simulation"]
simulation = self.all_data['Input data']['simulation']
except KeyError:
simulation = self.input_parameters["simulation"]
simulation = self.input_parameters['simulation']
pass
self.logbook = "Sandkasten" if simulation else "HIPA"
self.logbook = 'Sandkasten' if simulation else 'HIPA'
self.title = _title
def verify_analysis_preconditions(self):
if self.injector_2 in self.input_parameters['accelerator']:
mess = ("Measurement procedure for Injector 2 \n" +
"has not yet been implementented.")
QMessageBox.information(self, "Injector 2", mess, QMessageBox.Ok)
mess = ('Measurement procedure for Injector 2 \n' +
'has not yet been implemented.')
QMessageBox.information(self, 'Injector 2', mess, QMessageBox.Ok)
QApplication.processEvents()
return False
return True
@Slot()
def analysis_thread_finished(self):
BaseWindow.analysis_thread_finished(self)
@@ -146,62 +137,56 @@ class StartMain(BaseWindow):
if self.all_data['Figure data'] is not None:
self.gui_frame.central_tab_widget.setCurrentIndex(1)
except KeyError:
print("No analysis performed")
print('No analysis performed')
return
else:
print("thread finished with no data")
ncanvas = len(self.settings.data["GUI"]["subResultsTabTitle"])
print('Thread finished with no data')
ncanvas = len(self.settings.data['GUI']['subResultsTabTitle'])
dict_fig = {}
dict_fig['Figure data'] = {}
for i in range(0, ncanvas):
canvas = "Canvas {0}".format(i+1)
dict_fig['Figure data'][canvas] = None
#Delete old figures
self.gui.gui_frame.canvas_update(dict_fig['Figure data'])
canvas = f'Canvas {i+1}'
dict_fig['Figure data'][canvas] = None
# Delete old figures
self.gui.gui_frame.canvas_update(dict_fig['Figure data'])
return
self.prepare_results_message()
self.show_log_message(MsgSeverity.INFO, _pymodule, utils.line_no(),
self.message)
@Slot()
def hdf_thread_finished(self):
def hdf_thread_finished(self):
BaseWindow.hdf_thread_finished(self)
self.prepare_results_message()
self.show_log_message(MsgSeverity.INFO, _pymodule, utils.line_no(),
self.message)
@Slot()
def save_to_hdf_dialog(self):
if self.from_hdf:
_mess = ("This is a repeat analysis from HDF. \n" +
"Saving duplicate data to HDF is declined.")
QMessageBox.information(self, "HDF", _mess, QMessageBox.Ok)
mess = ('This is a repeat analysis from HDF. \n' +
'Saving duplicate data to HDF is declined.')
QMessageBox.information(self, 'HDF', mess, QMessageBox.Ok)
QApplication.processEvents()
return False
BaseWindow.save_to_hdf_dialog(self)
BaseWindow.save_to_hdf_dialog(self)
@Slot()
def save_to_hdf(self, from_dialog=False):
if not self.verify_save_to_hdf():
return False
'''
if self.from_hdf:
_mess = ("This is a repeat analysis from HDF. \n" +
"Saving duplicate data to HDF is declined.")
QMessageBox.information(self, "HDF", _mess, QMessageBox.Ok)
mess = ('This is a repeat analysis from HDF. \n' +
'Saving duplicate data to HDF is declined.')
QMessageBox.information(self, 'HDF', mess, QMessageBox.Ok)
QApplication.processEvents()
return False
'''
if self.all_data is not None:
self.save_hdf_thread = self.HDFSave(self, from_dialog)
@@ -212,43 +197,33 @@ class StartMain(BaseWindow):
time.sleep(0.05) # Wait a tick
return True
def add_to_hdf(self, dataH5, proc=True, raw=False):
"""User supplied hdf data
"""
def add_to_hdf(self, datah5, proc=True, raw=False):
'''User supplied hdf data
'''
if self.all_data is not None:
#All but 'Figure data'
print("add_to_hdf")
#print(self.all_data['Rawdata'], flush=True)
#print("raw data==========>", self.all_data['Raw data']['y1'][0:5], flush=True)
#print("raw data==========>", self.all_data['Raw data']['y2'][0:5], flush=True)
#print("raw data==========>", self.all_data['Rawdata']['t'][0:5], flush=True)
_all_data = {}
_all_data ['Raw data'] = {}
print("add_to_hdf//")
_all_data['Raw data']['Input_data'] = self.all_data[
all_data = {}
all_data['Raw data'] = {}
all_data['Raw data']['Input_data'] = self.all_data[
'Input data']
_all_data['Raw data']['Ambient_data'] = self.all_data[
all_data['Raw data']['Ambient_data'] = self.all_data[
'Ambient data']
_all_data['Raw data']['Processed_data'] = self.all_data[
all_data['Raw data']['Processed_data'] = self.all_data[
'Processed data']
_all_data['Raw data']['Raw_data'] = self.all_data[
all_data['Raw data']['Raw_data'] = self.all_data[
'Raw data']
#_all_data['Raw_data'] = self.all_data['Rawdata']
#del self.all_data['Figure data']
h5_storage.saveH5Recursive(
self.hdf_filename, _all_data['Raw data'], dataH5)
self.hdf_filename, all_data['Raw data'], datah5)
@Slot()
def send_to_elog(self):
"""Override abstract method
"""
'''Override abstract method
'''
@Slot()
def save_fig_thread_finished():
"""Can take a few seconds to send to elog,
'''Can take a few seconds to send to elog,
hence choose to do this in a thread.
"""
'''
time.sleep(0.2)
if self.all_data:
@@ -266,8 +241,6 @@ class StartMain(BaseWindow):
time.sleep(0.5)
self.prepare_elog_message()
print(self.message_elog, flush=True)
if not self.all_data:
QSendToELOG(self, logbook=self.logbook,
@@ -286,13 +259,13 @@ class StartMain(BaseWindow):
if not os.path.exists(folder_name):
os.makedirs(folder_name)
time_in_seconds = self.all_data["Ambient data"]["Time in seconds"]
time_in_seconds = self.all_data['Ambient data']['Time in seconds']
try:
reanalysis_time = self.all_data["Processed data"][
"Reanalysis time in seconds"]
reanalysis_time = self.all_data['Processed data'][
'Reanalysis time in seconds']
except KeyError:
reanalysis_time = None
self.folder_name = folder_name
save_fig_thread = self.SaveFigureThread(
@@ -302,42 +275,42 @@ class StartMain(BaseWindow):
save_fig_thread.start()
time.sleep(0.05)
def save_to_epics(self):
""" Write the number of turns calculated to an EPICS PV
"""
def save_to_epics(self):
''' Write the number of turns calculated to an EPICS PV
'''
if not BaseWindow.verify_save_to_epics(self):
return False
if self.from_hdf:
_mess = ("This is a repeat analysis from HDF. \n" +
"Analysis results are not saved to EPICS")
QMessageBox.information(self, "EPICS", _mess, QMessageBox.Ok)
mess = ('This is a repeat analysis from HDF. \n' +
'Analysis results are not saved to EPICS')
QMessageBox.information(self, 'EPICS', mess, QMessageBox.Ok)
QApplication.processEvents()
return False
dict_bunch = {}
debug = True
dry_run = False
nturns = int(self.no_turns)
pv = self.settings.data["PV"]["Cyclotron"]["nturns"]
dict_bunch[pv] = nturns
if not dry_run:
status, status_list = self.send_to_epics(dict_bunch)
simulation = self.input_parameters['simulation']
if not simulation:
dict_bunch = {}
nturns = round(self.no_turns)
pv = self.settings.data['PV']['Cyclotron']['nturns']
dict_bunch[pv] = nturns
status,_ = self.send_to_epics(dict_bunch)
if status == self.cyca.ICAFE_NORMAL:
message = "Saved data to EPICS; No turns = {0}".format(nturns)
mess = f'Saved data to EPICS; No turns = {nturns}'
sev = MsgSeverity.INFO
else:
message = "Value (nturns={0}) not saved to epics".format(nturns)
mess = f'Value (nturns={nturns}) not saved to epics'
sev = MsgSeverity.ERROR
self.show_log_message(sev.name, _pymodule, utils.line_no(), message)
self.show_log_message(sev.name, _pymodule, utils.line_no(), mess)
return True
@Slot()
def closeEvent(self, event):
""" Close application only if conditions allow
"""
'''Close application only if conditions allow
'''
if not self.verify_close_event():
event.ignore()
return
@@ -346,16 +319,17 @@ class StartMain(BaseWindow):
@Slot()
def show_about(self):
""" Behind the scences information
"""
'''Behind the scences information
'''
QApplication.processEvents()
QMessageBox.about(
self, "About",
self, 'About',
"""<b>{0}</b> v {1}
<p>Copyright &copy; Paul Scherrer Institut (PSI).
All rights reserved.</p>
<p>Authors: P.-A. Duperrex, J. Chrin, A. Facchetti, W. Koprek, </p>
<p>A python implementation of the LabVIEW measurement developed by P.-A. Duperrex <br>
<p>A python implementation of the LabVIEW measurement developed
by P.-A. Duperrex <br>
Ref: P.-A. Duperrex and A. Facchetti <br>
'Number of Turn Measurements on the HIPA Cyclotrons at PSI' <br>
doi:10.18429/JACoW-IPAC2018-WEPAL067 </p>
@@ -371,31 +345,28 @@ class StartMain(BaseWindow):
platform.system()))
QApplication.processEvents()
@Slot()
def show_help(self):
""" Invoke help pages from tina_resources
"""
index_html = "index.html"
help_base = ":"
'''Invoke help pages from tina_resources
'''
index_html = 'index.html'
help_base = ':'
help_page = HelpBrowser(help_base, index_html, self)
help_page.show()
#########################################################################
if __name__ == "__main__":
if __name__ == '__main__':
app = QApplication(sys.argv)
splash = BaseWindow.initialize_application(
app, appname=_appname, delay=5, facility=Facility.HIPA)
myapp = StartMain()
myapp.show()
if splash is not None:
splash.finish(myapp)
app.exec_()

tina.sh: 24 lines changed

@@ -22,7 +22,8 @@ _EPICS_HOST_ARCH=${RHREL}-x86_64
# Select Python Version here. Currently one of 3.5, 3.7, 3.8 and 3.10
PYTHON_VERSION=3.10
PYTHON_PATH=.:/opt/gfa/cafe/python/pycafe/cafe-1.20.0-gcc-7.5.0/lib/${_EPICS_HOST_ARCH}:/hipa/bd/applications/deps/apps4ops/v1.12.0
#cafe-1.20.0-gcc-7.5.0
PYTHON_PATH=.:/opt/gfa/cafe/python/pycafe/cafe-1.21.0/lib/${_EPICS_HOST_ARCH}:/hipa/bd/applications/deps/apps4ops/v1.12.0
if [ "$1" ]; then
@@ -30,21 +31,24 @@ if [ "$1" ]; then
echo "Using default version $PYTHON_VERSION"
elif [ "$1" == "3.7" -o "$1" == "37" ]; then
PYTHON_VERSION=3.7
PYTHON_PATH=.:/opt/gfa/cafe/python/pycafe/cafe-1.20.0-gcc-7.3.0/lib/${_EPICS_HOST_ARCH}:/hipa/bd/applications/deps/apps4ops/v1.12.0
module unload gcc
module load gcc/7.3.0
#cafe-1.20.0-gcc-7.3.0
PYTHON_PATH=.:/opt/gfa/cafe/python/pycafe/cafe-1.21.0/lib/${_EPICS_HOST_ARCH}:/hipa/bd/applications/deps/apps4ops/v1.12.0
#module unload gcc
#module load gcc/10.4.0
elif [ "$1" == "3.8" -o "$1" == "38" ]; then
PYTHON_VERSION=3.8
PYTHON_PATH=.:/opt/gfa/cafe/python/pycafe/cafe-1.19.3/lib/${_EPICS_HOST_ARCH}:/hipa/bd/applications/deps/apps4ops/v1.12.0
module unload gcc
module load gcc/7.5.0
#cafe-1.19.3
PYTHON_PATH=.:/opt/gfa/cafe/python/pycafe/cafe-1.21.0/lib/${_EPICS_HOST_ARCH}:/hipa/bd/applications/deps/apps4ops/v1.12.0
#module unload gcc
#module load gcc/7.5.0
elif [ "$1" == "3.10" -o "$1" == "310" ]; then
PYTHON_VERSION=3.10
PYTHON_PATH=.:/opt/gfa/cafe/python/pycafe/cafe-1.20.0-gcc-7.5.0/lib/${_EPICS_HOST_ARCH}:/hipa/bd/applications/deps/apps4ops/v1.12.0
module unload gcc
module load gcc/7.5.0
#cafe-1.20.0-gcc-7.5.0
PYTHON_PATH=.:/opt/gfa/cafe/python/pycafe/cafe-1.21.0/lib/${_EPICS_HOST_ARCH}:/hipa/bd/applications/deps/apps4ops/v1.12.0
#module unload gcc
#module load gcc/7.5.0
else
echo "Requested Python version is not supported"
echo "Using default version $PYTHON_VERSION"