From b82b023f339bdfa9e7582aad3f15dd04bd8fb71a Mon Sep 17 00:00:00 2001
From: Sven Reiche
Date: Tue, 23 May 2023 16:39:27 +0200
Subject: [PATCH] Filling template for the interface module (loading, saving,
 elog, etc.)

---
 app/adaptiveorbit.py  | 149 ++++++++++++++++++------
 interface/__init__.py |   0
 interface/elog.py     |  25 ++++
 interface/load.py     |  74 ++++++++++++
 interface/save.py     | 150 ++++++++++++++++++++++++
 interface/snap.py     | 261 ++++++++++++++++++++++++++++++++++++++++++
 6 files changed, 622 insertions(+), 37 deletions(-)
 create mode 100644 interface/__init__.py
 create mode 100644 interface/elog.py
 create mode 100644 interface/load.py
 create mode 100644 interface/save.py
 create mode 100644 interface/snap.py

diff --git a/app/adaptiveorbit.py b/app/adaptiveorbit.py
index 48ee0d7..5c78766 100644
--- a/app/adaptiveorbit.py
+++ b/app/adaptiveorbit.py
@@ -1,53 +1,128 @@
 import time
 import numpy as np
-from sfbd.ext.reichebscombined import ReicheBSCombined
-from slic.core.adjustable import PVAdjustable
+from bstrd import BSCache
+from bstrd.bscache import make_channel_config, is_available
+from epics import PV
 
 class AdaptiveOrbit:
     """
     Wrapper class to bundle all daq/io needed for adaptive orbit feedback.
     """
-    def __init__(self,beamline='Aramis'):
-        self.beamline=None
-        self.sensor = None
-        self.actuator = None
-        self.getBeamline(beamline)
+    def __init__(self):
+
+        # Aramis Channels
+        self.ARch0 = 'SARFE10-PBIG050-EVR0:CALCI'
+        self.ARchx = ['SARUN%2.2d-DBPM070:X1' % id for id in range(1,17)]
+        self.ARchy = ['SARUN%2.2d-DBPM070:Y1' % id for id in range(1,17)]
+        self.bsAR = self.initBSStream([self.ARch0]+self.ARchx+self.ARchy)
+        self.pvAR = self.initPV(self.ARchx)
+        self.kickerAR = self.initPV(['SARMA02-MCRX050:I-SET','SARMA02-MCRY050:I-SET','SARUN02-MCRX080:I-SET','SARUN02-MCRY080:I-SET','SFB_ORBIT_SAR:ONOFF1'])
+
+        # Athos Channels
+        self.ATch0 = 'SATFE10-PEPG046-EVR0:CALCI'
+        self.ATchx = []
+        self.ATchy = []
+        for bpm in range(5,23):
+            idx = '070'
+            if bpm == 5 or bpm == 14:
+                idx = '410'
+            self.ATchx.append('SATUN%2.2d-DBPM%s:X1' % (bpm,idx))
+            self.ATchy.append('SATUN%2.2d-DBPM%s:Y1' % (bpm,idx))
+        self.bsAT = self.initBSStream([self.ATch0]+self.ATchx+self.ATchy)
+        self.pvAT = self.initPV(self.ATchx)
+        self.kickerAT = self.initPV(['SATMA01-MCRX610:I-SET','SATMA01-MCRY610:I-SET','SATUN05-MCRX420:I-SET','SATUN05-MCRY420:I-SET','SFB_ORBIT_SAT:ONOFF1'])
+
+        # select first beamline
+        self.isAramis = True
+
+    def initBSStream(self,channels):
+        print("Initializing BSstream")
+        bs = BSCache()
+        bs.stop()
+        for cnl in channels[1:]:
+            if not is_available(cnl):
+                raise ValueError(f"BS-Channel {cnl} is not available")
+            res = make_channel_config(cnl,None,None)
+            bs.channels[res] = res
+        bs.get_var(channels[0])   # this also starts the stream into the cache
+        return bs
+
+    def initPV(self,chx):
+        print("Initializing EPICS Channels")
+        pvs = []
+        for x in chx:
+            pvs.append(PV(x.replace(':X1',':X-REF-FB')))
+            pvs.append(PV(x.replace(':X1',':Y-REF-FB')))
+        con = [pv.wait_for_connection(timeout=0.2) for pv in pvs]
+        for i, val in enumerate(con):
+            if val is False:
+                name = pvs[i].pvname
+                raise ValueError(f"PV-Channel {name} is not available")
+        return pvs
+
+    def terminate(self):
+        print('Stopping BSStream Thread...')
+        self.bsAR.stop()
+        self.bsAR.pt.running.clear()   # the polling thread also needs its running flag cleared explicitly
+        self.bsAT.stop()
+        self.bsAT.pt.running.clear()
+
+    def getBeamline(self,beamline):
+        if beamline == 'Aramis':
+            self.isAramis = True
+        else:
+            self.isAramis = False
+
+    # all routines for accessing the machine (read/write)
 
     def read(self):
-        if not self.sensor or not self.beamline:
-            return None
-        data=self.sensor.get_current_value()
-        if not data:
-            return None
-
-        retval={'pid':data['pid'],
-                'Signal':data[self.channel0[0]],
-                'X':np.array([data[i] for i in self.channelX]),
-                'Y':np.array([data[i] for i in self.channelY])}
-        return retval
+        if self.isAramis:
+            data = next(self.bsAR)
+            return data['pid'],data[self.ARch0],np.array([data[cnl] for cnl in self.ARchx]),np.array([data[cnl] for cnl in self.ARchy])
+        data = next(self.bsAT)
+        return data['pid'],data[self.ATch0],np.array([data[cnl] for cnl in self.ATchx]),np.array([data[cnl] for cnl in self.ATchy])
 
-    def read_adj(self):
-        if not self.actuator or not self.beamline:
-            return None
-        return [pv.get_current_value() for pv in self.actuator]
+    def readPV(self):
+        if self.isAramis:
+            return [pv.value for pv in self.pvAR]
+        return [pv.value for pv in self.pvAT]
 
-    def getBeamline(self,beamline='Aramis'):
-        if beamline.upper() == 'ARAMIS':
-            self.beamline='Aramis'
-            self.channelX=['SARUN%2.2d-DBPM070:X1' % id for id in range(1,17)]
-            self.channelY=['SARUN%2.2d-DBPM070:Y1' % id for id in range(1,17)]
-            self.channel0 = ['SARFE10-PBIG050-EVR0:CALCI']
-            FB_bpms = [x.replace(':X1',':X-REF-FB') for x in self.channelX] + [x.replace(':Y1',':Y-REF-FB') for x in self.channelY]
-        elif beamline.upper() == 'ATHOS':
-            self.beamline=None
-        else:
-            self.beamline=None
+    def setPV(self,fbval):
+        if self.isAramis:
+            for i in range(len(fbval)):
+                self.pvAR[i].value = fbval[i]
+            return
+        for i in range(len(fbval)):
+            self.pvAT[i].value = fbval[i]
+
+    def getPVNames(self):
+        if self.isAramis:
+            return [pv.pvname for pv in self.pvAR]
+        return [pv.pvname for pv in self.pvAT]
+
+    def getDetectorName(self):
+        if self.isAramis:
+            return self.ARch0
+        return self.ATch0
+
+    def getKicker(self):
+        if self.isAramis:
+            return [pv.value for pv in self.kickerAR]
+        return [pv.value for pv in self.kickerAT]
+
+    def setKicker(self,vals):
+        return   # note: this early return disables the kicker writes below
+        if self.isAramis:
+            for i,val in enumerate(vals):
+                self.kickerAR[i].value = val
             return
-        channels=self.channel0+self.channelX+self.channelY
-        self.sensor = ReicheBSCombined('AdapiveOrbit',channels)
-        self.actuator = [PVAdjustable(pv) for pv in FB_bpms]
+        for i,val in enumerate(vals):
+            self.kickerAT[i].value = val
+
-
+
+
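Note (outside the patch): a minimal sketch of how the reworked AdaptiveOrbit class could be driven. The import path, pulse count and the commented-out write-back are illustrative assumptions; running it requires the SwissFEL control system (bstrd and EPICS access).

    from app.adaptiveorbit import AdaptiveOrbit   # import path assumed from the repository layout

    ao = AdaptiveOrbit()
    ao.getBeamline('Aramis')                      # or 'Athos'
    try:
        for _ in range(100):                      # e.g. evaluate 100 pulses
            pid, signal, x, y = ao.read()         # synchronized BS data for one pulse
            ref = ao.readPV()                     # current X/Y reference-orbit feedback values
            # ... compute updated reference values from (signal, x, y) here ...
            # ao.setPV(new_ref)                   # write back once the algorithm is trusted
    finally:
        ao.terminate()                            # stop the BSCache polling threads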
diff --git a/interface/__init__.py b/interface/__init__.py
new file mode 100644
index 0000000..e69de29
diff --git a/interface/elog.py b/interface/elog.py
new file mode 100644
index 0000000..b8660df
--- /dev/null
+++ b/interface/elog.py
@@ -0,0 +1,25 @@
+import elog
+
+def write(text, Title = 'Test', Application = 'SFBD-Module', Attachment = None):
+    """
+    Generates an entry in the electronic logbook of SwissFEL Commissioning Data
+    :param text: The text to be placed in the log book
+    :param Title: Title of the log book entry
+    :param Application: Name of the application which generates the log book entry
+    :param Attachment: List of attachments to be added to the log book (mostly plots)
+    :return: Message ID of the log book entry
+    """
+
+    # supplemental info
+    Author = 'sfop'
+    Category = 'Measurement'   # Info or Measurement
+    System = 'Beamdynamics'    # Beamdynamics, Operation, Controls
+
+    dict_att = {'Author': Author, 'Application': Application, 'Category': Category, 'Title': Title, 'System': System}
+    print('\nLog book entry generated')
+
+    logbook = elog.open('https://elog-gfa.psi.ch/SwissFEL+commissioning+data/', user='robot', password='robot')
+    return logbook.post(text, attributes=dict_att, attachments=Attachment)
+
+
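Note (outside the patch): a usage sketch for the new elog helper; the message text and the attachment file name are placeholders.

    from interface import elog

    msg_id = elog.write('Adaptive orbit feedback run finished.',
                        Title='Adaptive Orbit', Application='AdaptiveOrbit',
                        Attachment=['orbit_convergence.png'])   # placeholder file name
    print('Logbook entry:', msg_id)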
diff --git a/interface/load.py b/interface/load.py
new file mode 100644
index 0000000..01d939f
--- /dev/null
+++ b/interface/load.py
@@ -0,0 +1,74 @@
+import sys
+import os
+import datetime
+import logging
+import socket
+import h5py
+
+# add other classes
+#sys.path.append('/sf/bd/packages/SFBD/src')
+
+class Load:
+    def __init__(self, logger = None, program = 'SFBD', version = 'v1.0.0'):
+
+        self.program = program
+        self.version = version
+        self.author = 'S. Reiche'
+        self.file = None
+
+        if logger is None:
+            logging.basicConfig(level=logging.INFO,
+                                format='%(levelname)-8s %(message)s')
+            self.logger = logging.getLogger(self.program)
+            self.logger.info('Load class started at %s' % datetime.datetime.now().strftime('%Y-%m-%d %H:%M:%S'))
+            self.logger.info('Version: %s ' % self.version)
+            self.logger.info('Host: %s' % socket.gethostname())
+        else:
+            self.logger = logger
+
+    def open(self,filename):
+        self.file = h5py.File(filename, "r")
+
+    def close(self):
+        if self.file is not None:
+            self.file.close()
+            self.file = None
+
+    def loadSnap(self):
+        snap = {}
+        if 'experiment' not in self.file.keys():
+            return snap
+        for key1 in self.file['experiment'].keys():
+            for key2 in self.file['experiment'][key1].keys():
+                val = self.file['experiment'][key1][key2][()]
+                snap[key1+':'+key2] = {'val':val}
+        return snap
+
+    def loadData(self,scanrun=1):
+        run = 'scan_%d' % scanrun
+        data = {}
+        for key1 in self.file[run]['data'].keys():
+            for key2 in self.file[run]['data'][key1].keys():
+                val = self.file[run]['data'][key1][key2][()]
+                data[key1+':'+key2] = val
+        return data
+
+    def loadActuator(self,scanrun=1):
+        run = 'scan_%d' % scanrun
+        data = {}
+        if 'actuators' in self.file[run]['method'].keys():
+            for key1 in self.file[run]['method']['actuators'].keys():
+                for key2 in self.file[run]['method']['actuators'][key1].keys():
+                    val = self.file[run]['method']['actuators'][key1][key2][()]
+                    data[key1+':'+key2] = {'val':val}
+        return data
+
+
+
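Note (outside the patch): a sketch of reading back a measurement file with the new Load class; the file name is a placeholder, and the group layout follows what the Save class below writes.

    from interface.load import Load

    loader = Load(program='SFBD')
    loader.open('SFBD_example.h5')          # placeholder file name
    snap = loader.loadSnap()                # {'DEVICE:CHANNEL': {'val': ...}, ...}
    data = loader.loadData(scanrun=1)       # raw data arrays per channel
    act  = loader.loadActuator(scanrun=1)   # actuator set values, if present
    loader.close()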
diff --git a/interface/save.py b/interface/save.py
new file mode 100644
index 0000000..9517fd0
--- /dev/null
+++ b/interface/save.py
@@ -0,0 +1,150 @@
+import sys
+import os
+import datetime
+import h5py
+import logging
+import socket
+
+# add other classes
+#sys.path.append('/sf/bd/packages/SFBD/src')
+
+class Save:
+    def __init__(self, logger = None, program = 'SFBD', version = 'v1.0.0'):
+
+        self.program = program
+        self.version = version
+        self.author = 'S. Reiche'
+        self.filename = None
+        self.file = None
+
+        if logger is None:
+            logging.basicConfig(level=logging.INFO,
+                                format='%(levelname)-8s %(message)s')
+            self.logger = logging.getLogger(self.program)
+            self.logger.info('Save class started at %s' % datetime.datetime.now().strftime('%Y-%m-%d %H:%M:%S'))
+            self.logger.info('Version: %s ' % self.version)
+            self.logger.info('Host: %s' % socket.gethostname())
+        else:
+            self.logger = logger
+
+    def open(self):
+        year = datetime.datetime.now().strftime('%Y')
+        month = datetime.datetime.now().strftime('%m')
+        day = datetime.datetime.now().strftime('%d')
+
+        path = '/sf/data/measurements/%s' % year
+        if not os.path.exists(path):
+            os.makedirs(path)
+        path = '%s/%s' % (path,month)
+        if not os.path.exists(path):
+            os.makedirs(path)
+        path = '%s/%s' % (path,day)
+        if not os.path.exists(path):
+            os.makedirs(path)
+        datetag = datetime.datetime.now().strftime('%Y_%m_%d_%H_%M_%S_%f')
+        self.filename = ('%s/%s_%s' % (path, self.program.replace(' ','_'), datetag))
+        self.file = h5py.File(self.filename+'.h5', "w")
+
+        # meta data header
+        dt = h5py.special_dtype(vlen=bytes)
+        dset = self.file.create_dataset('general/user',(1,),dtype=dt)
+        dset[0] = os.getlogin()
+        dset = self.file.create_dataset('general/application',(1,),dtype=dt)
+        dset[0] = self.program
+        dset = self.file.create_dataset('general/author',(1,),dtype=dt)
+        dset[0] = self.author
+        dset = self.file.create_dataset('general/version',(1,),dtype=dt)
+        dset[0] = self.version
+        dset = self.file.create_dataset('general/created',(1,),dtype=dt)
+        dset[0] = str(datetime.datetime.now())
+
+    def close(self):
+        if self.file is not None:
+            self.file.close()
+            self.file = None
+
+    def writeSnap(self,val):
+        for key in val.keys():
+            name = key.split(':')
+            if 'value' in val[key].keys():
+                data = val[key]['value']
+            elif 'val' in val[key].keys():
+                data = val[key]['val']
+            else:
+                continue
+            dset = self.file.create_dataset('experiment/%s/%s' % (name[0],name[1]),data=[data])
+            dset.attrs['system'] = self.getSystem(name[0])
+            dset.attrs['units'] = 'unknown'
+
+    def writeAnalysis(self,data,scanrun=1):
+        for key1 in data.keys():
+            for key2 in data[key1].keys():
+                dset = self.file.create_dataset('scan_%d/analysis/%s/%s' % (scanrun, key1, key2),
+                                                data=data[key1][key2])
+
+    def writeData(self, data, scanrun=1):
+        if 'Shot:ID' not in data.keys():
+            return
+        shape = data['Shot:ID'].shape
+        ndim = len(shape)
+        nsam = shape[-1]
+        nrec = 0
+        if ndim > 1:
+            nrec = shape[:-1][0]
+        self.file.create_dataset("scan_%d/method/records" % scanrun,data=[nrec])
+        self.file.create_dataset("scan_%d/method/samples" % scanrun,data=[nsam])
+        self.file.create_dataset("scan_%d/method/dimension" % scanrun,data=[ndim])
+        self.file.create_dataset("scan_%d/method/reducedData" % scanrun,data=[0])   # indicates that there is at least a 2D array for scalar data
+        # write the sensor raw values
+        for ele in data.keys():
+            name = ele.split(':')
+            dset = self.file.create_dataset('scan_%d/data/%s/%s' % (scanrun, name[0], name[1]), data=data[ele])
+            dset.attrs['system'] = self.getSystem(name[0])
+            dset.attrs['units'] = 'unknown'
+
+    def writeActuator(self,act,scanrun=1):
+        dt = h5py.special_dtype(vlen=bytes)
+        dset = self.file.create_dataset("scan_%d/method/type" % scanrun,(1,),dtype=dt)
+        if act.isActuator:
+            dset[0] = 'Scan'
+        else:
+            dset[0] = 'Time Recording'
+        for ele in act.actuators.keys():
+            name = ele.split(':')
+            dset = self.file.create_dataset("scan_%d/method/actuators/%s/%s" % (scanrun,name[0],name[1]),data=act.actuators[ele]['val'])
+            dset.attrs['system'] = self.getSystem(name[0])
+            dset.attrs['units'] = 'unknown'
+
+    def getSystem(self,name):
+        if len(name) > 11:
+            tag = name[8:9]
+            fulltag = name[8:12]
+        else:
+            tag = ''
+            fulltag = ''
+        system = 'Unknown'
+        if tag == 'P':
+            system = 'Photonics'
+        if tag == 'D':
+            system = 'Diagnostics'
+        if fulltag == 'DSCR':
+            system = 'Camera'
+        if tag == 'R':
+            system = 'RF'
+        if tag == 'M':
+            system = 'Magnets'
+        if tag == 'U':
+            system = 'Undulator'
+        return system
+
+
+
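Note (outside the patch): a sketch of writing a small data set with the new Save class; the channel names and array shapes are invented for illustration, and Save.open() assumes the /sf/data/measurements tree is writable.

    import numpy as np
    from interface.save import Save

    saver = Save(program='SFBD Test')
    saver.open()                                                  # creates /sf/data/measurements/<Y>/<m>/<d>/...
    saver.writeSnap({'SARUN03-MQUA080:I-SET': {'val': 1.23}})     # example machine setting
    saver.writeData({'Shot:ID': np.arange(10).reshape(2, 5),      # 2 records x 5 samples
                     'SARFE10-PBIG050-EVR0:CALCI': np.random.rand(2, 5)},
                    scanrun=1)
    saver.close()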
diff --git a/interface/snap.py b/interface/snap.py
new file mode 100644
index 0000000..4bbe1b6
--- /dev/null
+++ b/interface/snap.py
@@ -0,0 +1,261 @@
+import datetime
+import re
+import numpy as np
+import yaml
+import os
+import json
+
+import copy
+
+from threading import Thread
+
+import epics
+
+class Snap:
+    def __init__(self,filename='/sf/data/applications/snapshot/req/op/SF_settings.yaml'):
+
+        self.pvs = self.parseYAML(filename)
+        self.doAbort = False
+
+    def my_caget_many(self):
+        # this skips quite some channels - don't know why?
+
+        pvdata = {}
+        pvchids = []
+        # create channels, but don't connect or create callbacks yet
+        for name in self.pvs:
+            chid = epics.ca.create_channel(name, connect=False, auto_cb=False)   # note 1
+            pvchids.append(chid)
+
+        # connect
+        for chid in pvchids:
+            print(epics.ca.name(chid))
+            epics.ca.connect_channel(chid)
+
+        # request get, but do not wait for result
+        epics.ca.poll()
+        for chid in pvchids:
+            print(epics.ca.name(chid))
+            epics.ca.get(chid, wait=False)   # note 2
+
+        # now wait for get() to complete
+        epics.ca.poll()
+        for chid in pvchids:
+            print(epics.ca.name(chid))
+            val = epics.ca.get_complete(chid,timeout=0.5)
+            if val is not None:
+                pvdata[epics.ca.name(chid)] = np.array([val])
+        epics.ca.clear_cache()
+        return pvdata
+
+    def abort(self):
+        self.doAbort = True
+        print('Aborting Snap')
+
+    def getSnapValues(self,force=True):
+        self.doAbort = False
+        ret = {}
+        if self.pvs:
+#            ret = self.my_caget_many()
+            val = epics.caget_many(self.pvs)
+            for i,pv in enumerate(self.pvs):
+                if val[i] is not None:   # filter out None values
+                    ret[pv] = {'val':float(val[i])}
+            epics.ca.clear_cache()
+        return ret,{}
+
+    #-------------
+    # routines to parse the OP YAML file
+
+    def applyMacro(self,pvs_in,macros):
+        pvs = []
+        for macro in macros:
+            for key in macro:
+                tag = '$('+key+')'
+                for pv in pvs_in:
+                    if tag in pv:
+                        pvs.append(pv.replace(tag,macro[key]))
+        for pv in pvs_in:   # copy the ones without macro
+            if '$(' not in pv:
+                pvs.append(pv)
+        return pvs
+
+    def parseYAML(self,filename='/sf/data/applications/snapshot/req/op/SF_settings.yaml'):
+        pvs = []
+        path = os.path.dirname(filename)
+        with open(filename) as f:
+            try:
+                content = yaml.load(f, Loader=yaml.SafeLoader)
+                if 'include' in content.keys():
+                    if len(content['include']) > 0:
+                        for cont in content['include']:
+                            retpv = self.parseYAML(path+'/'+cont['name'])
+                            if 'macros' in cont.keys():
+                                retpv = self.applyMacro(retpv,cont['macros'])
+                            pvs = pvs + retpv
+                if 'pvs' in content.keys():
+                    if 'list' in content['pvs']:
+                        for pv in content['pvs']['list']:
+                            pvs.append(pv['name'])
+                    return pvs
+                return None
+
+            except yaml.YAMLError as e:
+                print(e)
+                return None
+        return None
+
+
+class Old:
+    # previous implementation based on the snapshot package; kept for reference
+    # (SnapshotReqFile, SnapshotJsonFile, parse_to_save_file, parse_from_save_file
+    #  and the newVersion flag are not defined in this file)
+
+    def __init__(self, filename=None, savepath=''):   # arguments restored; defaults assumed
+        self.filename = filename
+        print('Establishing snapshot with request file:', filename,flush=True)
+        self.savepath = savepath
+        self.tolerance = 0.0005
+        self.pvnames = []
+        self.pvs = []
+        self.mppvnames = []
+        self.mppvs = []
+        self.machinepar = []
+        self.message = ''
+        if self.filename:
+            self.openRequestFile(self.filename)
+
+    def openRequestFile(self, filename):
+        self.filename = filename
+        self.rootname = self.filename.split('/')[-1]
+
+        isReq = True
+        if '.yaml' in filename:
+            isReq = False
+#        req_file = SnapshotReqFile(path=str(self.filename))
+
+        if newVersion:
+            if '.yaml' in filename:
+                req_file = SnapshotJsonFile(path=str(self.filename))
+            else:
+                req_file = SnapshotReqFile(path=str(self.filename))
+            pvs_list = req_file.read()[0]
+            print('PV List:-------------------------')
+            for i in range(len(pvs_list)):
+                print(pvs_list[i])
+            print(req_file.read()[1])
+        else:
+            if '.yaml' in filename:
+                self.filename = None
+                self.rootname = None
+                print('YAML files not supported')
+                return
+            req_file = SnapshotReqFile(str(self.filename))
+            pvs_list = req_file.read()
+
+        self.pvnames.clear()
+        self.machinepar.clear()
+        for ele in pvs_list:
+            if isinstance(ele, list):
+                self.pvnames = ele
+            elif isinstance(ele, dict):
+                if 'machine_params' in ele.keys():
+                    self.machinepar = ele['machine_params']
+        Thread(target=self.connectPVs).start()
+
+    def connectPVs(self):
+        self.pvs = [epics.PV(pv, auto_monitor=False) for pv in self.pvnames]
+        con = [pv.wait_for_connection(timeout=0.2) for pv in self.pvs]
+        pvscon = []
+        for i, val in enumerate(con):
+            if val is False:
+                print('Cannot connect to PV:', self.pvs[i].pvname,flush=True)
+            else:
+                pvscon.append(self.pvs[i])
+        self.pvs = copy.deepcopy(pvscon)
+        if isinstance(self.machinepar,list):
+            self.mppvs = [epics.PV(self.machinepar[key], auto_monitor=False) for key in self.machinepar]
+        else:
+            self.mppvs = [epics.PV(self.machinepar[key], auto_monitor=False) for key in self.machinepar.keys()]
+        con = [pv.wait_for_connection(timeout=0.2) for pv in self.mppvs]
+        pvscon.clear()
+        for i, val in enumerate(con):
+            if val is False:
+                print('Cannot connect to mPV:', self.mppvs[i].pvname,flush=True)
+            else:
+                pvscon.append(self.mppvs[i])
+        self.mppvs = copy.deepcopy(pvscon)
+
+    def getSnapValues(self, force=True):
+        values = {}
+        val = [pv.get(timeout=0.6, use_monitor=False) for pv in self.pvs]
+        for i, pv in enumerate(self.pvs):
+            if val[i] is None:
+                if force:
+                    continue
+                else:
+                    return False
+            else:
+                values[pv.pvname] = {
+                    "raw_name": pv.pvname, "val": val[i], "EGU": pv.units, "prec": pv.precision}
+        mvalues = {}
+        val = [pv.get(timeout=0.6, use_monitor=False) for pv in self.mppvs]
+        for i, pv in enumerate(self.mppvs):
+            if val[i] is None:
+                if force:
+                    continue
+                else:
+                    return False
+            else:
+                mvalues[pv.pvname] = {"value": val[i],
+                                      "units": pv.units, "precision": pv.precision}
+        return values, mvalues
+
+    def save(self, labels=[], comment="Generated by SFBD-Package", force=True):
+        if self.filename is None:
+            self.message = 'No Request File Loaded'
+            return False
+
+        val, mval = self.getSnapValues(force)
+        if isinstance(val, bool) and val is False:
+            self.message = 'Unsuccessful reading of PV channels (unforced access)'
+            return False
+
+        # construct file name
+        datetag = datetime.datetime.now().strftime('%Y%m%d_%H%M%S')
+        root = self.rootname.split('.req')[0]
+        files = self.savepath+root+'_'+datetag+'.snap'
+        filel = self.savepath+root+'_latest.snap'
+
+        # reshuffle from mval to keyword based machine values
+        mmval = {}
+        for key in self.machinepar.keys():
+            if self.machinepar[key] in mval.keys():
+                mmval[key] = mval[self.machinepar[key]]
+        # save file
+        parse_to_save_file(
+            val, files, macros=None, symlink_path=filel, comment=comment,
+            labels=[],
+            req_file_name=self.rootname, machine_params=mmval)
+        self.message = 'Snapshot saved to '+files
+        return True
+
+    def restore(self,filename,refilter='',force=True):
+        filepath = self.savepath+filename
+        prog = re.compile(refilter)
+        save_pvs = parse_from_save_file(filepath)
+        res = {}
+        for ele in save_pvs:
+            if isinstance(ele,dict):
+                for key in ele.keys():
+                    if prog.match(key):
+                        res[key] = ele[key]['value']
+
+        for pv in self.pvs:
+            if pv.pvname in res.keys():
+                val = pv.get()
+                if val is None or np.abs(val-res[pv.pvname]) > self.tolerance:
+                    pv.put(res[pv.pvname])
+        self.message = 'Snap restored'
+
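Note (outside the patch): Snap.getSnapValues returns the dictionary layout that Save.writeSnap accepts, so the two can be chained; a minimal sketch, assuming the default OP request file is readable and the listed PVs are reachable.

    from interface.snap import Snap
    from interface.save import Save

    snap = Snap()                       # parses the OP YAML request file (default path)
    values, _ = snap.getSnapValues()    # {'PV:NAME': {'val': ...}, ...}; unreachable PVs are skipped

    saver = Save(program='SFBD Snapshot')
    saver.open()
    saver.writeSnap(values)             # stored under /experiment/<device>/<channel>
    saver.close()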