Compare commits
40 Commits
| Author | SHA1 | Date |
|---|---|---|
| 5a9a849b0d | |||
| d36fbab34d | |||
| 9a030f7a88 | |||
| 1f220ec323 | |||
| 14d37c3e67 | |||
| 14d0db7fad | |||
| 2267992699 | |||
| 26998e15dd | |||
| 100be302e9 | |||
| 1d933a66ce | |||
| a5189537bb | |||
| b221a7f964 | |||
| 127f3ef9db | |||
| 8de87ed1a9 | |||
| 423e724022 | |||
| fa67b2fab9 | |||
| 716d1cbf2a | |||
| 2a0c5cf049 | |||
| f5b6f29efc | |||
| 68cd4d5633 | |||
| 52e126c1af | |||
| bca4098438 | |||
| 964da0a16f | |||
| f2c07f0c98 | |||
| 36b7a2980a | |||
| 5e06e49c45 | |||
| 01be4a5203 | |||
| e603069ffd | |||
| 26eebc650b | |||
| 24cbccb6a6 | |||
| d1fb0b3462 | |||
| ba405e6967 | |||
| a07f1bcf42 | |||
| 2906c9cc44 | |||
| 6fa15dd9d6 | |||
| 36eacba046 | |||
| a0b5ce4c58 | |||
| 4ed4630b8d | |||
| fff0c81070 | |||
| 25e717e132 |
@@ -1,4 +1,5 @@
|
||||
from .adaptiveorbit import AdaptiveOrbit
|
||||
from .adaptiveorbit_new import AdaptiveOrbitNew
|
||||
from .spectralanalysis import SpectralAnalysis
|
||||
from .hero import LaserPower,EnergyModulation
|
||||
from .dispersiontools import Dispersion
|
||||
|
||||
@@ -2,7 +2,7 @@ import time
|
||||
import numpy as np
|
||||
|
||||
from bstrd import BSCache
|
||||
from bstrd.bscache import make_channel_config, is_available
|
||||
from bstrd.bscache import is_available
|
||||
from epics import PV
|
||||
|
||||
class AdaptiveOrbit:
|
||||
@@ -31,31 +31,84 @@ class AdaptiveOrbit:
|
||||
self.ATchy.append('SATUN%2.2d-DBPM%s:Y1' % (bpm,idx))
|
||||
self.bsAT = self.initBSStream([self.ATch0]+self.ATchx+self.ATchy)
|
||||
self.pvAT = self.initPV(self.ATchx)
|
||||
self.kickerAR = self.initPV(['SATMA01-MCRX610:I-SET','SATMA01-MCRY610:I-SET','SATUN05-MCRX420:I-SET','SATUN05-MCRY420:I-SET','SFB_ORBIT_SAT:ONOFF1'])
|
||||
self.kickerAT = self.initPV(['SATMA01-MCRX610:I-SET','SATMA01-MCRY610:I-SET','SATUN05-MCRX420:I-SET','SATUN05-MCRY420:I-SET','SFB_ORBIT_SAT:ONOFF1'])
|
||||
|
||||
# select first beamline
|
||||
self.isAramis = True
|
||||
|
||||
|
||||
def getChannels(self,beamline,bunch):
|
||||
if beamline == 'Aramis':
|
||||
ch0 = ['SARFE10-PBIG050-EVR0:CALCI']
|
||||
chx = ['SARUN%2.2d-DBPM070:X%d' % (bpm,bunch) for bpm in range(1,17)]
|
||||
chy = ['SARUN%2.2d-DBPM070:Y%d' % (bpm,bunch) for bpm in range(1,17)]
|
||||
pvFB = [ch.split[':'][0]+':X-REF-FB' for ch in chx]+[ch.split[':'][0]+':Y-REF-FB' for ch in chx]
|
||||
pvKick=['SARMA02-MCRX050:I-SET','SARMA02-MCRY050:I-SET','SARUN02-MCRX080:I-SET','SARUN02-MCRY080:I-SET','SFB_ORBIT_SAR:ONOFF1']
|
||||
elif beamline == 'Athos':
|
||||
ch0 = ['SATFE10-PEPG046-EVR0:CALCI']
|
||||
chx=[]
|
||||
chy=[]
|
||||
for bpm in range(5,23):
|
||||
idx = '070'
|
||||
if bpm == 5 or bpm ==14:
|
||||
idx='410'
|
||||
chx.append('SATUN%2.2d-DBPM%s:X%d' % (bpm,idx,bunch))
|
||||
chy.append('SATUN%2.2d-DBPM%s:Y%d' % (bpm,idx,bunch))
|
||||
pvFB = [ch.split[':'][0]+':X-REF-FB' for ch in chx]+[ch.split[':'][0]+':Y-REF-FB' for ch in chx]
|
||||
pvKick=['SATMA01-MCRX610:I-SET','SATMA01-MCRY610:I-SET','SATUN05-MCRX420:I-SET','SATUN05-MCRY420:I-SET','SFB_ORBIT_SAT:ONOFF1']
|
||||
else:
|
||||
return None
|
||||
print("Initializing BSStream for beamline: %s and Bunch %d" % (beamline,bunch))
|
||||
self.ch = ch0+chx+chy
|
||||
print("Initializing EPICS Channels")
|
||||
self.PVFB=initPV_new(pvFB)
|
||||
self.PVKick=initPV_new(pvKick)
|
||||
|
||||
def initBSStrem_new(self,channels):
|
||||
ok = True
|
||||
for chn in channels:
|
||||
ok &= is_available(ch)
|
||||
if ok:
|
||||
bs = BSCache(1000,receive_timeout=1000) # 1000 second time out, capazity for 1000 second.
|
||||
bs.get_vars(channels)
|
||||
else:
|
||||
for chn in channels:
|
||||
if not is_available(ch):
|
||||
print('ERROR: BS-Channel %s not found in BS-Dispatcher' % ch)
|
||||
bs = None
|
||||
return bs
|
||||
|
||||
|
||||
def initBSStream(self,channels):
|
||||
print("Initializing BSstream")
|
||||
bs = BSCache(100000,10000) # 1 second time out, capazity for 100 second.
|
||||
# bs.stop()
|
||||
# for cnl in channels[1:]:
|
||||
# if not is_available(cnl):
|
||||
# raise ValueError(f"BS-Channel {cbl} is not available")
|
||||
# res = make_channel_config(cnl,None,None)
|
||||
# bs.channels[res]=res
|
||||
# bs.get_var(channels[0]) # this starts also the stream into the cache
|
||||
bs = BSCache(1000,receive_timeout=1000) # 1000 second time out, capazity for 1000 second.
|
||||
bs.get_vars(channels)
|
||||
return bs
|
||||
|
||||
def intitPV_New(self,chx):
|
||||
pvs = []
|
||||
for x in chx:
|
||||
pvs.append(PV(x))
|
||||
con = [pv.wait_for_connection(timeout=0.2) for pv in pvs]
|
||||
for i, val in enumerate(con):
|
||||
if val is False:
|
||||
name = pv[i].pvname
|
||||
print("ERROR: PV-Channel %s is not available" % name)
|
||||
return pvs
|
||||
|
||||
|
||||
def initPV(self,chx):
|
||||
print("Initializing EPICS Channels")
|
||||
pvs = []
|
||||
for x in chx:
|
||||
pvs.append(PV(x.replace(':X1',':X-REF-FB')))
|
||||
pvs.append(PV(x.replace(':X1',':Y-REF-FB')))
|
||||
if ':X1' in x:
|
||||
pvs.append(PV(x.replace(':X1',':X-REF-FB')))
|
||||
pvs.append(PV(x.replace(':X1',':Y-REF-FB')))
|
||||
elif ':X2' in x:
|
||||
pvs.append(PV(x.replace(':X2',':X-REF-FB')))
|
||||
pvs.append(PV(x.replace(':X2',':Y-REF-FB')))
|
||||
else:
|
||||
pvs.append(PV(x))
|
||||
con = [pv.wait_for_connection(timeout=0.2) for pv in pvs]
|
||||
for i, val in enumerate(con):
|
||||
if val is False:
|
||||
@@ -63,6 +116,8 @@ class AdaptiveOrbit:
|
||||
raise ValueError(f"PV-Channel {name} is not available")
|
||||
return pvs
|
||||
|
||||
|
||||
|
||||
def flush(self):
|
||||
self.bsAR.flush()
|
||||
self.bsAT.flush()
|
||||
@@ -78,7 +133,7 @@ class AdaptiveOrbit:
|
||||
if beamline == 'Aramis':
|
||||
self.isAramis = True
|
||||
else:
|
||||
self.isAthos = True
|
||||
self.isAramis = False
|
||||
|
||||
# all routine for accessing the machine (read/write)
|
||||
|
||||
@@ -100,9 +155,9 @@ class AdaptiveOrbit:
|
||||
if self.isAramis:
|
||||
for i in range(len(fbval)):
|
||||
self.pvAR[i].value = fbval[i]
|
||||
return
|
||||
for i in range(len(fbval)):
|
||||
self.pvAT[i].value = fbval[i]
|
||||
else:
|
||||
for i in range(len(fbval)):
|
||||
self.pvAT[i].value = fbval[i]
|
||||
|
||||
def getPVNames(self):
|
||||
if self.isAramis:
|
||||
@@ -120,7 +175,6 @@ class AdaptiveOrbit:
|
||||
return [pv.value for pv in self.kickerAT]
|
||||
|
||||
def setKicker(self,vals):
|
||||
return
|
||||
if self.isAramis:
|
||||
for i,val in enumerate(vals):
|
||||
self.kickerAR[i].value = val
|
||||
|
||||
137
app/adaptiveorbit_new.py
Normal file
137
app/adaptiveorbit_new.py
Normal file
@@ -0,0 +1,137 @@
|
||||
import time
|
||||
import numpy as np
|
||||
|
||||
from bstrd import BSCache
|
||||
from bstrd.bscache import is_available
|
||||
from epics import PV
|
||||
|
||||
class AdaptiveOrbitNew:
    """
    Wrapper class to bundle all daq/io needed for adaptive orbit feedback.

    Usage: construct, then call getChannels(beamline, bunch) to connect the
    BS stream and the EPICS feedback/kicker channels. All accessors return
    an empty/None result until a connection has been established.
    """

    def __init__(self):
        # BUG FIX: the attributes were assigned twice (list then None);
        # keep the single, final state.
        self.BS = None          # BSCache instance streaming the BPM data
        self.PVKick = None      # list of PV objects for the kicker magnets
        self.PVFB = None        # list of PV objects for orbit-feedback references
        self.BSChannels = []    # names of the BS channels currently streamed

    def getChannels(self, beamline, bunch):
        """Connect BS stream and EPICS channels for *beamline* and *bunch*.

        Returns True on success, False on unknown beamline or any
        connection failure.
        """
        if beamline == 'Aramis':
            ch0 = ['SARFE10-PBIG050-EVR0:CALCI']
            chx = ['SARUN%2.2d-DBPM070:X%d' % (bpm, bunch) for bpm in range(1, 17)]
            chy = ['SARUN%2.2d-DBPM070:Y%d' % (bpm, bunch) for bpm in range(1, 17)]
            pvFB = [ch.split(':')[0]+':X-REF-FB' for ch in chx]+[ch.split(':')[0]+':Y-REF-FB' for ch in chx]
            pvKick=['SARMA02-MCRX050:I-SET','SARMA02-MCRY050:I-SET','SARUN02-MCRX080:I-SET','SARUN02-MCRY080:I-SET','SFB_ORBIT_SAR:ONOFF1']
        elif beamline == 'Athos':
            ch0 = ['SATFE10-PEPG046-EVR0:CALCI']
            chx = []
            chy = []
            for bpm in range(5, 23):
                # most BPMs use index '070'; BPM 5 and 14 use '410'
                idx = '070'
                if bpm == 5 or bpm == 14:
                    idx = '410'
                chx.append('SATUN%2.2d-DBPM%s:X%d' % (bpm, idx, bunch))
                chy.append('SATUN%2.2d-DBPM%s:Y%d' % (bpm, idx, bunch))
            pvFB = [ch.split(':')[0]+':X-REF-FB' for ch in chx]+[ch.split(':')[0]+':Y-REF-FB' for ch in chx]
            pvKick=['SATMA01-MCRX610:I-SET','SATMA01-MCRY610:I-SET','SATUN05-MCRX420:I-SET','SATUN05-MCRY420:I-SET','SFB_ORBIT_SAT:ONOFF1']
        else:
            return False
        print("Initializing BSStream for beamline %s and Bunch %d" % (beamline, bunch))
        ch = ch0+chx+chy
        self.BS = self.initBSStream(ch)
        if self.BS is None:
            print('Failed to establish BS-stream')
            return False
        print("Initializing EPICS Channels")
        self.PVFB = self.initPV(pvFB)
        self.PVKick = self.initPV(pvKick)
        if self.PVFB is None or self.PVKick is None:
            print('Failed to establish PV-channels')
            return False
        return True

    def initBSStream(self, channels):
        """Open a BSCache stream for *channels* if all are available.

        Returns the BSCache, or None when any channel is missing (each
        missing channel is reported). The requested channel names are
        recorded in self.BSChannels either way.
        """
        ok = True
        for chn in channels:
            ok &= is_available(chn)
        if ok:
            bs = BSCache(1000, receive_timeout=1000)  # 1000 s timeout, capacity for 1000 s
            bs.get_vars(channels)
        else:
            for chn in channels:
                if not is_available(chn):
                    # BUG FIX: report the loop variable chn ('ch' was undefined)
                    print('ERROR: BS-Channel %s not found in BS-Dispatcher' % chn)
            bs = None
        self.BSChannels = channels
        return bs

    def initPV(self, chx):
        """Connect a PV for every name in *chx*.

        Returns the list of PV objects when all connect within 0.2 s,
        otherwise reports each failure and returns None.
        """
        pvs = [PV(name) for name in chx]
        con = [pv.wait_for_connection(timeout=0.2) for pv in pvs]
        ok = True
        for i, connected in enumerate(con):
            ok &= connected
            if not connected:
                # BUG FIX: the list is pvs, not pv ('pv[i]' raised NameError)
                print("ERROR: PV-Channel %s is not available" % pvs[i].pvname)
        if ok:
            return pvs
        return None

    def flush(self):
        """Drop buffered data from the BS stream."""
        self.BS.flush()

    def terminate(self):
        """Stop the BS stream thread."""
        print('Stopping BSStream Thread...')
        self.BS.stop()
        self.BS.pt.running.clear()  # for some reason I have to

    def read(self):
        """Return one sample as np.array([pid, ch0, ch1, ...]), or None
        when no stream is connected."""
        if self.BS is None:
            return None
        data = next(self.BS)
        return np.array([data['pid']] + [data[cnl] for cnl in self.BSChannels])

    def readPV(self):
        """Return the current feedback-reference values ([] if unconnected)."""
        if self.PVFB is None:
            return []
        return [pv.value for pv in self.PVFB]

    def setPV(self, fbval):
        """Write feedback-reference values; no-op when unconnected."""
        if self.PVFB is None:
            return
        for i in range(len(fbval)):
            self.PVFB[i].value = fbval[i]

    def getPVNames(self):
        """Return the feedback-reference PV names ([] if unconnected)."""
        if self.PVFB is None:
            return []
        return [pv.pvname for pv in self.PVFB]

    def getDetectorName(self):
        """Return the intensity-detector BS channel (first channel), or
        None before a stream has been requested."""
        # ROBUSTNESS: previously raised IndexError when BSChannels was empty
        return self.BSChannels[0] if self.BSChannels else None

    def getKicker(self):
        """Return the kicker set values, or None when unconnected."""
        if self.PVKick is None:
            return None
        return [pv.value for pv in self.PVKick]

    def setKicker(self, vals):
        """Write kicker set values; no-op when unconnected."""
        if self.PVKick is None:
            return
        for i, val in enumerate(vals):
            self.PVKick[i].value = val
|
||||
|
||||
|
||||
|
||||
|
||||
@@ -10,6 +10,8 @@ from slic.core.acquisition import BSAcquisition, PVAcquisition
|
||||
from slic.core.scanner import Scanner
|
||||
from sfbd.ext import CounterAdjustable
|
||||
from sfbd.ext import BSCAcquisition
|
||||
from sfbd.interface import SlicScan
|
||||
from bstrd import BSCache
|
||||
|
||||
def getAux(pvs=None):
|
||||
if not pvs:
|
||||
@@ -19,7 +21,6 @@ def getAux(pvs=None):
|
||||
for i,pv in enumerate(pvs):
|
||||
if val[i]: # filter out None values
|
||||
ret[pv]=float(val[i])
|
||||
epics.ca.clear_cache()
|
||||
return ret
|
||||
|
||||
|
||||
@@ -30,23 +31,33 @@ def getBSChannels(regexp):
|
||||
if prog.match(bs['name']):
|
||||
res.append(bs['name'])
|
||||
return res
|
||||
|
||||
def getRFCalibrationChannels(sensors, energy):
    """Derive RF calibration channels for vector-sum phase sensors.

    For every sensor containing 'PHASE-VS', three related calibration
    channels are produced by name substitution; the energy channel is
    appended last.
    """
    substitutions = (
        ('GET-VSUM-PHASE-OFFSET', 'RSYS'),
        ('GET-VSUM-AMPLT-SCALE', 'RSYS'),
        ('SM-GET', 'RMSM'),
    )
    aux = []
    for sen in sensors:
        if 'PHASE-VS' not in sen:
            continue
        for tag, system in substitutions:
            aux.append(sen.replace('PHASE-VS', tag).replace('RLLE-DSP', system))
    aux.append(energy)
    return aux
|
||||
|
||||
|
||||
class Dispersion:
|
||||
def __init__(self, branch = 'Aramis'):
|
||||
self.scanname = 'Dispersion'
|
||||
self.branch = 'None'
|
||||
self.branch = None
|
||||
|
||||
dirpath= datetime.datetime.now().strftime('measurements/%Y/%m/%d/slic_sfbd')
|
||||
self.basedir = '/sf/data/%s/%s' % (dirpath,self.scanname)
|
||||
|
||||
self.pgroup='%s/%s' % (dirpath,self.scanname)
|
||||
self.scandir='/sf/data/'+self.pgroup
|
||||
self.setBranch(branch)
|
||||
self.sc = None
|
||||
|
||||
self.setBranch(branch) # enfore Athos dump settings for now
|
||||
self.Nsteps = 2
|
||||
self.Nsamples = 1
|
||||
|
||||
|
||||
self.bsc = None
|
||||
|
||||
def setBranch(self,branch = 'Aramis'):
|
||||
self.sc = None
|
||||
if branch == 'Athos Dump':
|
||||
@@ -55,30 +66,112 @@ class Dispersion:
|
||||
self.setupAramis()
|
||||
elif branch == 'Athos':
|
||||
self.setupAthos()
|
||||
elif branch =='Bunch Compressor 2':
|
||||
self.setupBC2()
|
||||
else:
|
||||
self.scanner = None
|
||||
self.branch = None
|
||||
return self.branch
|
||||
|
||||
def setup(self,scl = 1, Nsteps=5, Nsamples=10):
|
||||
# the setup is done from the main thread. Therefore I have to define the BSC here
|
||||
self.N = Nsteps
|
||||
self.Ns= Nsamples
|
||||
self.scale = scl
|
||||
# define stream
|
||||
print('Getting BSCache')
|
||||
self.bsc = BSCache(100000, receive_timeout=10000) # 1000 second timeout, size for 100 second data taken
|
||||
self.bsc.get_vars(self.sensor) # this starts the stream into the cache
|
||||
print('Getting BSCache done')
|
||||
|
||||
def scan(self):
|
||||
# core routine to do all action. Will be a thread of the slic scanner wrapper
|
||||
if not self.branch:
|
||||
return
|
||||
# path is define in the various set-up procedures
|
||||
self.scanner = Scanner(data_base_dir=self.basedir,scan_info_dir=self.basedir,
|
||||
make_scan_sub_dir=True,
|
||||
default_acquisitions=self.acq)
|
||||
|
||||
|
||||
def getRFCalibrationChannels(self,sensor2,energy):
|
||||
aux=[]
|
||||
for sen in sensor2:
|
||||
if 'PHASE-VS' in sen:
|
||||
aux.append(sen.replace('PHASE-VS','GET-VSUM-PHASE-OFFSET').replace('RLLE-DSP','RSYS'))
|
||||
aux.append(sen.replace('PHASE-VS','GET-VSUM-AMPLT-SCALE').replace('RLLE-DSP','RSYS'))
|
||||
aux.append(sen.replace('PHASE-VS','SM-GET').replace('RLLE-DSP','RMSM'))
|
||||
aux.append(energy)
|
||||
return aux
|
||||
# define acquisition channels wrapper for BSQueue
|
||||
pgroup = '%s-%s' % (self.pgroup,self.branch)
|
||||
acq = [BSCAcquisition(".",pgroup, default_channels=[self.bsc])]
|
||||
|
||||
# define the scanner
|
||||
self.scanner = Scanner(data_base_dir=self.basedir, # root directory of data location e.g. /sf/data/2023/02/02/Dispersion
|
||||
scan_info_dir=self.basedir, # write also scan info there
|
||||
make_scan_sub_dir=True, # put each scan in its own directory
|
||||
default_acquisitions=acq) # adjustable to use
|
||||
|
||||
# define adjustable
|
||||
self.adj = PVAdjustable(self.adjSV,pvname_readback = self.adjRB, accuracy = 0.1,ID = self.adjSV, name = self.name)
|
||||
val = self.adj.get_current_value(readback=False)
|
||||
if self.adj2SV:
|
||||
self.adj2 = PVAdjustable(self.adj2SV,pvname_readback = self.adj2RB, accuracy = 0.1,ID = self.adjSV, name = self.name)
|
||||
val2 = self.adj2.get_current_value(readback=False)
|
||||
dval = self.amp*self.scale
|
||||
print(self.adjSV,' - Scan Range:',val-dval,'to',val+dval)
|
||||
if self.adj2SV:
|
||||
print(self.adj2SV,' - Scan Range:',val2+dval,'to',val2-dval)
|
||||
|
||||
# create scanner backend
|
||||
if self.adj2SV:
|
||||
self.sc=self.scanner.a2scan(self.adj, val-dval, val+dval,
|
||||
self.adj2, val2+dval, val2-dval,
|
||||
n_intervals = self.N-1, # steps
|
||||
n_pulses = self.Ns, # samples
|
||||
filename=self.branch, # relative directory for data
|
||||
start_immediately = False, # wait for execution to performe pre-action items
|
||||
return_to_initial_values=True) # return to initial values
|
||||
else:
|
||||
self.sc=self.scanner.ascan(self.adj, val-dval, val+dval,
|
||||
n_intervals = self.N-1, # steps
|
||||
n_pulses = self.Ns, # samples
|
||||
filename=self.branch, # relative directory for data
|
||||
start_immediately = False, # wait for execution to performe pre-action items
|
||||
return_to_initial_values=True) # return to initial values
|
||||
|
||||
# get aux data first
|
||||
self.auxdata = getAux(self.aux)
|
||||
self.preaction()
|
||||
self.sc.run()
|
||||
self.postaction()
|
||||
self.bsc.stop() # stop the queue
|
||||
|
||||
|
||||
|
||||
#################################
|
||||
# definition of the individual branches
|
||||
|
||||
def setupBC2(self):
|
||||
# branch and name tag
|
||||
self.branch='Bunch Compressor 2'
|
||||
self.name ='BC2'
|
||||
|
||||
# pre-scan item - needs an adjustment of the dipole current of about 2.3e-3 - to be implemented later.
|
||||
self.pre = {}
|
||||
self.pre['SFB_COMPRESSION_BC2_AR:ONOFF1']={'Val':0,'InitVal':0}
|
||||
self.pre['SFB_COMPRESSION_BC2_AR:ONOFF2']={'Val':0,'InitVal':0}
|
||||
self.pre['SFB_ORBIT_S10:ONOFF1']={'Val':0,'InitVal':0}
|
||||
for pv in self.pre.keys():
|
||||
self.pre[pv]['adj']=PVAdjustable(pv)
|
||||
|
||||
# adjustable
|
||||
self.adjSV = 'S10:SET-E-GAIN-OP'
|
||||
self.adjRB = 'S10:GET-E-GAIN-OP'
|
||||
self.adj2SV = 'S20:SET-E-GAIN-OP' # counter adjustable
|
||||
self.adj2RB = 'S20:GET-E-GAIN-OP'
|
||||
self.amp = 5 # the amplitude of the scan, which can be scaled
|
||||
|
||||
|
||||
# acquisition
|
||||
sensor1 = getBSChannels('S10[BM].*-DBPM.*:[XY]1$')
|
||||
sensor2 = getBSChannels('S1.*-RLLE-DSP:.*-VS$')
|
||||
self.sensor = sensor1+sensor2
|
||||
|
||||
# auxiliar data to be read one
|
||||
self.aux = getRFCalibrationChannels(sensor2,'SINBC02-MBND100:ENERGY-OP')
|
||||
|
||||
def setupAthosDump(self):
|
||||
# branch and name tag
|
||||
self.branch='Athos_Dump'
|
||||
pgroup = '%s-%s' % (self.pgroup,self.branch)
|
||||
self.path = '/sf/data/'+pgroup
|
||||
self.name ='SATCB01-Linac'
|
||||
|
||||
# pre-scan item - needs an adjustment of the dipole current of about 2.3e-3 - to be implemented later.
|
||||
self.pre = {}
|
||||
self.pre['SFB_BEAM_DUMP_AT:ONOFF1']={'Val':0,'InitVal':0}
|
||||
@@ -87,23 +180,27 @@ class Dispersion:
|
||||
self.pre['SFB_ORBIT_SAT_%2.2d:ONOFF1' % i ]={'Val':0,'InitVal':0}
|
||||
for pv in self.pre.keys():
|
||||
self.pre[pv]['adj']=PVAdjustable(pv)
|
||||
|
||||
# adjustable
|
||||
self.adjSV = 'SATCB01-RSYS:SET-BEAM-PHASE'
|
||||
self.adjRB = 'SATCB01-RSYS:GET-BEAM-PHASE'
|
||||
self.adj = PVAdjustable(self.adjSV,pvname_readback = self.adjRB, accuracy = 0.1,ID = self.adjSV, name = "SATCB01-Linac")
|
||||
self.adj2SV = None
|
||||
self.adj2RB = None
|
||||
self.amp = 30 # the amplitude of the scan, which can be scaled
|
||||
|
||||
# acquisition
|
||||
sensor1 = getBSChannels('SATBD02-DBPM.*:Y2$')
|
||||
sensor2 = getBSChannels('SATCB.*-RLLE-DSP:.*-VS$')
|
||||
self.sensor = sensor1+sensor2
|
||||
self.acq = [BSAcquisition(".",pgroup, default_channels=self.sensor)]
|
||||
|
||||
# auxiliar data to be read one
|
||||
self.aux = self.getRFCalibrationChannels(sensor2,'SATCL01-MBND100:ENERGY-OP')
|
||||
self.aux = getRFCalibrationChannels(sensor2,'SATCL01-MBND100:ENERGY-OP')
|
||||
|
||||
def setupAramis(self):
|
||||
# branch and name tag
|
||||
self.branch='Aramis'
|
||||
pgroup = '%s-%s' % (self.pgroup,self.branch)
|
||||
self.path = '/sf/data/'+pgroup
|
||||
self.name = 'Linac3'
|
||||
|
||||
# pre-scan item
|
||||
self.pre = {}
|
||||
self.pre['SFB_BEAM_DUMP_AR:ONOFF1']={'Val':0,'InitVal':0}
|
||||
@@ -112,23 +209,28 @@ class Dispersion:
|
||||
self.pre['SFB_ORBIT_SAR:ONOFF1']={'Val':0,'InitVal':0}
|
||||
for pv in self.pre.keys():
|
||||
self.pre[pv]['adj']=PVAdjustable(pv)
|
||||
|
||||
# adjustable
|
||||
self.adjSV = 'S30:SET-E-GAIN-OP'
|
||||
self.adjRB = 'S30:GET-E-GAIN-OP'
|
||||
self.adj = PVAdjustable(self.adjSV,pvname_readback = self.adjRB, accuracy = 0.1,ID = self.adjSV, name = "Linac3")
|
||||
self.adj2SV = None
|
||||
self.adj2RB = None
|
||||
self.amp = 20 # the amplitude of the scan, which can be scaled
|
||||
|
||||
# acquisition
|
||||
sensor1 = getBSChannels('SAR.*DBPM.*:[XY]1$')
|
||||
sensor1 = getBSChannels('SAR[CMU].*DBPM.*:[XY]1$')
|
||||
sensor2 = getBSChannels('S[23].*-RLLE-DSP:.*-VS$')
|
||||
self.sensor = sensor1+sensor2
|
||||
self.acq = [BSAcquisition(".",pgroup, default_channels=self.sensor)]
|
||||
|
||||
# auxiliar data to be read one
|
||||
self.aux = self.getRFCalibrationChannels(sensor2,'S10BC02-MBND100:ENERGY-OP')
|
||||
self.aux = getRFCalibrationChannels(sensor2,'S10BC02-MBND100:ENERGY-OP')
|
||||
|
||||
|
||||
def setupAthos(self):
|
||||
self.branch='Aramis'
|
||||
pgroup = '%s-%s' % (self.pgroup,self.branch)
|
||||
self.path = '/sf/data/'+pgroup
|
||||
# branch and name tag
|
||||
self.branch='Athos'
|
||||
self.name = 'Linac2+3'
|
||||
|
||||
# pre-scan item
|
||||
self.pre = {}
|
||||
self.pre['SFB_BEAM_DUMP_AT:ONOFF1']={'Val':0,'InitVal':0}
|
||||
@@ -139,31 +241,25 @@ class Dispersion:
|
||||
self.pre['SFB_ORBIT_SAT_%2.2d:ONOFF1' % i ]={'Val':0,'InitVal':0}
|
||||
for pv in self.pre.keys():
|
||||
self.pre[pv]['adj']=PVAdjustable(pv)
|
||||
|
||||
# adjustable
|
||||
self.adjSV = 'S20:SET-E-GAIN-OP'
|
||||
self.adjRB = 'S20:GET-E-GAIN-OP'
|
||||
self.adj = PVAdjustable(self.adjSV,pvname_readback = self.adjRB, accuracy = 0.1,ID = self.adjSV, name = "Linac 2 and 3")
|
||||
# self.adj2SV = 'S30:SET-E-GAIN-OP'
|
||||
# self.adj2RB = 'S30:GET-E-GAIN-OP'
|
||||
# self.adj2 = PVAdjustable(self.adj2SV,pvname_readback = self.adj2RB, accuracy = 0.1)
|
||||
# self.adj = CounterAdjustable(self.adj1,self.adj2) # combine the two channels
|
||||
self.amp = 10 # the amplitude of the scan, which can be scaled
|
||||
self.adj2SV = 'S30:SET-E-GAIN-OP' # counter adjustable
|
||||
self.adj2RB = 'S30:GET-E-GAIN-OP'
|
||||
self.amp = 5 # the amplitude of the scan, which can be scaled
|
||||
|
||||
# acquisition
|
||||
sensor1 = getBSChannels('SAT[SDC].*DBPM.*:[XY]2$')
|
||||
sensor2 = getBSChannels('S[2].*-RLLE-DSP:.*-VS$')
|
||||
self.sensor = sensor1+sensor2
|
||||
self.acq = [BSAcquisition(".",pgroup, default_channels=self.sensor)]
|
||||
|
||||
# auxiliar data to be read one
|
||||
self.aux = self.getRFCalibrationChannels(sensor2,'S10BC02-MBND100:ENERGY-OP')
|
||||
self.aux = getRFCalibrationChannels(sensor2,'S10BC02-MBND100:ENERGY-OP')
|
||||
|
||||
|
||||
def setup(self,scl = 1, Nsteps=5, Nsamples=5):
|
||||
val = self.adj.get_current_value(readback=False)
|
||||
# dval = self.amp*scl
|
||||
dval = 0 ######## edit this
|
||||
self.N = Nsteps
|
||||
self.Ns= Nsamples
|
||||
self.values=np.linspace(val-dval,val+dval,num=self.N)
|
||||
#########################
|
||||
# some basic interaction with the scan functionality
|
||||
|
||||
|
||||
def preaction(self):
|
||||
@@ -174,40 +270,44 @@ class Dispersion:
|
||||
def postaction(self):
|
||||
for key in self.pre.keys():
|
||||
self.pre[key]['adj'].set_target_value(self.pre[key]['InitVal'])
|
||||
|
||||
|
||||
def scan(self):
|
||||
self.sc=self.scanner.ascan_list(self.adj,self.values,
|
||||
filename=self.branch,start_immediately = False,
|
||||
n_pulses=self.Ns,return_to_initial_values=True)
|
||||
# self.preaction() ######
|
||||
self.sc.run()
|
||||
# self.auxdata = getAux(self.aux)
|
||||
# self.postaction() #######
|
||||
|
||||
|
||||
|
||||
def stop(self):
|
||||
if self.sc is None:
|
||||
return
|
||||
self.sc.stop()
|
||||
|
||||
def running(self):
|
||||
if self.sc is None:
|
||||
return False
|
||||
return self.sc.running
|
||||
|
||||
def status(self):
|
||||
if self.sc is None:
|
||||
return 0,0
|
||||
si = self.sc.scan_info.to_dict()
|
||||
steps = 0
|
||||
if 'scan_values' in si:
|
||||
steps=len(si['scan_values'])
|
||||
steps=len(si['scan_values'])
|
||||
return steps,self.N
|
||||
|
||||
def info(self):
|
||||
if self.sc is None:
|
||||
return None
|
||||
return self.sc.scan_info.to_dict()
|
||||
|
||||
|
||||
|
||||
#--------------------
|
||||
# main implementation for debugging
|
||||
|
||||
|
||||
if __name__ == '__main__':
|
||||
daq = Dispersion()
|
||||
daq.setup(1,10,100)
|
||||
# threaded execution
|
||||
scanner=SlicScan()
|
||||
scanner.start(daq,True)
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
5
app/runDispersionTools.sh
Executable file
5
app/runDispersionTools.sh
Executable file
@@ -0,0 +1,5 @@
|
||||
#!/bin/bash
# Launcher for the dispersion measurement tool.
# Prepends the beam-dynamics packages (slic, bstrd) to the Python search
# path so dispersiontools.py can import them.

export PYTHONPATH=/sf/bd/packages/slic:/sf/bd/packages/bstrd:$PYTHONPATH

python dispersiontools.py
|
||||
|
||||
@@ -1,23 +1,54 @@
|
||||
import time
|
||||
import numpy as np
|
||||
|
||||
from bstrd import BSCache
|
||||
from epics import PV
|
||||
|
||||
class SpectralAnalysis:
|
||||
"""
|
||||
Wrapper class to bundle all daq/io needed for adaptive orbit feedback.
|
||||
"""
|
||||
def __init__(self):
|
||||
athos_uncalibrated = 'SATFE10-PEPG046-EVR0:CALCI'
|
||||
athos_calibrated = 'SATFE10-PEPG046:FCUP-INTENSITY-CAL'
|
||||
aramis_uncalibrated = 'SARFE10-PBIG050-EVR0:CALCI'
|
||||
aramis_calibrated = 'SARFE10-PBPG050:HAMP-INTENSITY-CAL'
|
||||
eehg_spectrometer = 'SSL2-CPCW-SPEC01:SPECTRUM'
|
||||
channel_dict = {
|
||||
'PSSS': ['SARFE10-PSSS059:SPECTRUM_Y', 'SARFE10-PSSS059:SPECTRUM_X'],
|
||||
'PMOS Maloja': ['SATOP21-PMOS127-2D:SPECTRUM_Y', athos_uncalibrated, athos_calibrated],
|
||||
'PMOS Furka': ['SATOP31-PMOS132-2D:SPECTRUM_Y', 'SATOP31-PMOS132-2D:SPECTRUM_X', athos_uncalibrated, athos_calibrated],
|
||||
'PSSS LB': ['SARFE10-PSSS059-LB:SPECTRUM_Y'],
|
||||
'PSSS incl gasd': ['SARFE10-PSSS059:SPECTRUM_Y', 'SARFE10-PSSS059:SPECTRUM_X', aramis_uncalibrated, aramis_calibrated],
|
||||
'PSSS LB incl gasd': ['SARFE10-PSSS059-LB:SPECTRUM_Y', aramis_uncalibrated, aramis_calibrated],
|
||||
'PMOS Maloja EEHG': ['SATOP21-PMOS127-2D:SPECTRUM_Y', eehg_spectrometer],
|
||||
'PMOS Furka EEHG': ['SATOP31-PMOS132-2D:SPECTRUM_Y', eehg_spectrometer],
|
||||
'Maloja end station': ['SATES24-CAMS161-M1.projection_signal', athos_uncalibrated, athos_calibrated],
|
||||
}
|
||||
names = ['PSSS', 'PMOS Maloja', 'PMOS Furka', 'PSSS LB', 'PSSS incl gasd', 'PSSS LB incl gasd', 'PMOS Maloja EEHG','PMOS Furka EEHG', 'Maloja end station']
|
||||
|
||||
self.bs = BSCache(100000,10000) # 100 second timeout, size for 100 second data taken
|
||||
|
||||
class SpectralAnalysisBase:
    """Shared helpers for the spectrometer readout wrappers: x-axis PV
    handling and spectrometer-name lookup."""

    def connect_x_axis(self):
        """Connect the PV holding the spectrum x-axis, derived from the
        first channel name by replacing '_Y' with '_X'. On any failure
        self.pv is set to None."""
        try:
            self.pv = PV(self.channel[0].replace('_Y', '_X'))
        except Exception as exc:
            print('could not read X axis PV')
            print(exc)
            self.pv = None

    def read_spectrum_axis(self):
        """Return the x-axis values, or None when no axis PV is connected."""
        return self.pv.value if self.pv else None

    def getSpectrometerName(self):
        """Return the BS channel(s) of the active spectrometer."""
        return self.channel
|
||||
|
||||
class SpectralAnalysis(SpectralAnalysisBase):
|
||||
def __init__(self):
|
||||
self.bs = BSCache(100000,receive_timeout=10000) # 100 second timeout, size for 10 second data taken
|
||||
self.bs.stop()
|
||||
|
||||
self.channel = ''
|
||||
self.channels = ['SARFE10-PSSS059:SPECTRUM_Y',
|
||||
'SATOP21-PMOS127-2D:SPECTRUM_Y',
|
||||
'SATOP31-PMOS132-2D:SPECTRUM_Y']
|
||||
self.isConnected = False
|
||||
self.channel = None
|
||||
self.channels = [channel_dict[x] for x in names]
|
||||
self.hasBStream=False
|
||||
|
||||
    def connect_name(self, name):
        """Connect the spectrometer selected by display *name*.

        NOTE(review): relies on a module-level 'names' list mapping display
        names to channel indices — confirm it is defined at module scope.
        """
        index = names.index(name)
        self.connect(index)
|
||||
|
||||
def connect(self,ich):
|
||||
if ich < 0 or ich >= len(self.channels):
|
||||
@@ -25,25 +56,68 @@ class SpectralAnalysis:
|
||||
self.channel = self.channels[ich]
|
||||
print('Connecting to BS-Channel:',self.channel)
|
||||
self.bs.channels.clear()
|
||||
self.bs.get_var(self.channel) # this starts the stream into the cache
|
||||
self.pv = PV(self.channel.replace('_Y','_X'))
|
||||
|
||||
self.hasBStream=True
|
||||
try:
|
||||
self.bs.get_vars(self.channel) # this starts the stream into the cache
|
||||
except ValueError:
|
||||
print('Cannot find requested channels in BS stream')
|
||||
self.hasBStream=False
|
||||
self.pv = None
|
||||
#self.connect_x_axis()
|
||||
|
||||
def terminate(self):
|
||||
print('Stopping BSStream Thread...')
|
||||
print('Stopping BSStream Thread...')
|
||||
self.bs.stop()
|
||||
self.bs.pt.running.clear() # for some reason I have to
|
||||
if self.pv:
|
||||
self.pv.disconnect()
|
||||
|
||||
    def flush(self):
        """Discard any spectra buffered in the BS cache."""
        self.bs.flush()
|
||||
|
||||
def read(self):
|
||||
data=self.bs.__next__()
|
||||
return data['pid'],data[self.channel]
|
||||
return next(self.bs)
|
||||
|
||||
    def readPV(self):
        """Return the current value of the x-axis PV.

        NOTE(review): assumes connect() has set self.pv — raises
        AttributeError on None otherwise.
        """
        return self.pv.value
|
||||
class SpectralAnalysis2(SpectralAnalysisBase):
    """Spectrometer readout splitting its channels over two BS streams:
    bs1 carries the spectrum channel, bs2 the remaining channels."""

    def __init__(self):
        self.bs1 = BSCache(100000, receive_timeout=10000)
        self.bs2 = BSCache(100000, receive_timeout=10000)
        self.bs1.stop()
        self.bs2.stop()
        self.hasBStream = False
        # BUG FIX: terminate() and getSpectrometerName() read these before
        # connect_name() may have been called
        self.pv = None
        self.channel = None

    # getSpectrometerName() is inherited from SpectralAnalysisBase;
    # the identical local override was removed.

    def connect_name(self, name):
        """Connect the spectrometer selected by display *name*, starting
        both BS streams. Sets self.hasBStream accordingly."""
        print('Connect_name called')
        channels = channel_dict[name]
        self.channel = channels
        self.bs1.channels.clear()
        self.hasBStream = True
        try:
            self.bs1.get_vars(channels[:1])  # this starts the stream into the cache
        except ValueError:
            print('Cannot find requested channel %s in BS stream' % channels[0])
            self.hasBStream = False
        if len(channels) > 1:
            try:
                self.bs2.get_vars(channels[1:])  # this starts the stream into the cache
            except ValueError:
                print('Cannot find requested channel %s in BS stream' % channels[1:])
                self.hasBStream = False
        self.pv = None
        #self.connect_x_axis()

    def flush(self):
        """Discard buffered data from both streams."""
        for _bs in self.bs1, self.bs2:
            _bs.flush()

    def read(self):
        """Return one message from each stream as a (msg1, msg2) pair."""
        return next(self.bs1), next(self.bs2)

    def terminate(self):
        """Stop both BS stream threads and disconnect the x-axis PV."""
        print('Stopping BSStream Thread...')
        for _bs in self.bs1, self.bs2:
            _bs.stop()
            _bs.pt.running.clear()  # for some reason I have to
        if self.pv:
            self.pv.disconnect()
|
||||
|
||||
|
||||
|
||||
@@ -14,14 +14,25 @@ class XTCAVStabilizer:
|
||||
# the PV
|
||||
self.PVTCAV = PV('SATMA02-RMSM:SM-GET',connection_timeout=0.9, callback=self.stateChanged)
|
||||
self.state=self.PVTCAV.value == 9
|
||||
|
||||
self.PVVolt = PV('SATMA02-RSYS:SET-ACC-VOLT')
|
||||
self.PVPhase = PV('SATMA02-RSYS:SET-BEAM-PHASE')
|
||||
|
||||
# the BS channels
|
||||
self.bs = BSCache(100000,10000) # 100 second timeout, size for 100 second data taken
|
||||
self.bs = BSCache(100000, receive_timeout=10000) # 100 second timeout, size for 10 second data taken
|
||||
self.channels = ['SATBD02-DBPM040:X2','SATMA02-RLLE-DSP:PHASE-VS','SATBD02-DBPM040:X2-VALID']
|
||||
self.validation = self.channels[2]
|
||||
self.bs.get_vars(self.channels) # this starts the stream into the cache
|
||||
self.bs.stop()
|
||||
|
||||
    def getPhase(self):
        """Return the current value of the TCAV beam-phase set PV."""
        return self.PVPhase.value
|
||||
|
||||
    def setPhase(self,phi):
        """Write *phi* to the TCAV beam-phase set PV."""
        self.PVPhase.set(phi)
|
||||
|
||||
    def getVoltage(self):
        """Return the current value of the TCAV accelerating-voltage set PV."""
        return self.PVVolt.value
|
||||
|
||||
    def stateChanged(self,pvname=None, value=0, **kws):
        """EPICS monitor callback: track whether the state-machine PV
        equals 9 (presumably the 'operational' state — confirm)."""
        self.state = value == 9
|
||||
|
||||
|
||||
@@ -2,3 +2,4 @@ from .magnet import Magnet
|
||||
from .camacquisition import CamAcquisition
|
||||
from .counteradjustable import CounterAdjustable
|
||||
from .bscacquisition import BSCAcquisition
|
||||
from .pvcombiadjustable import PVCombiAdjustable
|
||||
|
||||
@@ -1,6 +1,5 @@
|
||||
|
||||
import h5py
|
||||
import numpy as np
|
||||
#import numpy as np
|
||||
|
||||
from slic.core.acquisition.acquisition import Acquisition
|
||||
|
||||
@@ -8,33 +7,38 @@ from slic.core.acquisition.acquisition import Acquisition
|
||||
# class using the BSQueue to avoid to reestablish a stream for each step.
|
||||
|
||||
class BSCAcquisition(Acquisition):
|
||||
|
||||
|
||||
    def __init__(self, bscache, *args, use_channels=None, **kwargs):
        # Acquisition wrapper pulling data from an already-running BSCache,
        # avoiding re-establishing a stream for each scan step.
        self.bscache = bscache            # shared BSCache instance
        self.use_channels = use_channels  # optional channel subset; None -> all cached channels
        self.grp = 0                      # group index used when writing pulse ids
        super().__init__(*args, default_channels= self.use_channels or self.bscache.channels.keys(), **kwargs)
|
||||
|
||||
def setGroup(self,idx):
|
||||
self.grp = idx
|
||||
|
||||
def _acquire(self, filename, channels=None, data_base_dir=None, scan_info=None, n_pulses=100, **kwargs):
|
||||
|
||||
queue =channels[0] # abusing interface since BSAcquisition assume a list of channels
|
||||
|
||||
# allocating space
|
||||
data={}
|
||||
chns = queue.channels
|
||||
for chn in chns:
|
||||
data[chn]={'data':np.zeros((n_pulses))}
|
||||
data['pulse_id']=np.zeros((n_pulses))
|
||||
|
||||
# clear the queue
|
||||
queue.flush()
|
||||
self.bscache.flush()
|
||||
data = []
|
||||
for i in range(n_pulses):
|
||||
msg = queue.__next__() # pull data from cache
|
||||
for chn in chns:
|
||||
data[chn]['data'][i] = msg[chn]
|
||||
data['pulse_id'][i]=msg['pid']
|
||||
data.append(next(self.bscache))
|
||||
|
||||
# write out the data file
|
||||
hid = h5py.File(filename,'w')
|
||||
hid.create_dataset('pulse_id', data = data['pulse_id'])
|
||||
for chn in chns:
|
||||
gid = hid.create_group(chn)
|
||||
for key in data[chn].keys():
|
||||
gid.create_dataset(key, data = data[chn][key])
|
||||
hid.close()
|
||||
|
||||
|
||||
use_channels = self.use_channels or self.bscache.channels.keys()
|
||||
with h5py.File(filename,'a') as hid:
|
||||
# save the pulse ID
|
||||
singledata = [ele['pid'] for ele in data]
|
||||
pidname = 'pulse_id/group%d' % self.grp
|
||||
hid.create_dataset(pidname, data=singledata)
|
||||
for chn in use_channels:
|
||||
singledata = []
|
||||
for ele in data:
|
||||
if chn in ele:
|
||||
singledata.append(ele[chn])
|
||||
else:
|
||||
print('%s not in data!' % chn)
|
||||
if not chn == 'pid':
|
||||
dname = chn.replace(':','/')+'/data'
|
||||
hid.create_dataset(dname, data=singledata)
|
||||
dpid = dname.replace('/data','/pid')
|
||||
hid[dpid] = hid[pidname]
|
||||
42
ext/pvcombiadjustable.py
Normal file
42
ext/pvcombiadjustable.py
Normal file
@@ -0,0 +1,42 @@
|
||||
from slic.core.adjustable import Adjustable,DummyAdjustable,PVAdjustable
|
||||
import numpy as np
|
||||
|
||||
class PVCombiAdjustable(Adjustable):
    """Combine several slic Adjustables into one indexed adjustable.

    The scan value is a sequence of indices: element i selects entry
    ``values[i][int(value[i])]`` and writes it to ``adjustables[i]``.
    """

    def __init__(self, adjustables, values):
        self.adjustables = adjustables
        # per-adjustable arrays of allowed set values (indexed by scan value)
        self.values = values

    def set_target_value(self, value):
        """Set each adjustable to its table entry selected by ``value[i]``."""
        print('##### Setvalue:', value)
        # check if values are in range
        for i, adj in enumerate(self.adjustables):
            idx = int(value[i])
            print('### Index', i, idx)
            val = self.values[i][idx]
            print('### Setting', adj.name, 'to', val)
            adj.set_target_value(val)

    @property
    def units(self):
        # placeholder unit per adjustable; real units would need each
        # underlying adjustable to report them
        return ['m'] * len(self.adjustables)
        # return [adj.units() for adj in self.adjustables if adj is not None]

    def get_current_value(self, readback=True):
        """Return the current value of every wrapped adjustable."""
        return [adj.get_current_value() for adj in self.adjustables]  # this is different for dummy elements

    def stop(self):
        """Stop all wrapped adjustables (fix: 'adjustbale' typo raised AttributeError)."""
        for adj in self.adjustables:
            adj.stop()

    def is_moving(self):
        """True while any wrapped adjustable is still moving."""
        return any(adj.is_moving() for adj in self.adjustables)
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
@@ -1,6 +1,15 @@
|
||||
from .snap import getSnap
|
||||
from .snap import getSnapPV
|
||||
from .snap import getSnapVal
|
||||
from .snap import saveSnap
|
||||
from .snap import loadSnap
|
||||
from .snap import parseSnapShotReqYAML
|
||||
from .save import saveDataset
|
||||
from .save import getDatasetFileName
|
||||
from .load import loadDataset
|
||||
from .elog import writeElog
|
||||
from .slic import SlicScan
|
||||
from .slic import importSlicScan
|
||||
from .doocs import doocsread
|
||||
from .doocs import doocswrite
|
||||
|
||||
|
||||
33
interface/doocs.py
Normal file
33
interface/doocs.py
Normal file
@@ -0,0 +1,33 @@
|
||||
import subprocess
|
||||
|
||||
|
||||
def doocsread(mystr):
    """Read one or more DOOCS addresses via the ``doocsget`` command line tool.

    ``mystr`` can be a single address string or a list of addresses.
    Returns a float (single numeric result), a string (single non-numeric
    result), a list of floats/strings (multiple results), or None on a
    reported read error.
    """
    if isinstance(mystr, list):  # it's a list
        readCmd = ' '.join(mystr)
    else:
        readCmd = mystr
    cmd = ['doocsget', '-c', readCmd]
    # capture stderr together with stdout so error markers can be detected
    proc = subprocess.run(cmd, stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
    stdout = proc.stdout
    if b'result->error()' in stdout.split():
        print('ERROR: reading error')
        print(mystr)
        return
    result = [element.decode('utf-8') for element in stdout.split()]
    if len(result) == 1:  # one element only
        try:
            return float(result[0])
        except ValueError:
            # non-numeric reply: return the raw string
            return result[0]
    else:
        try:
            return [float(x) for x in result]
        except ValueError:
            # mixed/non-numeric reply: return the raw strings
            return result
|
||||
def doocswrite(mystr, target):
    """Write ``target`` to the DOOCS address ``mystr`` via the ``doocsput`` CLI."""
    command = ['doocsput', '-c', mystr, '-d', str(target)]
    process = subprocess.Popen(command, stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
    out, err = process.communicate()
|
||||
@@ -3,11 +3,26 @@ import h5py
|
||||
|
||||
def loadDataset(filename):
|
||||
hid = h5py.File(filename, "r")
|
||||
icount = 0
|
||||
snap = loadSnap(hid)
|
||||
data = loadData(hid)
|
||||
act = loadActuator(hid)
|
||||
data={}
|
||||
act= {}
|
||||
maxID = 0
|
||||
for key in hid.keys():
|
||||
if 'scan' in key:
|
||||
ID = int(key.split('_')[1])
|
||||
if ID > maxID:
|
||||
maxID = ID
|
||||
print('Reading scan_%d' % ID)
|
||||
data[ID]=loadData(hid,ID)
|
||||
act[ID]=loadActuator(hid,ID)
|
||||
icount +=1
|
||||
hid.close()
|
||||
return data,act,snap
|
||||
if icount == 0:
|
||||
return None, None, None
|
||||
elif icount == 1:
|
||||
return data[0], act[0],snap
|
||||
return [data[j] for j in range(maxID+1)],[act[j] for j in range(maxID+1)],snap
|
||||
|
||||
def loadSnap(hid):
|
||||
snap={}
|
||||
|
||||
153
interface/magnet.py
Normal file
153
interface/magnet.py
Normal file
@@ -0,0 +1,153 @@
|
||||
from time import sleep
|
||||
from epics import PV,caget_many, caput_many
|
||||
import numpy as np
|
||||
from threading import Thread
|
||||
from time import sleep
|
||||
|
||||
#from onlinemodel.interface import SwissFELMagnet
|
||||
|
||||
class Magnet:
    """Helper around SwissFEL magnet EPICS channels.

    Supports setting currents/strengths, rescaling quadrupoles, sextupoles
    and bends to a new energy profile, and cycling correctors/magnets in
    background threads.  In ``debug`` mode all set operations only print or
    emit what they would do.  ``signal``/``progress``/``done`` are optional
    Qt-style signal objects providing an ``emit`` method.
    """

    def __init__(self, beamline='ARAMIS', debug=False, signal=None, cal=None):
        self.beamline = beamline
        self.debug = debug
        self.signal = signal
        self.progress = None
        # NOTE(review): self.cal is never assigned here (the SwissFELMagnet
        # import is commented out), yet setMagnets/scaleMagnets use it --
        # confirm the caller attaches a calibration object first.
        # self.cal = SwissFELMagnet()

        # corrector cycling pattern: 10, -10, 9, -9, ..., 0.5, -0.5, 0 (A)
        temp = np.sort(np.array(list(range(0, 11)) + [0.5, 1.5]))[::-1]
        temp2 = -1. * temp
        cyclingCurrents = np.ones(len(temp) * 2) * 0.
        cyclingCurrents[::2] = temp
        cyclingCurrents[1::2] = temp2
        self.currents = cyclingCurrents[:-1]
        self.cyclingPause = 3  # seconds to dwell per cycling step
        self.pvcor = []
        self.abort = False

    def doAbort(self):
        """Request a running cycling thread to stop at its next step."""
        self.abort = True

    def setMagnetI(self, maglist, vals):
        """Write currents ``vals`` (A) to the channels named in ``maglist``."""
        if self.debug:
            for i, ele in enumerate(maglist):
                print('DEBUG: Setting %s to: %f A' % (ele, vals[i]))
            return
        caput_many(maglist, vals)

    def setMagnets(self, maglist, magbd, erg):
        """Set quadrupole KL strengths (dict name -> KL) at beam energy ``erg`` (MeV)."""
        for mag in maglist.keys():
            Itar = self.cal.QuadrupoleKL2I(magbd[mag], maglist[mag], erg * 1e6)
            pv = PV('%s:I-SET' % mag)
            if not Itar is None:
                if self.debug:
                    self.signal.emit('DEBUG: Setting %s to %f' % (pv.pvname, Itar))
                else:
                    pv.value = Itar

    def scaleMagnets(self, maglist, magbg, EProfCur, EProfTar):
        """Rescale magnet set values from energy profile ``EProfCur`` to ``EProfTar``.

        ``maglist`` is a list of magnet names, ``magbg`` the matching
        calibration records.  The local energy index is derived from the
        'S30' section number encoded in the magnet name.
        """
        # get current from magnets
        for i, mag in enumerate(maglist):
            # pick the local beam energy (current and target) for this magnet
            if 'SAR' in mag:
                ecur = EProfCur[-1]
                etar = EProfTar[-1]
            elif 'S30' in mag:
                idx = int(mag[5:7])
                if idx >= len(EProfCur):
                    ecur = EProfCur[-1]
                    etar = EProfTar[-1]
                else:
                    ecur = EProfCur[idx]
                    etar = EProfTar[idx]
            # NOTE(review): names outside SAR/S30 reuse the previous
            # iteration's ecur/etar -- confirm maglist is restricted.
            if 'MQUA' in mag:
                pv = PV('%s:I-SET' % mag)
                valmag = pv.value
                strength = self.cal.QuadrupoleI2KL(magbg[i], valmag, ecur * 1e6)
                Itar = self.cal.QuadrupoleKL2I(magbg[i], strength, etar * 1e6)
            elif 'MSEX' in mag:
                pv = PV('%s:I-SET' % mag)
                valmag = pv.value
                strength = self.cal.SextupoleI2KL(magbg[i], valmag, ecur * 1e6)
                Itar = self.cal.SextupoleKL2I(magbg[i], strength, etar * 1e6)
            elif 'MBND' in mag:
                # bends are set in momentum (P-SET): shift by the energy change
                pv = PV('%s:P-SET' % mag)
                valmag = pv.value
                strength = pv.value
                Itar = valmag - EProfCur[-1] + etar
            else:
                strength = 0
                # fix: Itar was left unbound here, raising NameError below
                Itar = None
            if not Itar is None:
                if self.debug:
                    self.signal.emit('DEBUG: Setting %s to %f' % (pv.pvname, Itar))
                else:
                    pv.value = Itar

    def cycleCorrector(self, QuadList=[], progress=None, done=None):
        """Start a background thread cycling the X/Y correctors of ``QuadList``."""
        self.progress = progress
        self.done = done  # signal to indicate end of thread
        self.pvcor = [PV(ele.replace('MQUA', 'MCRX')) for ele in QuadList] + [PV(ele.replace('MQUA', 'MCRY')) for ele in QuadList]
        self.abort = False
        Thread(name='CycleCor', target=self.TcycleCorrector).start()  # starting thread which starts and monitors actual thread

    def TcycleCorrector(self):
        """Worker: step all corrector PVs through the cycling current pattern."""
        self.running = True
        istep = 0
        nstep = len(self.currents)
        while self.running:
            if self.progress:
                self.progress.emit(istep, nstep)
            if self.signal:
                self.signal.emit('Setting corrector current to %5.1f A' % self.currents[istep])
            if self.debug:
                print('DEBUG: Setting Corrector Magnets to:', self.currents[istep])
            else:
                for pv in self.pvcor:
                    # fix: referenced undefined 'cyclingCurrents' (NameError);
                    # the pattern lives in self.currents
                    pv.value = self.currents[istep]
            sleep(self.cyclingPause)
            istep += 1
            if istep == nstep or self.abort:
                self.running = False
        if self.progress:
            self.progress.emit(nstep, nstep)
        if self.done:
            self.done.emit(not self.abort)

    def cycleMagnets(self, maglist, progress=None, done=None):
        """Trigger the built-in cycling of ``maglist`` and monitor it in a thread.

        Returns the expected cycling time in seconds, or -1 when nothing
        needs cycling.
        """
        if len(maglist) == 0:
            done.emit(True)
            return -1
        self.progress = progress
        self.done = done  # signal to indicate end of thread
        state = caget_many(['%s:CYCLE-STATE' % mag for mag in maglist])
        # state == 2 means already cycled; only cycle the others
        magcyc = [mag for i, mag in enumerate(maglist) if not state[i] == 2]
        time = caget_many(['%s:CYCLE-PROGFULL' % mag for mag in magcyc])
        if len(time) == 0:
            done.emit(True)
            return -1  # no cycling needed
        imax = np.argmax(time)
        self.tcyc = time[imax]
        print('Cycling Time:', self.tcyc, 'sec for magnet', magcyc[imax])
        if self.debug:
            print('DEBUG: Cycle-SET magnets')
        else:
            caput_many(['%s:CYCLE' % mag for mag in magcyc], [2] * len(magcyc))
        self.abort = False
        Thread(name='CycleMag', target=self.TcycleMagnets).start()
        return self.tcyc

    def TcycleMagnets(self):
        """Worker: tick once per second until the cycling time elapses or abort."""
        self.running = True
        tcur = 0
        while self.running:
            sleep(1)
            tcur += 1
            if self.progress:
                self.progress.emit(tcur, self.tcyc)
            if tcur >= self.tcyc or self.abort:
                self.running = False
        if self.progress:
            self.progress.emit(self.tcyc, self.tcyc)
        if self.done:
            self.done.emit(not self.abort)
||||
112
interface/reprate.py
Normal file
112
interface/reprate.py
Normal file
@@ -0,0 +1,112 @@
|
||||
from time import sleep
|
||||
from epics import PV
|
||||
import numpy as np
|
||||
|
||||
class RepRate:
    """Control of SwissFEL bunch repetition rates, beam stoppers and the
    timing event distribution for the Aramis (bunch 1) and Athos (bunch 2)
    beamlines.  In ``debug`` mode set operations only print."""

    def __init__(self, beamline='ARAMIS', debug=False):
        self.beamline = beamline
        self.debug = debug
        self.pv = {}
        self.pv['BeamDelayAR'] = PV('SIN-TIMAST-TMA:Bunch-1-OnDelay-Sel')
        self.pv['BeamDelayAT'] = PV('SIN-TIMAST-TMA:Bunch-2-OnDelay-Sel')
        self.pv['RFDelay'] = PV('SIN-TIMAST-TMA:Beam-RF-OnDelay-Sel')
        self.pv['StopperAR'] = PV('SARMA02-MBNP100:REQUEST')
        self.pv['StopperAT'] = PV('SATMA01-MBNP100:REQUEST')
        self.pv['StopperAROut'] = PV('SARMA02-MBNP100:PLC_MOV_OUT')
        # NOTE(review): 'PLC_NOV_OUT' looks like a typo of 'PLC_MOV_OUT'
        # (cf. the Aramis channel above) -- confirm before changing.
        self.pv['StopperATOut'] = PV('SATMA01-MBNP100:PLC_NOV_OUT')
        self.pv['BeamRRAR'] = PV('SIN-TIMAST-TMA:Bunch-1-Freq-Sel')
        self.pv['BeamRRAT'] = PV('SIN-TIMAST-TMA:Bunch-2-Freq-Sel')
        self.pv['applyTiming'] = PV('SIN-TIMAST-TMA:Beam-Apply-Cmd.PROC')
        # mapping between selector index (rrId) and the real rate in Hz (rrReal)
        self.rrReal = np.array([100., 50., 33.3, 25., 16.6, 12.5, 10., 8.3, 5., 2.5, 1.])
        self.rrId = np.arange(0, 11)

    def setStopper(self, doinsert):
        """Insert (True) or retract (False) the beamline's beam stopper."""
        if self.beamline == 'ARAMIS':
            pv = self.pv['StopperAR']
        else:
            pv = self.pv['StopperAT']
        if self.debug:
            print('DEBUG: Setting beam stopper to:', doinsert)
            return
        if doinsert:
            pv.value = 1
        else:
            pv.value = 0

    def isStopperOpen(self):
        """Return the stopper's 'moved out' status value for the beamline.

        Fix: previously returned the PV object itself, which is always
        truthy, so the check could never report a closed stopper.
        """
        if self.beamline == 'ARAMIS':
            return self.pv['StopperAROut'].value
        else:
            return self.pv['StopperATOut'].value

    def stopSF(self):
        """Put both bunches and the RF on delay (stop the machine).

        NOTE(review): only the ARAMIS branch is implemented; for other
        beamlines this is a no-op -- confirm intended behavior.
        """
        if self.debug:
            print('DEBUG: Stopping Machine disabled')
            return
        if self.beamline == 'ARAMIS':
            self.pv['BeamDelayAR'].value = 1
            self.pv['BeamDelayAT'].value = 1
            sleep(0.1)
            self.pv['RFDelay'].value = 1
            sleep(0.1)
            self.pv['applyTiming'].value = 1

    def getRR(self):
        """Return the beamline's current repetition rate in Hz."""
        if self.beamline == 'ARAMIS':
            return self.rrReal[self.pv['BeamRRAR'].value]
        else:
            return self.rrReal[self.pv['BeamRRAT'].value]

    def setRR(self, rr=1.):
        """Set the beamline repetition rate to the closest supported value.

        Possible RR as of 14/01/2020
        0  100.00 Hz
        1   50.00 Hz
        2   33.33 Hz
        3   25.00 Hz
        4   16.66 Hz
        5   12.50 Hz
        6   10.00 Hz
        7    8.33 Hz
        8    5.00 Hz
        9    2.50 Hz
        10   1.00 Hz
        11  Bunch Off
        """
        findRR = abs(self.rrReal - rr) < 0.1
        if findRR.sum():  # at least one rr is matching
            rrSel = self.rrId[np.argmax(findRR)]
        else:
            print('Requested rep. rate %.3f is not available' % rr)
            return

        if self.debug:
            print('DEBUG: Setting beam rate to %d (%f Hz)' % (rrSel, rr))
            return

        if self.beamline == 'ARAMIS':
            self.pv['BeamRRAR'].value = rrSel
            sleep(0.1)
            self.pv['BeamDelayAR'].value = 0  # 0 on beam, 1 on delay
            rrAT = self.pv['BeamRRAT'].value
            if rrAT < rrSel:  # reduce athos at least to same rep rate as Aramis
                self.pv['BeamRRAT'].value = rrSel
                sleep(0.1)
                self.pv['BeamDelayAT'].value = 0  # 0 on beam, 1 on delay
        else:
            self.pv['BeamRRAT'].value = rrSel
            sleep(0.1)
            self.pv['BeamDelayAT'].value = 0  # 0 on beam, 1 on delay

        sleep(0.1)
        # rrSel 11 would mean 'Bunch Off'; keep the defensive branch even
        # though rrId only covers 0..10
        if rrSel == 11:
            self.pv['RFDelay'].value = 1
        else:
            self.pv['RFDelay'].value = 0
        sleep(0.1)
        self.pv['applyTiming'].value = 1
||||
60
interface/rf.py
Normal file
60
interface/rf.py
Normal file
@@ -0,0 +1,60 @@
|
||||
from epics import PV,caput_many
|
||||
import numpy as np
|
||||
|
||||
class RF:
    """Access to the SwissFEL linac RF stations (amplitude, phase, energy gain).

    Only the ARAMIS branch wires up the 13 S30 cavity stations; other
    beamlines get just the global energy-gain channel.  In ``debug`` mode
    set operations only print.
    """

    def __init__(self, beamline='ARAMIS', debug=False, signal=None):
        self.beamline = beamline
        self.debug = debug
        self.signal = signal
        if self.beamline == 'ARAMIS':
            self.stations = ['S30CB%2.2d' % d for d in range(1, 14)]
            self.RFamp = [PV('%s-RSYS:SET-ACC-VOLT' % station) for station in self.stations]
            self.RFphs = [PV('%s-RSYS:SET-BEAM-PHASE' % station) for station in self.stations]
            self.RFon = [PV('%s-RSYS:REQUIRED-OP' % station) for station in self.stations]
            self.ErgRange = [PV('SATSY01-MBND200:P-SET'), PV('SARCL02-MBND100:P-SET')]
            self.EGain = PV('S30:SET-E-GAIN-OP')
            self.EPhase = PV('S30:SET-BEAM-PHASE-OP')
            self.Ns = 13
        else:
            # fix: EGain was a plain list of channel-name strings, but
            # changeEnergyGain writes EGain.value -- it must be a PV object.
            self.EGain = PV('S20:SET-E-GAIN-OP')
            # NOTE(review): no EPhase channel is defined in this branch, so
            # changeEnergyGain will fail for non-ARAMIS beamlines -- confirm
            # the corresponding S20 phase channel name.
            self.Ns = 0

    def energyProfile(self):
        """Return the beam-energy profile along the linac, anchored to the
        two bend momentum set values and scaled to the nominal RF energy gain.

        Returns an array of length Ns+1 (or [0, 0] when no stations are
        configured).
        """
        if self.Ns == 0:
            return np.array([0, 0])
        valAmp = [pv.value for pv in self.RFamp]
        valPhs = [pv.value for pv in self.RFphs]
        valOn = [pv.value for pv in self.RFon]
        valErg = [pv.value for pv in self.ErgRange]

        prof = np.zeros((self.Ns + 1))
        prof[0] = valErg[0]
        for i in range(self.Ns):
            dE = 0
            if valOn[i] == 1:  # station contributes only when operational
                dE = valAmp[i] * np.sin(np.pi * valPhs[i] / 180.)
            prof[i + 1] = prof[i] + dE
        Egain = prof - prof[0]
        # NOTE(review): if no station is on, max(Egain) is 0 and this
        # division yields inf/nan -- confirm that case cannot occur.
        scl = (valErg[1] - valErg[0]) / np.max(Egain)
        prof = Egain * scl + valErg[0]
        return prof

    def changeEnergyGain(self, dE):
        """Change the linac energy gain by ``dE`` using +/-90 degree crest phase."""
        if self.debug:
            print('DEBUG: Changing energy gain of linac by:', dE)
            return
        self.EGain.value = dE
        if dE > 0:
            self.EPhase.value = 90.
        else:
            self.EPhase.value = -90.

    def setRF(self, chns, vals):
        """Write the values ``vals`` to the RF channels ``chns``."""
        if self.debug:
            for i, ele in enumerate(chns):
                print('DEBUG: RF-Settings %s: %f' % (ele, vals[i]))
            return
        caput_many(chns, vals)
|
||||
|
||||
|
||||
@@ -19,7 +19,7 @@ def getDatasetFileName(program='Unknown'):
|
||||
path = '%s/%s' % (path,day)
|
||||
if not os.path.exists(path):
|
||||
os.makedirs(path)
|
||||
datetag = datetime.datetime.now().strftime('%Y_%m_%d_%H_%M_%S_%f')
|
||||
datetag = datetime.datetime.now().strftime('%Y%m%d_%H%M%S')
|
||||
filename=('%s/%s_%s' % (path, program.replace(' ','_'), datetag))
|
||||
return filename
|
||||
|
||||
@@ -31,19 +31,19 @@ def saveDataset(program,data,actuator=None,snap=None,analysis=None,figures=None)
|
||||
# check if scan is multiple instances of a scan
|
||||
if isinstance(data,list):
|
||||
for iscan,singledata in enumerate(data):
|
||||
writeData(hid,singledata,iscan)
|
||||
writeData(hid,singledata,iscan+1)
|
||||
else:
|
||||
writeData(hid,data,1)
|
||||
# same for actuator
|
||||
if isinstance(actuator,list):
|
||||
for iscan,singleactuator in enumerate(actuator):
|
||||
writeActuator(hid,singleactuator,iscan)
|
||||
writeActuator(hid,singleactuator,iscan+1)
|
||||
else:
|
||||
writeActuator(hid,actuator,1)
|
||||
# and same for analysis
|
||||
if isinstance(analysis,list):
|
||||
for iscan,singleana in enumerate(analysis):
|
||||
writeAnalysis(hid,singleana,iscan)
|
||||
writeAnalysis(hid,singleana,iscan+1)
|
||||
else:
|
||||
writeAnalysis(hid,analysis,1)
|
||||
# write aux data
|
||||
@@ -94,9 +94,13 @@ def writeData(hid, data, scanrun=1):
|
||||
dset.attrs['system'] = getDatasetSystem(name[0])
|
||||
dset.attrs['units'] = 'unknown'
|
||||
# this part is obsolete - dimension should be given from the individual datasets
|
||||
if not 'pid' in data.keys():
|
||||
shape = None
|
||||
if 'pid' in data.keys():
|
||||
shape = data['pid'].shape
|
||||
if 'pulse_id' in data.keys():
|
||||
shape = data['pulse_id'].shape
|
||||
if shape is None:
|
||||
return
|
||||
shape = data['pid'].shape
|
||||
ndim = len(shape)
|
||||
nsam = shape[-1]
|
||||
nrec = 0
|
||||
|
||||
@@ -25,23 +25,16 @@ class SlicScan(QObject):
|
||||
self.data = None
|
||||
self.act = None
|
||||
self.snap = None
|
||||
self.doSnap = False
|
||||
|
||||
def start(self,daq,snap=False):
|
||||
self.clear()
|
||||
self.doSnap = snap
|
||||
self.daq=daq
|
||||
Thread(target=self.Tmonitor).start()
|
||||
self.startSnap(snap)
|
||||
|
||||
def startSnap(self,snap=False):
|
||||
if snap:
|
||||
Thread(target=self.Tsnap).start()
|
||||
|
||||
def Tsnap(self):
|
||||
self.snap = getSnap()
|
||||
self.sigsnap.emit(True)
|
||||
Thread(name='Scan-Monitor',target=self.Tmonitor).start()
|
||||
|
||||
def Tmonitor(self):
|
||||
mythread = Thread(target=self.Tscanner).start()
|
||||
mythread = Thread(name='Slic-Scanner',target=self.Tscanner).start()
|
||||
time.sleep(1)
|
||||
ostep = -1
|
||||
while(self.daq.running()):
|
||||
@@ -58,10 +51,21 @@ class SlicScan(QObject):
|
||||
if hasattr(self.daq,'auxdata'):
|
||||
self.data.update(self.daq.auxdata)
|
||||
self.sigterm.emit(istep==nstep)
|
||||
# if requested add snapshot acquisition
|
||||
if self.doSnap:
|
||||
self.startSnap()
|
||||
|
||||
def Tscanner(self):
|
||||
self.daq.scan()
|
||||
|
||||
def startSnap(self):
|
||||
Thread(name='Snap-Acquisition',target=self.Tsnap).start()
|
||||
|
||||
def Tsnap(self):
|
||||
self.snap = getSnap()
|
||||
print('Acquired snap')
|
||||
self.sigsnap.emit(True)
|
||||
|
||||
def stop(self):
|
||||
self.daq.stop()
|
||||
|
||||
|
||||
@@ -57,15 +57,34 @@ def getSnap(pvs=None):
|
||||
ret={}
|
||||
val = epics.caget_many(pvs)
|
||||
for i,pv in enumerate(pvs):
|
||||
if val[i]: # filter out None values
|
||||
if not val[i] is None: # filter out None values
|
||||
ret[pv]=float(val[i])
|
||||
epics.ca.clear_cache()
|
||||
return ret
|
||||
|
||||
def saveSnap(pvs={},label="", comment = "generated by application"):
|
||||
def getSnapPV(pvs=None):
    """Build epics.PV objects for a set of channels.

    ``pvs`` is either a list of PV names or a snapshot request YAML file,
    which is parsed first.  Returns None when no channels can be resolved.
    """
    if not isinstance(pvs, list):
        pvs = parseSnapShotReqYAML(pvs)
    if not pvs:
        return
    return [epics.PV(name, auto_monitor=False) for name in pvs]
|
||||
|
||||
|
||||
def getSnapVal(pvs=None):
    """Read the current value of each PV object in ``pvs``.

    Returns a dict PV name -> float value; PVs that time out (get returns
    None) are reported and omitted.  Returns None for an empty input.
    """
    if not pvs:
        return None
    snapshot = {}
    for pv in pvs:
        value = pv.get(timeout=0.1)
        if value is None:  # filter out None values
            print('Timeout:', pv.pvname)
        else:
            snapshot[pv.pvname] = float(value)
    return snapshot
|
||||
|
||||
def saveSnap(pvs={},label="", comment = "generated by application",reqfile = "SF_settings.yaml"):
|
||||
filename = datetime.datetime.now().strftime('/sf/data/applications/snapshot/SF_settings_%Y%m%d_%H%M%S.snap')
|
||||
with open(filename,'w') as fid:
|
||||
fid.write('#{"labels":["%s"],"comment":"%s", "machine_parms":{}, "save_time": 0.0, "req_file_name": "SF_settings.yaml"}\n' % (label,comment))
|
||||
fid.write('#{"labels":["%s"],"comment":"%s", "machine_parms":{}, "save_time": 0.0, "req_file_name": "%s"}\n' % (label,comment,reqfile))
|
||||
for key in pvs.keys():
|
||||
if isinstance(pvs[key],int):
|
||||
fid.write('%s,{"val": %d}\n' % (key,pvs[key]))
|
||||
@@ -73,6 +92,21 @@ def saveSnap(pvs={},label="", comment = "generated by application"):
|
||||
fid.write('%s,{"val": %f}\n' % (key,pvs[key]))
|
||||
elif isinstance(pvs[key],str):
|
||||
fid.write('%s,{"val": %s}\n' % (key,pvs[key]))
|
||||
return filename
|
||||
|
||||
def loadSnap(filename):
    """Load a snapshot file written by saveSnap.

    Returns a dict PV name -> value.  Values containing a '.' are parsed
    as float; everything else (including integers, mirroring how saveSnap
    writes them) is kept as a stripped string.
    """
    res = {}
    with open(filename, 'r') as fid:
        lines = fid.readlines()
    # the first line is the JSON-style header written by saveSnap
    for line in lines[1:]:
        if '"val":' not in line:
            continue  # fix: blank/malformed lines raised IndexError before
        split = line.split(',')
        pv = split[0].strip()
        # fix: strip whitespace so string values do not keep a leading blank
        val = split[1].split('"val":')[1].split('}')[0].strip()
        if '.' in val:
            res[pv] = float(val)
        else:
            res[pv] = val  # might be string!!!!
    return res
|
||||
|
||||
|
||||
|
||||
|
||||
33
template/ServerTemplate.py
Normal file
33
template/ServerTemplate.py
Normal file
@@ -0,0 +1,33 @@
|
||||
|
||||
import signal
|
||||
|
||||
import ServerBase
|
||||
|
||||
class ServerTemplate(ServerBase.ServerBase):
    """Minimal skeleton for a watchdog-supervised BD server.

    Subclasses of ServerBase only need to declare version/program and fill
    ``self.handler`` with sub-thread controllers exposing ``terminate()``.
    """

    def __init__(self, PVroot='MyServer', debug=False):
        self.version = '1.0.0'
        self.program = 'Server Template'
        # last two arguments are the IP address and port of the watchdog
        super(ServerTemplate, self).__init__(PVroot, debug, '127.0.0.1', 5678)

        # connect to the individual handler, which must have the function
        # terminate() implemented; each handler should run in its own thread
        # to not interfere with the main process-loop
        self.handler = {}

    def terminateSubThreads(self):
        """Stop every registered sub-thread handler."""
        for subserver in self.handler.values():
            subserver.terminate()


if __name__ == '__main__':
    debug = True
    server = ServerTemplate('SF-BC-SERVER', debug)
    signal.signal(signal.SIGTERM, server.terminate)
    try:
        server.run()
    except KeyboardInterrupt:
        server.terminate(None, None)
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
@@ -0,0 +1,5 @@
|
||||
from .zmqbase import ZMQBase
|
||||
from .serverbase import ServerBase
|
||||
from .simplecapture import SimpleCapture
|
||||
|
||||
|
||||
|
||||
92
util/serverbase.py
Normal file
92
util/serverbase.py
Normal file
@@ -0,0 +1,92 @@
|
||||
import sys
|
||||
import signal
|
||||
import os
|
||||
import socket
|
||||
import logging
|
||||
import logging.handlers
|
||||
from logging.handlers import RotatingFileHandler
|
||||
from datetime import datetime
|
||||
import time
|
||||
|
||||
from epics import PV
|
||||
sys.path.append('/sf/bd/packages/sfbd')
|
||||
from sfbd.util import ZMQBase
|
||||
|
||||
class ServerBase(ZMQBase):
    """Base class for watchdog-supervised BD servers.

    Wires up rotating-file logging, the ZMQ watchdog link (via ZMQBase) and
    the control PVs STOP/PING/LOG under ``root``.  Subclasses must define
    ``self.program`` and ``self.version`` BEFORE calling this __init__
    (``self.program`` is used as the logger name) and must implement
    ``terminateSubThreads()``.
    """

    def __init__(self, root = 'MyServer', debug = False, WDServer = '127.0.0.1', WDPort = 5678):

        # register the watchdog endpoint with the ZMQ base class
        super(ServerBase,self).__init__(WDServer,WDPort)

        self.debug = debug
        self.root = root
        # PV suffix: debug instances use separate '-SIMU' channels
        self.suffix=''
        if self.debug:
            self.suffix='-SIMU'
        self.host = socket.gethostname()
        self.pid = os.getpid() # process ID

        # enabling logging: rotating file (5 MB, one backup) in production,
        # console-only basicConfig in debug mode
        self.logfilename="/sf/data/applications/BD-SERVER/%s.log" % self.root
        handler = RotatingFileHandler(filename=self.logfilename,
                                      mode='a',
                                      maxBytes=5 * 1024 * 1024,
                                      backupCount=1,
                                      delay=0)
        if self.debug:
            logging.basicConfig(level=logging.INFO,
                                format='%(asctime)s %(name)-12s %(levelname)-8s %(message)s',
                                datefmt='%m-%d %H:%M:%S')
        else:
            logging.basicConfig(level=logging.INFO,
                                format='%(asctime)s %(name)-12s %(levelname)-8s %(message)s',
                                datefmt='%m-%d %H:%M:%S',
                                handlers=[handler,])
        # NOTE(review): self.program must be set by the subclass before
        # super().__init__ is reached -- confirm for every subclass.
        self.logger=logging.getLogger(self.program)

        # setting up ZMQ interface
        self.ZMQServerInfo(self.root,self.host,self.pid)

        # individual channels of main thread
        self.PVstop = PV('%s:STOP%s' % (self.root,self.suffix))
        self.PVstop.value = 0
        self.PVstop.add_callback(self.stop)
        self.PVping = PV('%s:PING%s' % (self.root,self.suffix))
        self.PVlog = PV('%s:LOG%s' % (self.root,self.suffix))

    def stop(self,pvname=None,value=None,**kws):
        """EPICS callback on the STOP PV: any positive value ends the run loop."""
        if value > 0:
            self.logger.info('PV:STOP triggered at %s' % datetime.now().strftime('%Y-%m-%d %H:%M:%S'))
            self.running=False

    def start(self):
        """Log the server identity (root, version, host, PID) at startup."""
        self.logger.info('Starting Server: %s at %s' % (self.root,datetime.now().strftime('%Y-%m-%d %H:%M:%S')))
        self.logger.info('PV Root: %s' % self.root)
        self.logger.info('Version: %s' % self.version)
        self.logger.info('Host: %s' % self.host)
        self.logger.info('PID: %d' % self.pid)
        if self.debug:
            self.logger.info('Debug Mode')

    def run(self):
        """Main loop: poll the watchdog once a second and update the PING PV
        until the STOP PV or the watchdog requests termination."""
        self.start()
        self.running=True
        while self.running:
            time.sleep(1)
            if self.ZMQPoll():
                self.logger.info('Watchdog Server requested termination')
                self.running = False
            self.PVping.value = datetime.now().strftime('Last active at %Y-%m-%d %H:%M:%S')
        self.terminate(None,None)

    def terminate(self,signum,frame):
        """Shut down sub-threads, notify the watchdog and exit the process.

        Signature matches a signal handler so it can be bound to SIGTERM.
        """
        # stopping any sub thread with the server specific function terminateSubThreads
        self.terminateSubThreads()
        self.logger.info('Terminating Server at %s' % datetime.now().strftime('%Y-%m-%d %H:%M:%S'))
        self.ZMQPoll('quit') # informing the watchdog
        print('Bunch Compressor Server is quitting...')
        sys.exit(0)
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
38
util/simplecapture.py
Normal file
38
util/simplecapture.py
Normal file
@@ -0,0 +1,38 @@
|
||||
import numpy as np
|
||||
|
||||
from bstrd import BSCache
|
||||
|
||||
class SimpleCapture:
    """Capture N synchronous pulses of a set of BS channels via BSCache."""

    def __init__(self):
        self.chn = None
        # large timeout/capacity so long acquisitions do not starve the cache
        self.bs = BSCache(100000, receive_timeout=100000)
        self.abort = False

    def terminate(self):
        """Shut down the BS stream and its polling thread."""
        self.bs.stop()
        self.bs.pt.running.clear()

    def stop(self):
        """Abort a running acquisition before the next sample."""
        self.abort = True

    def initChannels(self, channels):
        """Register the channels to record and start the stream."""
        self.chn = channels
        self.bs.get_vars(channels)

    def acquire(self, N=1, reprate=-1, stop=False):
        """Record N consecutive pulses.

        Returns a dict channel -> array of N values, plus 'pid' with the
        matching pulse IDs.  NOTE(review): reprate and stop are currently
        unused -- confirm.
        """
        self.abort = False
        values = np.zeros((N, len(self.chn)))
        pulse_ids = np.zeros((N))
        self.bs.flush()  # drop stale messages so the samples are current
        sample = 0
        while sample < N and not self.abort:
            rec = next(self.bs)  # pull one pulse from the cache
            pulse_ids[sample] = rec['pid']
            values[sample, :] = np.array([rec[ch] for ch in self.chn])
            sample += 1
        result = {ch: values[:, i] for i, ch in enumerate(self.chn)}
        result['pid'] = pulse_ids
        return result
|
||||
70
util/zmqbase.py
Normal file
70
util/zmqbase.py
Normal file
@@ -0,0 +1,70 @@
|
||||
import zmq
|
||||
import socket
|
||||
import sys
|
||||
|
||||
class ZMQBase:
    """Reliable REQ/REP client link to a watchdog server.

    Implements a retrying request pattern: send the identity message, wait
    up to REQUEST_TIMEOUT ms for a reply, and rebuild the socket up to
    REQUEST_RETRIES times before declaring the server offline.

    NOTE(review): methods use ``self.logger``, which this class never
    creates -- subclasses (e.g. ServerBase) must provide it before calling
    ZMQPoll.
    """

    def __init__(self, host='127.0.0.1',port = 5678):

        self.host=host
        self.port = port
        # identity message exchanged with the watchdog; filled by ZMQServerInfo
        self.msg={'action':'','PV':'','host':'','pid':0}

        self.REQUEST_TIMEOUT = 500   # ms to wait for a reply per attempt
        self.REQUEST_RETRIES = 2     # socket rebuilds before giving up
        self.SERVER_ENDPOINT = "tcp://%s:%d" % (host,port)
        self.serverIsOffline=False # assume that it is online

    def ZMQServerInfo(self,PVroot,host,pid):
        """Store the identity (PV root, host, PID) sent with every request."""
        self.msg['PV']=PVroot
        self.msg['host']=host
        self.msg['pid']=pid

    def ZMQPoll(self,tag='ping'):
        """Send one request tagged ``tag`` and return True iff the watchdog
        replied 'quit' (i.e. requested termination); False otherwise,
        including when the server is unreachable."""
        self.msg['action']=tag
        context = zmq.Context()
        client = context.socket(zmq.REQ)
        client.connect(self.SERVER_ENDPOINT)
        client.send_pyobj(self.msg)

        retries_left = self.REQUEST_RETRIES
        while True:
            # poll returns a bitmask; POLLIN set means a reply is waiting
            if (client.poll(self.REQUEST_TIMEOUT) & zmq.POLLIN) != 0:
                reply = client.recv_pyobj()
                check = self.ZMQIdentifyReply(reply)
                if self.serverIsOffline:
                    self.logger.info("Watchdog server came online")
                    self.serverIsOffline=False
                if check:
                    # NOTE(review): client/context are only closed on the
                    # 'quit' path -- on a normal reply they are left to the
                    # garbage collector; confirm this is acceptable.
                    if reply['action'] == 'quit':
                        client.close()
                        context.term()
                    return (reply['action'] == 'quit')
                else:
                    self.logger.warning("Malformed reply from server")
                    continue

            retries_left -= 1
            # Socket is confused. Close and remove it.
            client.setsockopt(zmq.LINGER, 0)
            client.close()
            if retries_left == 0:
                # log the transition to offline only once
                if not self.serverIsOffline:
                    self.logger.info("Watchdog server seems to be offline")
                    self.serverIsOffline=True
                context.term()
                return False

            # Create new connection
            client = context.socket(zmq.REQ)
            client.connect(self.SERVER_ENDPOINT)
            client.send_pyobj(self.msg)

    def ZMQIdentifyReply(self,reply):
        """True iff the reply echoes this client's PV root, host and PID."""
        for field in ['PV','host','pid']:
            if not reply[field] == self.msg[field]:
                return False
        return True
|
||||
|
||||
|
||||
Reference in New Issue
Block a user