import os
import numpy as np
from ROOT import *
from multiprocessing import Pool
from array import array

_cfg = {} ### global config
_aduToKev3DMap = None ### global ADU-to-keV calibration map (caliFile)
_pedestalAduFrame = None ### global pedestal frame [ADU]
_noiseEneFrame = None ### global per-pixel noise frame [keV]
h1_ClusterEnergy, h1_PixelEnergy, h1_CenterPixelEnergy, h2_Qc = None, None, None, None
sumFrame = np.zeros((400, 400), dtype=np.float32) ### global sum of all frames
EtaxLUT, EtayLUT = None, None ### global LUTs for the charge-weighted center
nChunks = 64
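
### Illustrative example only: a config dict with the keys this module reads
### (init, _processFrames, process, getPedestalAndNoise). The values below are
### placeholders/assumptions, not defaults of the actual pipeline.
exampleCfg = {
    'NX': 400, 'NY': 400,                 # sensor size in pixels
    'headerSize': 56,                     # per-frame header length in 16-bit words
    'NFrame': 640000,                     # total number of frames to process
    'NThread': 16,                        # worker processes used by process()
    'energy': 8.0,                        # photon energy [keV]
    'selectionRange': 1.0,                # half-width of the energy window [keV]
    'clusterWidth': 3,                    # cluster size (3 -> 3x3 pixels)
    'Roi': (10, 390, 10, 390),            # (x_min, x_max, y_min, y_max)
    'caliFile': None,                     # ADU-to-keV 3D map (numpy array) or None
    'pedestalFileName': 'pedestal_d0.raw',
    'signalFileNames': ['signal_d0.raw'],
    # optional keys: 'nFiber', 'energyScalingFactor', 'coefficents', 'etaxLUT', 'etayLUT'
}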

def init(cfg):
    global _cfg
    global _aduToKev3DMap
    _cfg = cfg
    _aduToKev3DMap = cfg['caliFile']
    Roi = cfg['Roi']
    global sumFrame
    sumFrame = sumFrame[Roi[2]:Roi[3], Roi[0]:Roi[1]] ### crop the sumFrame to the ROI
    if 'etaxLUT' in cfg and 'etayLUT' in cfg:
        global EtaxLUT, EtayLUT
        EtaxLUT = np.load(cfg['etaxLUT'])
        EtayLUT = np.load(cfg['etayLUT'])

def convertAduFrameToEnergyFrame(aduFrame):
    ### per-pixel piecewise-linear ADU-to-keV conversion using the 3D calibration map:
    ### axis 0 is the ADU bin (aduPerBin wide), axes 1 and 2 are the pixel coordinates (y, x)
    if _aduToKev3DMap is None:
        return aduFrame
    NX, NY = _cfg['NX'], _cfg['NY']
    if _aduToKev3DMap.shape[0] == 1640:
        ### map variant with 1640 ADU bins: interpolate on |ADU| and restore the sign afterwards
        idxFrame = (np.abs(aduFrame//10)).astype(np.int32)
        idxFrame[idxFrame > _aduToKev3DMap.shape[0]-2] = _aduToKev3DMap.shape[0]-2
        NY, NX = _aduToKev3DMap.shape[1:3]
        _energyFrame0 = _aduToKev3DMap[idxFrame, np.arange(NY).reshape(-1, 1), np.arange(NX)]
        _energyFrame1 = _aduToKev3DMap[idxFrame+1, np.arange(NY).reshape(-1, 1), np.arange(NX)]
        energyFrame = _energyFrame0 + (_energyFrame1 - _energyFrame0)/10 * (np.abs(aduFrame) - idxFrame*10)
        energyFrame *= np.sign(aduFrame)

    else:
        ### map variant with leading bins for negative ADU (down to -1000 ADU)
        aduPerBin = 10
        nExtraBins = 1000 // aduPerBin
        idxFrame = (aduFrame//aduPerBin).astype(np.int32) + nExtraBins
        idxFrame[idxFrame > _aduToKev3DMap.shape[0]-2] = _aduToKev3DMap.shape[0]-2
        NY, NX = _aduToKev3DMap.shape[1:3]
        _energyFrame0 = _aduToKev3DMap[idxFrame, np.arange(NY).reshape(-1, 1), np.arange(NX)]
        _energyFrame1 = _aduToKev3DMap[idxFrame+1, np.arange(NY).reshape(-1, 1), np.arange(NX)]
        energyFrame = _energyFrame0 + (_energyFrame1 - _energyFrame0)/aduPerBin * (aduFrame - (idxFrame-nExtraBins)*aduPerBin)
    if 'energyScalingFactor' in _cfg:
        energyFrame *= _cfg['energyScalingFactor']
    return energyFrame
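
### Illustrative sketch (not used by the pipeline): the conversion above is a per-pixel,
### piecewise-linear table lookup. For a single pixel (y, x) with the second map layout
### above (nExtraBins leading bins covering negative ADU) it reduces to the following.
### The function name and defaults are assumptions for illustration only.
def _exampleSinglePixelLookup(adu, y, x, aduToKev3DMap, aduPerBin=10, nExtraBins=100):
    idx = int(adu // aduPerBin) + nExtraBins          # ADU bin index, shifted for negative ADU
    idx = min(idx, aduToKev3DMap.shape[0] - 2)        # clamp so idx+1 stays inside the map
    e0 = aduToKev3DMap[idx,     y, x]                 # energy at the lower bin edge
    e1 = aduToKev3DMap[idx + 1, y, x]                 # energy at the upper bin edge
    return e0 + (e1 - e0) / aduPerBin * (adu - (idx - nExtraBins) * aduPerBin)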

def bookHistograms(energy, suffix = '', energyBinWidth = 0.1, isMC = False):
    histMaxEnergy = energy * 1.3
    h1_ClusterEnergy = TH1D(f'h1_ClusterEnergy{suffix}', f'h1_ClusterEnergy{suffix}', int(histMaxEnergy//energyBinWidth), -1, histMaxEnergy)
    h1_ClusterEnergy.SetDirectory(0)
    h1_ClusterEnergy.SetTitle('Cluster Energy;Energy [keV];Counts')
    h1_PixelEnergy = TH1D(f'h1_PixelEnergy{suffix}', f'h1_PixelEnergy{suffix}', int(histMaxEnergy//energyBinWidth), -1, histMaxEnergy)
    h1_PixelEnergy.SetDirectory(0)
    h1_PixelEnergy.SetTitle('Pixel Energy;Energy [keV];Counts')
    h1_CenterPixelEnergy = TH1D(f'h1_CenterPixelEnergy{suffix}', f'h1_CenterPixelEnergy{suffix}', int(histMaxEnergy//energyBinWidth), -1, histMaxEnergy)
    h1_CenterPixelEnergy.SetDirectory(0)
    h1_CenterPixelEnergy.SetTitle('Center Pixel Energy;Energy [keV];Counts')
    h2_Qc = TH2D(f'h2_Qc{suffix}', f'h2_Qc{suffix}', 201, 0, 1, 201, 0, 1)
    h2_Qc.SetDirectory(0)
    h2_Qc.SetTitle('Charge weighted center;#eta_{x};#eta_{y};Counts')
    h2_counts = TH2D(f'h2_counts{suffix}', f'h2_counts{suffix}', _cfg['Roi'][1] - _cfg['Roi'][0], _cfg['Roi'][0], _cfg['Roi'][1], _cfg['Roi'][3] - _cfg['Roi'][2], _cfg['Roi'][2], _cfg['Roi'][3])
    h2_counts.SetDirectory(0)
    h2_RawFrame = TH2D(f'h2_RawFrame{suffix}', f'h2_RawFrame{suffix}', _cfg['Roi'][1] - _cfg['Roi'][0], _cfg['Roi'][0], _cfg['Roi'][1], _cfg['Roi'][3] - _cfg['Roi'][2], _cfg['Roi'][2], _cfg['Roi'][3])
    h2_RawFrame.SetDirectory(0)
    interpolationBins = 10
    xBins = (_cfg['Roi'][1] - _cfg['Roi'][0]) * interpolationBins
    yBins = (_cfg['Roi'][3] - _cfg['Roi'][2]) * interpolationBins
    h2_interpolated = TH2D(f'h2_interpolated{suffix}', f'h2_interpolated{suffix}', xBins, _cfg['Roi'][0], _cfg['Roi'][1], yBins, _cfg['Roi'][2], _cfg['Roi'][3])
    h2_interpolated.SetDirectory(0)
    h2_interpolatedSubpixel = TH2D(f'h2_interpolatedSubpixel{suffix}', f'h2_interpolatedSubpixel{suffix}', interpolationBins, 0, 1, interpolationBins, 0, 1)
    h2_interpolatedSubpixel.SetDirectory(0)

    if isMC:
        h1_ChargeCollectionEfficiency = TH1D(f'h1_ChargeCollectionEfficiency{suffix}', f'h1_ChargeCollectionEfficiency{suffix}', 400, 0.8, 1.01)
        return h1_ClusterEnergy.Clone(), h1_PixelEnergy.Clone(), h1_CenterPixelEnergy.Clone(), h2_Qc.Clone(), h1_ChargeCollectionEfficiency.Clone()

    return h1_ClusterEnergy, h1_PixelEnergy, h1_CenterPixelEnergy, h2_Qc, h2_counts, h2_RawFrame, h2_interpolated, h2_interpolatedSubpixel

def bilinear_xy_lookup(etaX, etaY, X_tab, Y_tab):
    """
    Bilinear interpolation from the charge-weighted center (eta_x, eta_y) to the
    sub-pixel position (u, v), using the lookup tables X_tab and Y_tab
    (indexed by eta bins with bin centers at (i + 0.5) / nBin).
    """
    x = np.asarray(etaX); y = np.asarray(etaY)
    nBinX, nBinY = X_tab.shape

    ### bin centers
    x_centers = (np.arange(nBinX) + 0.5) / nBinX
    y_centers = (np.arange(nBinY) + 0.5) / nBinY

    # search bin
    ix = np.clip(np.searchsorted(x_centers, x) - 1, 0, nBinX-2)
    iy = np.clip(np.searchsorted(y_centers, y) - 1, 0, nBinY-2)

    # four corners
    x0 = x_centers[ix]; x1 = x_centers[ix+1]
    y0 = y_centers[iy]; y1 = y_centers[iy+1]
    tx = np.where(x1 > x0, (x - x0)/(x1 - x0), 0.0)
    ty = np.where(y1 > y0, (y - y0)/(y1 - y0), 0.0)

    # values at the corners
    X00 = X_tab[ix,   iy  ]
    X10 = X_tab[ix+1, iy  ]
    X01 = X_tab[ix,   iy+1]
    X11 = X_tab[ix+1, iy+1]
    Y00 = Y_tab[ix,   iy  ]
    Y10 = Y_tab[ix+1, iy  ]
    Y01 = Y_tab[ix,   iy+1]
    Y11 = Y_tab[ix+1, iy+1]

    # bilinear interpolation
    X0 = (1-tx)*X00 + tx*X10
    X1 = (1-tx)*X01 + tx*X11
    Y0 = (1-tx)*Y00 + tx*Y10
    Y1 = (1-tx)*Y01 + tx*Y11
    X = (1-ty)*X0 + ty*X1
    Y = (1-ty)*Y0 + ty*Y1
    return X, Y
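
### Usage sketch (illustrative only, not called by the pipeline): with "identity" LUTs the
### lookup returns the input eta values. The toy 10x10 tables below stand in for the arrays
### loaded from cfg['etaxLUT'] / cfg['etayLUT'].
def _exampleBilinearLookup():
    centers = (np.arange(10) + 0.5) / 10
    X_tab = np.tile(centers.reshape(-1, 1), (1, 10))   # X depends only on eta_x
    Y_tab = np.tile(centers.reshape(1, -1), (10, 1))   # Y depends only on eta_y
    return bilinear_xy_lookup(0.37, 0.81, X_tab, Y_tab)  # ~(0.37, 0.81)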

def _processFrames(idxChunk):
    startFrame, endFrame = _cfg['NFrame'] * idxChunk // nChunks, min(_cfg['NFrame'] * (idxChunk + 1) // nChunks, _cfg['NFrame'])
    Energy = _cfg['energy']
    selectionRange = _cfg['selectionRange']
    signalFileNames = _cfg['signalFileNames']
    clusterWidth = _cfg['clusterWidth']
    NX = _cfg['NX']
    NY = _cfg['NY']
    headerSize = _cfg['headerSize']
    Roi = _cfg['Roi']
    nFramePerFile = os.path.getsize(signalFileNames[0]) // (NX * NY + 56) // 2
    _h1_ClusterEnergy, _h1_PixelEnergy, _h1_CenterPixelEnergy, _h2_Qc, _h2_counts, _h2_RawFrame, _h2_interpolated, _h2_interpolatedSubpixel = bookHistograms(Energy, f'_{idxChunk}')

    for idxFrame in range(startFrame, endFrame):
        if idxFrame % 10000 == 0:
            print(f'Processing frame {idxFrame}...')
        idxFile, idxFrameInFile = divmod(idxFrame, nFramePerFile)
        try:
            if 'nFiber' in _cfg and _cfg['nFiber'] == 2:
                ### two-fiber readout: each file ('d0'/'d1') holds one half of the frame
                offset = (headerSize + idxFrameInFile * (NX * NY//2 + headerSize)) * 2
                fileName1 = signalFileNames[idxFile]
                fileName2 = fileName1.replace('d0', 'd1')
                _signalAduFrame1 = np.fromfile(fileName1, dtype=np.uint16, offset=offset, count=NX * NY //2).astype(np.int32).reshape(NY//2, NX)
                _signalAduFrame2 = np.fromfile(fileName2, dtype=np.uint16, offset=offset, count=NX * NY //2).astype(np.int32).reshape(NY//2, NX)
                signalAduFrame = np.concatenate([_signalAduFrame1, _signalAduFrame2], axis=0)
                del _signalAduFrame1, _signalAduFrame2
            else:
                offset = (headerSize + idxFrameInFile * (NX * NY + headerSize)) * 2
                signalAduFrame = np.fromfile(signalFileNames[idxFile], dtype=np.uint16, offset=offset, count=NX * NY).astype(np.int32).reshape(NX, NY)
        except Exception as e:
            print(f'Error reading frame {idxFrameInFile} from file {signalFileNames[idxFile]}: {e}, stop processing this chunk ({startFrame}-{endFrame})')
            return _h1_ClusterEnergy.Clone(), _h1_PixelEnergy.Clone(), _h1_CenterPixelEnergy.Clone(), _h2_Qc.Clone(), _h2_counts.Clone(), _h2_RawFrame.Clone(), _h2_interpolated.Clone(), _h2_interpolatedSubpixel.Clone()

        signalAduFrame = signalAduFrame - _pedestalAduFrame ### pedestal subtraction
        signalEneFrame = convertAduFrameToEnergyFrame(signalAduFrame)

        for x in range(Roi[0], Roi[1]):
            for y in range(Roi[2], Roi[3]):
                _h2_RawFrame.Fill(x, y, signalEneFrame[y, x])
                ### 5 sigma noise cut
                if signalEneFrame[y, x] < 5 * _noiseEneFrame[y, x]:
                    continue
                ### local maximum
                if signalEneFrame[y, x] != np.max(signalEneFrame[y-1:y+2, x-1:x+2]):
                    continue
                ### pile-up cut: 3 sigma criterion on the 7*7 cluster excluding the central 3*3 cluster
                r = 3 ### 7*7 cluster
                over3Sigma = signalEneFrame[y-r:y+r+1, x-r:x+r+1] > 3 * _noiseEneFrame[y-r:y+r+1, x-r:x+r+1]
                over3Sigma[r-1:r+2, r-1:r+2] = False
                if np.sum(over3Sigma) > 0:
                    continue

                ### get the cluster
                # pixelEnergies = signalEneFrame[y-1:y+2, x-1:x+2].flatten()
                pixelEnergies = signalEneFrame[y-clusterWidth//2:y+clusterWidth//2+1, x-clusterWidth//2:x+clusterWidth//2+1].flatten()
                clusterEnergy = np.sum(pixelEnergies)
                if 'coefficents' in _cfg:
                    ### optional linear recalibration: E -> (E - b) / k
                    k, b = _cfg['coefficents']
                    clusterEnergy = (clusterEnergy - b) / k
                    pixelEnergies = (pixelEnergies - b) / k

                ### fill the qc and the interpolated histograms before the bad pixel cut
                if Energy - selectionRange < clusterEnergy < Energy + selectionRange:
                    ### charge-weighted center (eta) of the cluster, folded into [0, 1) within one pixel
                    x_qc = (np.average(np.arange(clusterWidth), weights=signalEneFrame[y-clusterWidth//2:y+clusterWidth//2+1, x-clusterWidth//2:x+clusterWidth//2+1].sum(axis=0)) + 0.5)%1
                    y_qc = (np.average(np.arange(clusterWidth), weights=signalEneFrame[y-clusterWidth//2:y+clusterWidth//2+1, x-clusterWidth//2:x+clusterWidth//2+1].sum(axis=1)) + 0.5)%1
                    _h2_Qc.Fill(x_qc, y_qc)
                    _h2_counts.Fill(x, y)
                    if 'etaxLUT' in _cfg and 'etayLUT' in _cfg:
                        x_frac, y_frac = bilinear_xy_lookup(x_qc, y_qc, EtaxLUT, EtayLUT)
                        _h2_interpolated.Fill(x + x_frac, y + y_frac)
                        _h2_interpolatedSubpixel.Fill(x_frac, y_frac)

                ### bad pixel cut: if a bad pixel is in the 5*5 cluster, skip this cluster
                if _aduToKev3DMap is not None and np.any(_aduToKev3DMap[-1, y-2:y+3, x-2:x+3] < 0):
                    continue
                _h1_ClusterEnergy.Fill(clusterEnergy)

                ### fill the energy histograms after the bad pixel cut
                ### energy window cut
                if Energy - selectionRange < clusterEnergy < Energy + selectionRange:
                    _h1_CenterPixelEnergy.Fill(signalEneFrame[y, x])
                    _h1_PixelEnergy.FillN(len(pixelEnergies), array('d', pixelEnergies), array('d', np.ones(len(pixelEnergies))))

    # return _h1_ClusterEnergy.Clone(), _h1_PixelEnergy.Clone(), _h1_CenterPixelEnergy.Clone(), _h2_Qc.Clone(), signalEneFrame[Roi[2]:Roi[3], Roi[0]:Roi[1]]
    return _h1_ClusterEnergy, _h1_PixelEnergy, _h1_CenterPixelEnergy, _h2_Qc, _h2_counts, _h2_RawFrame, _h2_interpolated, _h2_interpolatedSubpixel
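
### Illustrative sketch (not called by the pipeline): the charge-weighted center used above,
### applied to a toy cluster. A hit in the middle of the central pixel gives
### (eta_x, eta_y) = (0.5, 0.5); charge shared towards a neighbour shifts eta accordingly.
def _exampleChargeWeightedCenter(cluster):
    clusterWidth = cluster.shape[0]
    x_qc = (np.average(np.arange(clusterWidth), weights=cluster.sum(axis=0)) + 0.5) % 1
    y_qc = (np.average(np.arange(clusterWidth), weights=cluster.sum(axis=1)) + 0.5) % 1
    return x_qc, y_qc
# e.g. _exampleChargeWeightedCenter(np.array([[0., 0., 0.], [0., 8., 0.], [0., 0., 0.]])) -> (0.5, 0.5)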

def process():
    with Pool(_cfg['NThread'], maxtasksperchild=1) as p:
        results = p.map(_processFrames, range(nChunks))

    global h1_ClusterEnergy, h1_PixelEnergy, h1_CenterPixelEnergy, h2_Qc, h2_counts, h2_RawFrame, h2_interpolated, h2_interpolatedSubpixel
    h1_ClusterEnergy, h1_PixelEnergy, h1_CenterPixelEnergy, h2_Qc, h2_counts, h2_RawFrame, h2_interpolated, h2_interpolatedSubpixel = bookHistograms(_cfg['energy'])
    for h in (h1_ClusterEnergy, h1_PixelEnergy, h1_CenterPixelEnergy, h2_Qc, h2_counts, h2_RawFrame, h2_interpolated, h2_interpolatedSubpixel):
        h.SetDirectory(0)

    global sumFrame
    ### merge the per-chunk histograms
    for _h1_ClusterEnergy, _h1_PixelEnergy, _h1_CenterPixelEnergy, _h2_Qc, _h2_counts, _h2_RawFrame, _h2_interpolated, _h2_interpolatedSubpixel in results:
        h1_ClusterEnergy.Add(_h1_ClusterEnergy)
        h1_PixelEnergy.Add(_h1_PixelEnergy)
        h1_CenterPixelEnergy.Add(_h1_CenterPixelEnergy)
        h2_Qc.Add(_h2_Qc)
        h2_interpolated.Add(_h2_interpolated)
        h2_interpolatedSubpixel.Add(_h2_interpolatedSubpixel)
        h2_counts.Add(_h2_counts)
        h2_RawFrame.Add(_h2_RawFrame)
        del _h1_ClusterEnergy, _h1_PixelEnergy, _h1_CenterPixelEnergy, _h2_Qc, _h2_counts, _h2_RawFrame, _h2_interpolated, _h2_interpolatedSubpixel

    c = TCanvas("c", "c", 1600, 800)
    c.Divide(2, 1)
    pad1 = c.cd(1)
    # pad1.SetLogy(1)
    h1_ClusterEnergy.Draw()
    ### fit the cluster energy: seed the Gaussian around the peak, then refit within +- 1 sigma
    center = h1_ClusterEnergy.GetBinCenter(h1_ClusterEnergy.GetMaximumBin())
    h1_ClusterEnergy.Fit('gaus', 'Q', '', center - 0.5, center + 1)
    center = h1_ClusterEnergy.GetFunction('gaus').GetParameter(1)
    sigma = h1_ClusterEnergy.GetFunction('gaus').GetParameter(2)
    h1_ClusterEnergy.Fit('gaus', 'Q', '', center - sigma, center + sigma)
    print(f'cluster energy = {h1_ClusterEnergy.GetFunction("gaus").GetParameter(1):.3f} +- {h1_ClusterEnergy.GetFunction("gaus").GetParError(1):.3f}, width = {h1_ClusterEnergy.GetFunction("gaus").GetParameter(2):.3f} +- {h1_ClusterEnergy.GetFunction("gaus").GetParError(2):.3f}, Chi2/NDF = {h1_ClusterEnergy.GetFunction("gaus").GetChisquare()/(h1_ClusterEnergy.GetFunction("gaus").GetNDF()+1e-9):.3f}')
    pad2 = c.cd(2); pad2.SetLogy(1)
    h1_PixelEnergy.Draw()

    ### fit the pixel energy
    # center = _cfg['energy']
    # h1_PixelEnergy.Fit('gaus', 'Q', '', center - 0.1, center + 1)
    # center = h1_PixelEnergy.GetFunction('gaus').GetParameter(1)
    # sigma = h1_PixelEnergy.GetFunction('gaus').GetParameter(2)
    # h1_PixelEnergy.Fit('gaus', 'Q', '', center - 0.5*sigma, center + 2*sigma)
    # h1_PixelEnergy.GetYaxis().SetRangeUser(h1_PixelEnergy.GetMaximum() * 1e-5, h1_PixelEnergy.GetMaximum() * 2)
    # print(f'pixel energy = {h1_PixelEnergy.GetFunction("gaus").GetParameter(1):.3f} +- {h1_PixelEnergy.GetFunction("gaus").GetParError(1):.3f}, width = {h1_PixelEnergy.GetFunction("gaus").GetParameter(2):.3f} +- {h1_PixelEnergy.GetFunction("gaus").GetParError(2):.3f}, Chi2/NDF = {h1_PixelEnergy.GetFunction("gaus").GetChisquare()/(h1_PixelEnergy.GetFunction("gaus").GetNDF()+1e-6):.3f}')
    return c.Clone()
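
### Optional follow-up (illustrative sketch only, not part of process()): persist the merged
### histograms to a ROOT file after process() has run. The function name and output file
### name are assumptions; the h2_* maps could be written the same way.
def _exampleSaveHistograms(outFileName='clusterAnalysis.root'):
    fOut = TFile(outFileName, 'RECREATE')
    for h in (h1_ClusterEnergy, h1_PixelEnergy, h1_CenterPixelEnergy, h2_Qc):
        if h is not None:
            h.Write()
    fOut.Close()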

def getPedestalAndNoise():
    NX, NY = _cfg['NX'], _cfg['NY']
    pedestalFileName = _cfg['pedestalFileName']
    if 'nFiber' in _cfg and _cfg['nFiber'] == 2:
        ### two-fiber readout: read both halves and stack them into full frames
        pedestalFileName2 = pedestalFileName.replace('d0', 'd1')
        _pedestalAduFrames1 = np.fromfile(pedestalFileName, dtype=np.uint16).astype(np.int32)
        _pedestalAduFrames1 = _pedestalAduFrames1.reshape(-1, NX * NY //2 + 56)
        _pedestalAduFrames1 = _pedestalAduFrames1[:, 56:].reshape(-1, NY//2, NX)
        _pedestalAduFrames2 = np.fromfile(pedestalFileName2, dtype=np.uint16).astype(np.int32)
        _pedestalAduFrames2 = _pedestalAduFrames2.reshape(-1, NX * NY //2 + 56)
        _pedestalAduFrames2 = _pedestalAduFrames2[:, 56:].reshape(-1, NY//2, NX)
        pedestalAduFrames = np.concatenate([_pedestalAduFrames1, _pedestalAduFrames2], axis=1)
        del _pedestalAduFrames1, _pedestalAduFrames2
    else:
        pedestalAduFrames = np.fromfile(pedestalFileName, dtype=np.uint16).astype(np.int32)
        pedestalAduFrames = pedestalAduFrames.reshape(-1, NX * NY + 56)
        pedestalAduFrames = pedestalAduFrames[:, 56:].reshape(-1, NX, NY)

    pedestalAduFrames = pedestalAduFrames[pedestalAduFrames.shape[0]//10:] # skip the first 10% of the frames
    global _pedestalAduFrame, _noiseEneFrame
    _pedestalAduFrame = np.mean(pedestalAduFrames, axis=0)
    noiseAduFrame = np.std(pedestalAduFrames, axis=0, ddof=1)
    ### convert every pedestal frame to energy, then take the per-pixel standard deviation
    with Pool(16) as p:
        pedestalEneFrames = p.map(convertAduFrameToEnergyFrame, pedestalAduFrames)
    _noiseEneFrame = np.std(pedestalEneFrames, axis=0, ddof=1)
    print(f'Average noise = {np.mean(_noiseEneFrame):.3f} keV; {np.mean(noiseAduFrame):.3f} ADU')
    del pedestalAduFrames, pedestalEneFrames

def getPedestalAndNoise_simplified():
    ### simplified variant: convert only the ADU noise map to energy (a single conversion)
    ### instead of converting every pedestal frame before taking the standard deviation
    NX, NY = _cfg['NX'], _cfg['NY']
    pedestalFileName = _cfg['pedestalFileName']
    pedestalAduFrames = np.fromfile(pedestalFileName, dtype=np.uint16).astype(np.int32)
    pedestalAduFrames = pedestalAduFrames.reshape(-1, NX * NY + 56)
    pedestalAduFrames = pedestalAduFrames[:, 56:].reshape(-1, NX, NY)
    pedestalAduFrames = pedestalAduFrames[pedestalAduFrames.shape[0]//10:] # skip the first 10% of the frames
    global _pedestalAduFrame, _noiseEneFrame
    _pedestalAduFrame = np.mean(pedestalAduFrames, axis=0)
    noiseAduFrame = np.std(pedestalAduFrames, axis=0, ddof=1)
    _noiseEneFrame = convertAduFrameToEnergyFrame(noiseAduFrame)

    print(f'Average noise = {np.mean(_noiseEneFrame):.3f} keV; {np.mean(noiseAduFrame):.3f} ADU')
    del pedestalAduFrames
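
### Illustrative driver (commented out; paths and config values are assumptions, see exampleCfg above).
### The expected call order is init() -> getPedestalAndNoise() -> process().
# if __name__ == '__main__':
#     init(exampleCfg)
#     getPedestalAndNoise()          # or getPedestalAndNoise_simplified()
#     canvas = process()             # fills the global histograms and fits the cluster energy peak
#     canvas.SaveAs('clusterEnergy.png')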