436 lines
22 KiB
Python
436 lines
22 KiB
Python
|
|
import os
|
|
import numpy as np
|
|
from ROOT import TH1D, TH2D, TCanvas, TF1, TFile
|
|
from multiprocessing import Pool
|
|
from array import array
|
|
import h5py
|
|
from multiprocessing import Pool
|
|
import aare
|
|
|
|
### Module-level state; set once in the parent process and inherited by
### forked Pool workers (fork shares these by copy-on-write).
_cfg = {} ### global config dict, populated by init()
_aduToKev3DMap = None ### global ADU->keV calibration lookup table, loaded by init() from cfg['caliFileName']
_pedestalAduFrame = None ### global pedestal frame (mean ADU per pixel), set by getPedestalAndNoise*()
_noiseEneFrame = None ### global per-pixel noise frame in keV, set by getPedestalAndNoise*()
nChunks = 16 ### number of frame chunks processed in parallel (overridden by cfg['NChunks'] in init())
clusterSize2Photon = 7 ### side length [pixels] of the square sample window saved for 2-photon clusters
clusterSize1Photon = 3 ### side length [pixels] of the square sample window saved for 1-photon clusters
|
|
|
|
def init(cfg):
    """Initialise module-level state from a run-configuration dict.

    Loads the ADU->keV calibration lookup table and stores the config in
    module globals so that worker processes created later (via fork)
    inherit them.

    Parameters
    ----------
    cfg : dict
        Run configuration. Must contain 'caliFileName' (path to a .npy
        calibration table); may contain 'NChunks' (parallel chunk count,
        default 16).
    """
    global _cfg
    global _aduToKev3DMap
    global nChunks
    _cfg = cfg
    _aduToKev3DMap = np.load(cfg['caliFileName'])
    # (removed an unused local `Roi = cfg['Roi']` that had no effect)
    nChunks = _cfg.get('NChunks', 16)
|
|
|
|
def convertAduFrameToEnergyFrame(aduFrame):
    """Convert a pedestal-subtracted ADU frame to energy [keV].

    Interpolates linearly in the per-pixel calibration table
    _aduToKev3DMap, whose shape is (nAduBins, NY, NX) with one bin per
    10 ADU. Two table layouts are supported:

    * legacy layout (exactly 1640 bins): indexed by |ADU|//10, with the
      sign of the ADU value re-applied to the interpolated energy;
    * newer layout: indexed by ADU//10 shifted by 100 bins, i.e. the
      first 1000 ADU of the table cover negative ADU values.

    If no calibration map is loaded, the frame is returned unchanged.
    When the config contains 'energyScalingFactor', the result is scaled
    by it.

    Parameters
    ----------
    aduFrame : np.ndarray
        Pedestal-subtracted frame in ADU, shape (NY, NX) matching the
        calibration table.

    Returns
    -------
    np.ndarray
        Frame in keV (same shape as the input).
    """
    if _aduToKev3DMap is None:
        return aduFrame
    nAduBins = _aduToKev3DMap.shape[0]
    NY, NX = _aduToKev3DMap.shape[1:3]
    rowIdx = np.arange(NY).reshape(-1, 1)
    colIdx = np.arange(NX)
    if nAduBins == 1640:
        ### legacy table: indexed by |ADU|, sign restored afterwards
        idxFrame = (np.abs(aduFrame) // 10).astype(np.int32)
        np.clip(idxFrame, 0, nAduBins - 2, out=idxFrame)
        _energyFrame0 = _aduToKev3DMap[idxFrame, rowIdx, colIdx]
        _energyFrame1 = _aduToKev3DMap[idxFrame + 1, rowIdx, colIdx]
        energyFrame = _energyFrame0 + (_energyFrame1 - _energyFrame0)/10 * (np.abs(aduFrame) - idxFrame*10)
        energyFrame *= np.sign(aduFrame)
    else:
        aduPerBin = 10
        nExtraBins = 1000 // aduPerBin  # table starts at -1000 ADU
        idxFrame = (aduFrame//aduPerBin).astype(np.int32) + nExtraBins
        ### clamp BOTH ends of the index range; the previous version only
        ### clamped the upper end, so ADU values below -1000 produced a
        ### negative index that wrapped around to the tail of the table
        np.clip(idxFrame, 0, nAduBins - 2, out=idxFrame)
        _energyFrame0 = _aduToKev3DMap[idxFrame, rowIdx, colIdx]
        _energyFrame1 = _aduToKev3DMap[idxFrame + 1, rowIdx, colIdx]
        energyFrame = _energyFrame0 + (_energyFrame1 - _energyFrame0)/aduPerBin * (aduFrame - (idxFrame - nExtraBins)*aduPerBin)
    if 'energyScalingFactor' in _cfg:
        energyFrame *= _cfg['energyScalingFactor']
    return energyFrame
|
|
|
|
def bookHistograms(energy, suffix='', energyBinWidth=0.1, isMC=False):
    """Create and return the full set of analysis histograms.

    Parameters
    ----------
    energy : float
        Nominal photon energy [keV]; determines the histogram ranges.
    suffix : str
        Appended to every histogram name so that names stay unique in
        ROOT (e.g. one set of histograms per worker chunk).
    energyBinWidth : float
        Width of one energy bin [keV].
    isMC : bool
        Currently unused; kept for interface compatibility.

    Returns
    -------
    dict
        key -> TH1D/TH2D, all detached from any ROOT directory via
        SetDirectory(0) so they survive file closes and can be merged
        across processes.
    """
    histMaxEnergy = energy * 5.5
    nEnergyBins = int(histMaxEnergy // energyBinWidth)
    roi_x0, roi_x1, roi_y0, roi_y1 = _cfg['Roi'][0], _cfg['Roi'][1], _cfg['Roi'][2], _cfg['Roi'][3]
    roi_width = roi_x1 - roi_x0
    roi_height = roi_y1 - roi_y0

    histograms = {}

    def book1d(key, title, nBins, lo, hi):
        ### book a TH1D under `key`; the ROOT name carries the suffix,
        ### the title string also sets the axis labels
        h = TH1D(f'{key}{suffix}', f'{key}{suffix}', nBins, lo, hi)
        h.SetTitle(title)
        histograms[key] = h

    def book2d(key, title, nBinsX, xLo, xHi, nBinsY, yLo, yHi):
        ### book a TH2D under `key`; same naming convention as book1d
        h = TH2D(f'{key}{suffix}', f'{key}{suffix}', nBinsX, xLo, xHi, nBinsY, yLo, yHi)
        h.SetTitle(title)
        histograms[key] = h

    book1d('h1_ClusterEnergy', 'Cluster Energy;Energy [keV];Counts', nEnergyBins, -1, histMaxEnergy)
    book1d('h1_PixelEnergy', 'Pixel Energy;Energy [keV];Counts', nEnergyBins, -1, histMaxEnergy)
    book1d('h1_1PhotonClusterSize', '1-Photon Cluster Size;Cluster Size [pixel];Counts', 30, 0, 30)
    book1d('h1_1PhotonClusterDiameterX', '1-Photon Cluster Diameter X;Cluster Diameter X [pixel];Counts', 10, 0, 10)
    book1d('h1_1PhotonClusterDiameterY', '1-Photon Cluster Diameter Y;Cluster Diameter Y [pixel];Counts', 10, 0, 10)
    book1d('h1_1PhotonClusterPixelEnergy', '1-Photon Cluster Pixel Energy;Energy [keV];Counts', nEnergyBins, -1, energy*1.5)
    book1d('h1_1PhotonClusterEnergy', '1-Photon Cluster Energy;Energy [keV];Counts', nEnergyBins, -1, energy*1.5)
    book1d('h1_1PhotonEta_X', '1-Photon Eta X;Eta X;Counts', 100, -0.5, 0.5)
    book1d('h1_1PhotonEta_Y', '1-Photon Eta Y;Eta Y;Counts', 100, -0.5, 0.5)
    book1d('h1_2PhotonClusterEnergy', '2-Photon Cluster Energy;Energy [keV];Counts', nEnergyBins, -1, energy*4)
    book1d('h1_2PhotonClusterSize', '2-Photon Cluster Size;Cluster Size [pixel];Counts', 30, 0, 30)
    book1d('h1_2PhotonClusterDiameterX', '2-Photon Cluster Diameter X;Cluster Diameter X [pixel];Counts', 10, 0, 10)
    book1d('h1_2PhotonClusterDiameterY', '2-Photon Cluster Diameter Y;Cluster Diameter Y [pixel];Counts', 10, 0, 10)
    book1d('h1_2PhotonClusterPixelEnergy', '2-Photon Cluster Pixel Energy;Energy [keV];Counts', nEnergyBins, -1, energy*2.5)

    book2d('h2_SumFrame', 'Sum Frame;X [pixel];Y [pixel];Energy Sum [keV]',
           roi_width, roi_x0, roi_x1, roi_height, roi_y0, roi_y1)
    book2d('h2_HitsSumFrame', 'Hits Sum Frame;X [pixel];Y [pixel];Energy Sum [keV]',
           roi_width, roi_x0, roi_x1, roi_height, roi_y0, roi_y1)
    book2d('h2_1PhotonSumFrame', '1-Photon Hits Sum Frame;X [pixel];Y [pixel];Energy Sum [keV]',
           roi_width, roi_x0, roi_x1, roi_height, roi_y0, roi_y1)
    book2d('h2_1PhotonSumSample', '1-Photon Sum Sample;Sum Sample;Counts',
           clusterSize1Photon, 0, clusterSize1Photon, clusterSize1Photon, 0, clusterSize1Photon)
    book2d('h2_2PhotonSumSample', '2-Photon Sum Sample;Sum Sample;Counts',
           clusterSize2Photon, 0, clusterSize2Photon, clusterSize2Photon, 0, clusterSize2Photon)
    book2d('h2_2PhotonSumFrame', '2-Photon Hits Sum Frame;X [pixel];Y [pixel];Energy Sum [keV]',
           roi_width, roi_x0, roi_x1, roi_height, roi_y0, roi_y1)
    book2d('h2_ClusterSizeVsEnergy', 'Cluster Size vs Energy;Energy [keV];Cluster Size [pixel]',
           nEnergyBins, -1, histMaxEnergy, 30, 0, 30)
    book2d('h2_DiameterVsEnergy', 'Cluster Diameter vs Energy;Energy [keV];Cluster Diameter [pixel]',
           nEnergyBins, -1, histMaxEnergy, 10, 0, 10)

    ### detach from the current ROOT directory so the histograms are owned
    ### by Python and survive TFile lifecycle changes
    for hist in histograms.values():
        hist.SetDirectory(0)

    return histograms
|
|
|
|
def _processFrames(idxChunk): ### for both single and double photon events
    """Worker: process one chunk of frames and fill per-chunk histograms.

    Reads raw uint16 frames from the signal file(s), subtracts the
    pedestal, converts ADU to energy, runs the aare cluster finder on the
    ROI and fills the single-photon / double-photon histograms.  When
    _cfg['writeClusters'] is True, the fixed-size cluster samples are also
    appended to per-chunk HDF5 files.

    Runs in a forked worker process and relies on the module globals
    (_cfg, _pedestalAduFrame, _noiseEneFrame, _aduToKev3DMap) having been
    set up in the parent before the Pool was created.

    Parameters
    ----------
    idxChunk : int
        Index of the chunk (0 .. nChunks-1) this worker handles.

    Returns
    -------
    dict
        Histograms for this chunk (names suffixed with _{idxChunk}).
    """
    startFrame, endFrame = _cfg['NFrame'] * idxChunk // nChunks, min(_cfg['NFrame'] * (idxChunk + 1) // nChunks, _cfg['NFrame'])
    print(f'Processing frames from {startFrame} to {endFrame}...')
    Energy = _cfg['energy']
    selectionRange = _cfg['selectionRange']
    signalFileNames = _cfg['signalFileNames']
    NX = _cfg['NX']
    NY = _cfg['NY']
    headerSize = _cfg['headerSize']
    Roi = _cfg['Roi']
    ### file size [bytes] -> frame count: (NX*NY + 56) uint16 words per frame, 2 bytes each.
    ### NOTE(review): hard-codes a 56-word header instead of using `headerSize`,
    ### and assumes the single-fiber layout -- confirm for two-fiber files.
    nFramePerFile = os.path.getsize(f'{signalFileNames[0]}') // (NX * NY + 56) // 2
    _hists = bookHistograms(Energy, f'_{idxChunk}')
    sumFrame = np.zeros((NY, NX), dtype=np.float64)

    ### create a cluster finder on the ROI
    nxROI = Roi[1] - Roi[0]
    nyROI = Roi[3] - Roi[2]
    CF = aare.VarClusterFinder((nyROI, nxROI), 5) ### arg1: frame size, arg2: threshold
    CF.set_peripheralThresholdFactor(3)
    CF.set_noiseMap(_noiseEneFrame)

    if 'writeClusters' in _cfg and _cfg['writeClusters'] == True:
        ### per-chunk HDF5 output of the raw cluster sample windows
        h5_1Photon_file = h5py.File(f'{_cfg["outputFolder"]}/1Photon_CS{clusterSize1Photon}_chunk{idxChunk}.h5', 'w')
        dset_1Photon_clusters = h5_1Photon_file.create_dataset(
            'clusters', (0, clusterSize1Photon, clusterSize1Photon), maxshape=(None, clusterSize1Photon, clusterSize1Photon), dtype='f4',
            chunks=True, compression='gzip'
        )
        dset_1photon_refs = h5_1Photon_file.create_dataset(
            'referencePoint', (0, 2), maxshape=(None, 2), dtype='i4',
            chunks=True
        )

        cluster_1Photon_list = []
        refpoint_1Photon_list = []

        h5_2Photon_file = h5py.File(f'{_cfg["outputFolder"]}/2Photon_CS{clusterSize2Photon}_chunk{idxChunk}.h5', 'w')
        dset_2Photon_clusters = h5_2Photon_file.create_dataset(
            'clusters', (0, clusterSize2Photon, clusterSize2Photon), maxshape=(None, clusterSize2Photon, clusterSize2Photon), dtype='f4',
            chunks=True, compression='gzip'
        )
        dset_2Photon_refs = h5_2Photon_file.create_dataset(
            'referencePoint', (0, 2), maxshape=(None, 2), dtype='i4',
            chunks=True
        )
        cluster_2Photon_list = []
        refpoint_2Photon_list = []
        BUFFER_THRESHOLD = 10000

        def flush_h5_buffer():
            ### Append the buffered clusters to the datasets and clear the buffers.
            ### Each buffer is flushed independently: the previous version returned
            ### early when the 1-photon buffer was empty, silently dropping any
            ### pending 2-photon clusters (bug fix).
            if cluster_1Photon_list:
                old_len = dset_1Photon_clusters.shape[0]
                new_len = old_len + len(cluster_1Photon_list)
                dset_1Photon_clusters.resize((new_len, clusterSize1Photon, clusterSize1Photon))
                dset_1photon_refs.resize((new_len, 2))
                dset_1Photon_clusters[old_len:new_len] = cluster_1Photon_list
                dset_1photon_refs[old_len:new_len] = refpoint_1Photon_list
                cluster_1Photon_list.clear()
                refpoint_1Photon_list.clear()

            if cluster_2Photon_list:
                old_len = dset_2Photon_clusters.shape[0]
                new_len = old_len + len(cluster_2Photon_list)
                dset_2Photon_clusters.resize((new_len, clusterSize2Photon, clusterSize2Photon))
                dset_2Photon_refs.resize((new_len, 2))
                dset_2Photon_clusters[old_len:new_len] = cluster_2Photon_list
                dset_2Photon_refs[old_len:new_len] = refpoint_2Photon_list
                cluster_2Photon_list.clear()
                refpoint_2Photon_list.clear()

    for idxFrame in range(startFrame, endFrame):
        ### split the global frame index into (file index, frame-within-file);
        ### a separate name avoids shadowing idxFrame (the old code reused it,
        ### which made the error message below report a wrong frame index)
        idxFile, idxFrameInFile = divmod(idxFrame, nFramePerFile)
        try:
            if 'nFiber' in _cfg and _cfg['nFiber'] == 2:
                ### two-fiber readout: each file carries half a frame (d0/d1)
                offset = (headerSize + idxFrameInFile * (NX * NY//2 + headerSize)) * 2
                fileName1 = signalFileNames[idxFile]
                fileName2 = fileName1.replace('d0', 'd1')
                _signalAduFrame1 = np.fromfile(f'{fileName1}', dtype=np.uint16, offset=offset, count=NX * NY //2).astype(np.int32).reshape(NY//2, NX)
                _signalAduFrame2 = np.fromfile(f'{fileName2}', dtype=np.uint16, offset=offset, count=NX * NY //2).astype(np.int32).reshape(NY//2, NX)
                signalAduFrame = np.concatenate([_signalAduFrame1, _signalAduFrame2], axis=0)
                del _signalAduFrame1, _signalAduFrame2
            else:
                offset = (headerSize + idxFrameInFile * (NX * NY + headerSize)) * 2
                ### NOTE(review): reshape(NX, NY) only matches the (NY, NX) frames
                ### used elsewhere when NX == NY -- confirm for non-square sensors
                signalAduFrame = np.fromfile(f'{signalFileNames[idxFile]}', dtype=np.uint16, offset=offset, count=NX * NY).astype(np.int32).reshape(NX, NY)
        except Exception as e:
            print(f'Error reading frame {idxFrame} from file {signalFileNames[idxFile]}: {e}, stop processing this chunk ({startFrame}-{endFrame})')
            return _hists

        signalAduFrame = signalAduFrame - _pedestalAduFrame
        signalEneFrame = convertAduFrameToEnergyFrame(signalAduFrame)
        sumFrame += signalEneFrame

        signalEneFrame = signalEneFrame[Roi[2]:Roi[3], Roi[0]:Roi[1]]
        CF.find_clusters_X(signalEneFrame)
        clusters = CF.hits()
        for idxCluster in range(len(clusters)):
            clusterSize = clusters['size'][idxCluster]
            energy = clusters['energy'][idxCluster]
            _hists['h1_ClusterEnergy'].Fill(energy)
            xs = clusters['cols'][idxCluster][:clusterSize]
            ys = clusters['rows'][idxCluster][:clusterSize]
            enes = clusters['enes'][idxCluster][:clusterSize]

            ### single photon events: cluster energy within one photon peak
            if Energy - selectionRange < energy < Energy + selectionRange:
                ref_x = clusters['col'][idxCluster] - 1 ### refered to the lower-left corner of the cluster
                ref_y = clusters['row'][idxCluster] - 1
                if int(ref_x) < 0 or int(ref_x)+clusterSize1Photon > signalEneFrame.shape[1] or int(ref_y) < 0 or int(ref_y)+clusterSize1Photon > signalEneFrame.shape[0]:
                    continue ### skip clusters too close to the border
                for i in range(len(xs)):
                    _hists['h2_1PhotonSumFrame'].Fill(xs[i]+Roi[0], ys[i]+Roi[2], enes[i])
                _hists['h1_1PhotonClusterSize'].Fill(clusterSize)
                diameterX = max(xs) - min(xs) + 1
                diameterY = max(ys) - min(ys) + 1
                _hists['h1_1PhotonClusterDiameterX'].Fill(diameterX)
                _hists['h1_1PhotonClusterDiameterY'].Fill(diameterY)

                ### take the fixed-size window straight from the frame: the cluster
                ### finder may miss low-energy pixels far from the seed, so the
                ### window sum is a more accurate cluster-energy estimate than the
                ### finder's own energy value
                cluster_1Photon = signalEneFrame[int(ref_y):int(ref_y)+clusterSize1Photon, int(ref_x):int(ref_x)+clusterSize1Photon]
                _hists['h1_1PhotonClusterEnergy'].Fill(np.sum(cluster_1Photon))
                ### window loops follow clusterSize1Photon (previously hard-coded 3)
                for i in range(clusterSize1Photon):
                    for j in range(clusterSize1Photon):
                        ### NOTE(review): i indexes rows of the window but is passed
                        ### as the histogram x-axis -- sample looks transposed; confirm
                        _hists['h2_1PhotonSumSample'].Fill(i, j, cluster_1Photon[i, j])
                if 'writeClusters' in _cfg and _cfg['writeClusters'] == True:
                    cluster_1Photon_list.append(cluster_1Photon)
                    refpoint_1Photon_list.append([int(ref_x)+Roi[0], int(ref_y)+Roi[2]])

                sumEne = np.sum(cluster_1Photon)
                ### eta: normalised left-right / bottom-top energy asymmetry of the window
                projectedX = np.sum(cluster_1Photon, axis=0)
                etaX = (projectedX[2] - projectedX[0]) / sumEne
                projectedY = np.sum(cluster_1Photon, axis=1)
                etaY = (projectedY[2] - projectedY[0]) / sumEne
                _hists['h1_1PhotonEta_X'].Fill(etaX)
                _hists['h1_1PhotonEta_Y'].Fill(etaY)

            ### double photon events: cluster energy within the two-photon peak
            if 2 * Energy - 2 * selectionRange < energy < 2 * Energy + 2 * selectionRange:
                ### energy-weighted centroid defines the sample window position
                x_center = np.sum(xs * enes) / np.sum(enes)
                y_center = np.sum(ys * enes) / np.sum(enes)
                ref_x = int(x_center - clusterSize2Photon / 2) + 1 ### refered to the lower-left corner of the cluster
                ref_y = int(y_center - clusterSize2Photon / 2) + 1

                if int(ref_x) < 0 or int(ref_x)+clusterSize2Photon > signalEneFrame.shape[1] or int(ref_y) < 0 or int(ref_y)+clusterSize2Photon > signalEneFrame.shape[0]:
                    continue ### skip clusters too close to the border
                for i in range(len(xs)):
                    _hists['h2_2PhotonSumFrame'].Fill(xs[i]+Roi[0], ys[i]+Roi[2], enes[i])
                    _hists['h1_2PhotonClusterPixelEnergy'].Fill(enes[i])
                _hists['h1_2PhotonClusterSize'].Fill(clusterSize)
                diameterX = max(xs) - min(xs) + 1
                diameterY = max(ys) - min(ys) + 1
                _hists['h1_2PhotonClusterDiameterX'].Fill(diameterX)
                _hists['h1_2PhotonClusterDiameterY'].Fill(diameterY)

                ### rebuild the sample window from the finder's pixel list.
                ### (bug fix) h2_2PhotonSumSample used to be filled here per hit AND
                ### again in the full-grid loop below, double-counting every pixel;
                ### it is now filled only once, from the grid, mirroring the
                ### 1-photon treatment.
                cluster_2Photon = np.zeros((clusterSize2Photon, clusterSize2Photon), dtype=np.float32)
                for i in range(len(xs)):
                    x_rel = xs[i] - int(ref_x)
                    y_rel = ys[i] - int(ref_y)
                    if 0 <= x_rel < clusterSize2Photon and 0 <= y_rel < clusterSize2Photon:
                        cluster_2Photon[y_rel, x_rel] = enes[i]
                _hists['h1_2PhotonClusterEnergy'].Fill(np.sum(cluster_2Photon))
                for i in range(clusterSize2Photon):
                    for j in range(clusterSize2Photon):
                        _hists['h2_2PhotonSumSample'].Fill(i, j, cluster_2Photon[i, j])
                if 'writeClusters' in _cfg and _cfg['writeClusters'] == True:
                    cluster_2Photon_list.append(cluster_2Photon)
                    refpoint_2Photon_list.append([int(ref_x)+Roi[0], int(ref_y)+Roi[2]])

            ### inclusive (all clusters) histograms
            _hists['h2_ClusterSizeVsEnergy'].Fill(energy, clusterSize)
            _diameterX = max(xs) - min(xs) + 1
            _diameterY = max(ys) - min(ys) + 1
            _diameter = max(_diameterX, _diameterY)
            _hists['h2_DiameterVsEnergy'].Fill(energy, _diameter)
            for i in range(len(xs)):
                _hists['h1_PixelEnergy'].Fill(enes[i])
                _hists['h2_HitsSumFrame'].Fill(xs[i]+Roi[0], ys[i]+Roi[2], enes[i])
        if 'writeClusters' in _cfg and _cfg['writeClusters'] == True:
            if len(cluster_1Photon_list) > BUFFER_THRESHOLD or len(cluster_2Photon_list) > BUFFER_THRESHOLD:
                flush_h5_buffer()

    ### fill the time-integrated energy map over the ROI
    for _x in range(Roi[0], Roi[1]):
        for _y in range(Roi[2], Roi[3]):
            _hists['h2_SumFrame'].Fill(_x, _y, sumFrame[_y, _x])
    if 'writeClusters' in _cfg and _cfg['writeClusters'] == True:
        flush_h5_buffer()
        h5_1Photon_file.close()
        h5_2Photon_file.close()
    return _hists
|
|
|
|
def process():
    """Run the per-chunk analysis in parallel and write merged histograms.

    Spawns a Pool of _cfg['NThread'] workers over nChunks chunks, merges
    the per-chunk histograms into a fresh (unsuffixed) set stored in the
    module-level `hists`, and writes them to a ROOT file.

    The output path defaults to the historical hard-coded location but can
    now be overridden with _cfg['outputTFileName'].
    """
    # os is already imported at module level; the redundant local import was removed
    os.makedirs(_cfg['outputFolder'], exist_ok=True)
    # maxtasksperchild=1: recycle each worker after one chunk to cap memory growth
    with Pool(_cfg['NThread'], maxtasksperchild=1) as p:
        results = p.map(_processFrames, range(nChunks))

    global hists
    hists = bookHistograms(_cfg['energy'])
    for h in hists.values():
        h.SetDirectory(0)

    ### merge the per-chunk histograms (names differ by suffix, keys are identical)
    for _hists in results:
        for key in hists.keys():
            hists[key].Add(_hists[key])

    ### allow overriding the hard-coded results location via the config
    outputTFileName = _cfg.get('outputTFileName', f'/home/xie_x1/MLXID/DataProcess/Results/{_cfg["runName"]}.root')
    outputTFile = TFile(outputTFileName, 'RECREATE')
    for hist in hists.values():
        hist.Write()
    outputTFile.Close()
|
|
|
|
def getPedestalAndNoise():
    """Compute the pedestal (ADU) and per-pixel noise (keV) from the pedestal run.

    Reads all pedestal frames, drops the first 10% (detector settling),
    then sets the module globals:

    * _pedestalAduFrame -- per-pixel mean in ADU;
    * _noiseEneFrame    -- per-pixel standard deviation computed AFTER
      converting every pedestal frame to energy (accurate but expensive;
      see getPedestalAndNoise_simplified for the cheap approximation).
    """
    NX, NY = _cfg['NX'], _cfg['NY']
    ### per-frame header length in uint16 words; was hard-coded to 56, now
    ### follows the configured headerSize for consistency with _processFrames
    headerSize = _cfg.get('headerSize', 56)
    pedestalFileName = _cfg['pedestalFileName']
    if 'nFiber' in _cfg and _cfg['nFiber'] == 2:
        ### two-fiber readout: top/bottom half-frames live in the d0/d1 files
        pedestalFileName2 = pedestalFileName.replace('d0', 'd1')
        _pedestalAduFrames1 = np.fromfile(f'{pedestalFileName}', dtype=np.uint16).astype(np.int32)
        _pedestalAduFrames1 = _pedestalAduFrames1.reshape(-1, NX * NY //2 + headerSize)
        _pedestalAduFrames1 = _pedestalAduFrames1[:, headerSize:].reshape(-1, NY//2, NX)
        _pedestalAduFrames2 = np.fromfile(f'{pedestalFileName2}', dtype=np.uint16).astype(np.int32)
        _pedestalAduFrames2 = _pedestalAduFrames2.reshape(-1, NX * NY //2 + headerSize)
        _pedestalAduFrames2 = _pedestalAduFrames2[:, headerSize:].reshape(-1, NY//2, NX)
        pedestalAduFrames = np.concatenate([_pedestalAduFrames1, _pedestalAduFrames2], axis=1)
        del _pedestalAduFrames1, _pedestalAduFrames2
    else:
        pedestalAduFrames = np.fromfile(f'{pedestalFileName}', dtype=np.uint16).astype(np.int32)
        pedestalAduFrames = pedestalAduFrames.reshape(-1, NX * NY + headerSize)
        ### NOTE(review): reshape(-1, NX, NY) matches the single-fiber path in
        ### _processFrames, but is only row/column-consistent when NX == NY -- confirm
        pedestalAduFrames = pedestalAduFrames[:, headerSize:].reshape(-1, NX, NY)

    pedestalAduFrames = pedestalAduFrames[pedestalAduFrames.shape[0]//10:] # skip the first 10% frames
    global _pedestalAduFrame, _noiseEneFrame
    _pedestalAduFrame = np.mean(pedestalAduFrames, axis=0)
    noiseAduFrame = np.std(pedestalAduFrames, axis=0, ddof=1)
    ### convert every pedestal frame to energy in parallel, then take the
    ### frame-to-frame spread; pool size follows the configured thread count
    ### (was hard-coded to 16)
    with Pool(_cfg.get('NThread', 16)) as p:
        pedestalEneFrames = p.map(convertAduFrameToEnergyFrame, pedestalAduFrames)
    _noiseEneFrame = np.std(pedestalEneFrames, axis=0, ddof=1)
    print(f'Average noise = {np.mean(_noiseEneFrame):.3f} keV; {np.mean(noiseAduFrame):.3f} ADU')
    del pedestalAduFrames, pedestalEneFrames
|
|
|
|
def getPedestalAndNoise_simplified():
    """Fast pedestal/noise estimate for a single-fiber pedestal run.

    Unlike getPedestalAndNoise(), the noise is computed in ADU first and
    the std-dev frame is passed once through the ADU->energy conversion,
    instead of converting every pedestal frame -- an approximation that
    avoids the expensive per-frame conversion.

    Side effects: sets the module globals _pedestalAduFrame (per-pixel
    mean ADU) and _noiseEneFrame (per-pixel noise in keV).
    """
    NX, NY = _cfg['NX'], _cfg['NY']
    ### per-frame header length in uint16 words; was hard-coded to 56, now
    ### follows the configured headerSize for consistency with _processFrames
    headerSize = _cfg.get('headerSize', 56)
    pedestalFileName = _cfg['pedestalFileName']
    pedestalAduFrames = np.fromfile(f'{pedestalFileName}', dtype=np.uint16).astype(np.int32)
    pedestalAduFrames = pedestalAduFrames.reshape(-1, NX * NY + headerSize)
    ### NOTE(review): reshape(-1, NX, NY) matches the single-fiber path in
    ### _processFrames, but is only row/column-consistent when NX == NY -- confirm
    pedestalAduFrames = pedestalAduFrames[:, headerSize:].reshape(-1, NX, NY)
    pedestalAduFrames = pedestalAduFrames[pedestalAduFrames.shape[0]//10:] # skip the first 10% frames
    global _pedestalAduFrame, _noiseEneFrame
    _pedestalAduFrame = np.mean(pedestalAduFrames, axis=0)
    noiseAduFrame = np.std(pedestalAduFrames, axis=0, ddof=1)
    _noiseEneFrame = convertAduFrameToEnergyFrame(noiseAduFrame)

    print(f'Average noise = {np.mean(_noiseEneFrame):.3f} keV; {np.mean(noiseAduFrame):.3f} ADU')
    del pedestalAduFrames
|