Add 2photon cluster writing; Clean codes
UsefulFuncs.py
@@ -61,42 +61,36 @@ def bookHistograms(energy, suffix='', energyBinWidth=0.1, isMC=False):
    histograms['h1_ClusterEnergy'] = TH1D(
        f'h1_ClusterEnergy{suffix}', f'h1_ClusterEnergy{suffix}', nEnergyBins, -1, histMaxEnergy
    )
    histograms['h1_ClusterEnergy'].SetTitle('Cluster Energy;Energy [keV];Counts')

    histograms['h1_PixelEnergy'] = TH1D(
        f'h1_PixelEnergy{suffix}', f'h1_PixelEnergy{suffix}', nEnergyBins, -1, histMaxEnergy
    )
    histograms['h1_CenterPixelEnergy'] = TH1D(
        f'h1_CenterPixelEnergy{suffix}', f'h1_CenterPixelEnergy{suffix}', nEnergyBins, -1, histMaxEnergy
    )
    histograms['h2_Qc'] = TH2D(
        f'h2_Qc{suffix}', f'h2_Qc{suffix}', 201, 0, 1, 201, 0, 1
    )
    histograms['h2_counts'] = TH2D(
        f'h2_counts{suffix}', f'h2_counts{suffix}', roi_width, roi_x0, roi_x1, roi_height, roi_y0, roi_y1
    )
    histograms['h1_PixelEnergy'].SetTitle('Pixel Energy;Energy [keV];Counts')

    histograms['h2_HitsSumFrame'] = TH2D(
        f'h2_HitsSumFrame{suffix}', f'h2_HitsSumFrame{suffix}', roi_width, roi_x0, roi_x1, roi_height, roi_y0, roi_y1
    )
    histograms['h2_interpolated'] = TH2D(
        f'h2_interpolated{suffix}', f'h2_interpolated{suffix}', roi_width * interpolationBins, roi_x0, roi_x1, roi_height * interpolationBins, roi_y0, roi_y1
    )
    histograms['h2_HitsSumFrame'].SetTitle('Hits Sum Frame;X [pixel];Y [pixel];Energy Sum [keV]')

    histograms['h2_DoubleHitsSumFrame'] = TH2D(
        f'h2_DoubleHitsSumFrame{suffix}', f'h2_DoubleHitsSumFrame{suffix}', roi_width, roi_x0, roi_x1, roi_height, roi_y0, roi_y1
    )
    histograms['h2_interpolatedSubpixel'] = TH2D(
        f'h2_interpolatedSubpixel{suffix}', f'h2_interpolatedSubpixel{suffix}', interpolationBins, 0, 1, interpolationBins, 0, 1
    )
    histograms['h2_DoubleHitsSumFrame'].SetTitle('Double Hits Sum Frame;X [pixel];Y [pixel];Energy Sum [keV]')

    histograms['h2_ClusterSizeVsEnergy'] = TH2D(
        f'h2_ClusterSizeVsEnergy{suffix}', f'h2_ClusterSizeVsEnergy{suffix}', nEnergyBins, -1, histMaxEnergy, 30, 0, 30
    )
    histograms['h2_ClusterSizeVsEnergy'].SetTitle('Cluster Size vs Energy;Energy [keV];Cluster Size [pixel]')

    histograms['h2_DiameterVsEnergy'] = TH2D(
        f'h2_DiameterVsEnergy{suffix}', f'h2_DiameterVsEnergy{suffix}', nEnergyBins, -1, histMaxEnergy, 10, 0, 10
    )
    histograms['h2_DiameterVsEnergy'].SetTitle('Cluster Diameter vs Energy;Energy [keV];Cluster Diameter [pixel]')

    for hist in histograms.values():
        hist.SetDirectory(0)

    histograms['h1_ClusterEnergy'].SetTitle('Cluster Energy;Energy [keV];Counts')
    histograms['h1_PixelEnergy'].SetTitle('Pixel Energy;Energy [keV];Counts')
    histograms['h1_CenterPixelEnergy'].SetTitle('Center Pixel Energy;Energy [keV];Counts')
    histograms['h2_Qc'].SetTitle('Charge weighted center;#eta_x;#eta_y;Counts')

    if isMC:
        histograms['h1_ChargeCollectionEfficiency'] = TH1D(f'h1_ChargeCollectionEfficiency{suffix}', f'h1_ChargeCollectionEfficiency{suffix}', 400, 0.8, 1.01)
        histograms['h1_ChargeCollectionEfficiency'].SetDirectory(0)
    for key, hist in list(histograms.items()):
        histograms[key] = hist.Clone()

    return histograms


def _processFrames(idxChunk): ### for both single and double photon events, using
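The booking code above detaches every histogram from ROOT's global directory with SetDirectory(0) and finally replaces each entry with a Clone(), so per-worker copies can be filled and returned independently of ROOT's object ownership and then summed in process(). A minimal, self-contained sketch of that detach-and-merge pattern (the names bookExample/mergeExample and the binning are illustrative, not part of UsefulFuncs.py):

from ROOT import TH1D

def bookExample(suffix=''):
    # Book one histogram and detach it from ROOT's global directory,
    # so the caller (e.g. a worker process) fully owns the object.
    h = TH1D(f'h1_Energy{suffix}', 'Energy;Energy [keV];Counts', 100, 0.0, 10.0)
    h.SetDirectory(0)
    return {'h1_Energy': h}

def mergeExample(master, partial_results):
    # Sum per-worker histograms into the master set, key by key,
    # mirroring the generic Add loop used in process().
    for partial in partial_results:
        for key in master:
            master[key].Add(partial[key])

if __name__ == '__main__':
    master = bookExample()
    worker = bookExample('_w0')
    worker['h1_Energy'].Fill(5.9)   # e.g. one Mn K-alpha photon at 5.9 keV
    mergeExample(master, [worker])
    print(master['h1_Energy'].GetEntries())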
@@ -116,35 +110,53 @@ def _processFrames(idxChunk): ### for both single and double photon events, usin
    nxROI = Roi[1] - Roi[0]
    nyROI = Roi[3] - Roi[2]
    CF = aare.VarClusterFinder((nxROI, nyROI), 5) ### arg1: frame size, arg2: threshold
    CF.set_peripheralThresholdFactor(2)
    CF.set_peripheralThresholdFactor(3)
    CF.set_noiseMap(_noiseEneFrame)

    if 'writeClusters' in _cfg and _cfg['writeClusters'] == True:
        h5_file = h5py.File(f'clusters_chunk{idxChunk}.h5', 'w')
        # create resizable datasets
        dset_clusters = h5_file.create_dataset(
        h5_1Photon_file = h5py.File(f'{_cfg["outputFolder"]}/Clusters_1Photon_chunk{idxChunk}.h5', 'w')
        dset_1Photon_clusters = h5_1Photon_file.create_dataset(
            'clusters', (0, 3, 3), maxshape=(None, 3, 3), dtype='f4',
            chunks=True, compression='gzip'
        )
        dset_refs = h5_file.create_dataset(
        dset_1photon_refs = h5_1Photon_file.create_dataset(
            'referencePoint', (0, 2), maxshape=(None, 2), dtype='i4',
            chunks=True
        )

        buffer_size = 50_000
        cluster_list = []
        refpoint_list = []
        cluster_1Photon_list = []
        refpoint_1Photon_list = []

        h5_2Photon_file = h5py.File(f'{_cfg["outputFolder"]}/Clusters_2Photon_chunk{idxChunk}.h5', 'w')
        dset_2Photon_clusters = h5_2Photon_file.create_dataset(
            'clusters', (0, 6, 6), maxshape=(None, 6, 6), dtype='f4',
            chunks=True, compression='gzip'
        )
        dset_2Photon_refs = h5_2Photon_file.create_dataset(
            'referencePoint', (0, 2), maxshape=(None, 2), dtype='i4',
            chunks=True
        )
        cluster_2Photon_list = []
        refpoint_2Photon_list = []

        def flush_h5_buffer():
            if not cluster_list:
                return
            old_len = dset_clusters.shape[0]
            new_len = old_len + len(cluster_list)
            dset_clusters.resize((new_len, 3, 3))
            dset_refs.resize((new_len, 2))
            dset_clusters[old_len:new_len] = cluster_list
            dset_refs[old_len:new_len] = refpoint_list
            cluster_list.clear()
            refpoint_list.clear()
            old_len = dset_1Photon_clusters.shape[0]
            new_len = old_len + len(cluster_1Photon_list)
            dset_1Photon_clusters.resize((new_len, 3, 3))
            dset_1photon_refs.resize((new_len, 2))
            dset_1Photon_clusters[old_len:new_len] = cluster_1Photon_list
            dset_1photon_refs[old_len:new_len] = refpoint_1Photon_list
            cluster_1Photon_list.clear()
            refpoint_1Photon_list.clear()

            old_len = dset_2Photon_clusters.shape[0]
            new_len = old_len + len(cluster_2Photon_list)
            dset_2Photon_clusters.resize((new_len, 6, 6))
            dset_2Photon_refs.resize((new_len, 2))
            dset_2Photon_clusters[old_len:new_len] = cluster_2Photon_list
            dset_2Photon_refs[old_len:new_len] = refpoint_2Photon_list
            cluster_2Photon_list.clear()
            refpoint_2Photon_list.clear()

    for idxFrame in range(startFrame, endFrame):
        if idxFrame % 10000 == 0:
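The cluster-writing branch above relies on h5py's resizable datasets: each dataset is created with zero length and maxshape=(None, ...), clusters are accumulated in plain Python lists, and flush_h5_buffer() grows the dataset and writes the buffered block in one slice assignment. A self-contained sketch of that buffered-append pattern, with illustrative file and dataset names:

import h5py
import numpy as np

with h5py.File('example_clusters.h5', 'w') as f:
    # Start empty; maxshape=(None, 3, 3) lets the first axis grow on demand.
    dset = f.create_dataset('clusters', (0, 3, 3), maxshape=(None, 3, 3),
                            dtype='f4', chunks=True, compression='gzip')

    buffer_size = 4          # tiny for the example; the analysis uses 50_000
    buffer = []

    def flush(buffer):
        # Grow the dataset by the buffered amount and write the block at once.
        if not buffer:
            return
        old_len = dset.shape[0]
        new_len = old_len + len(buffer)
        dset.resize((new_len, 3, 3))
        dset[old_len:new_len] = buffer
        buffer.clear()

    for i in range(10):
        buffer.append(np.full((3, 3), i, dtype=np.float32))
        if len(buffer) >= buffer_size:
            flush(buffer)
    flush(buffer)            # write whatever is left at the end

    print(dset.shape)        # (10, 3, 3)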
@@ -172,20 +184,44 @@ def _processFrames(idxChunk): ### for both single and double photon events, usin
        CF.find_clusters_X(signalEneFrame[Roi[2]:Roi[3], Roi[0]:Roi[1]])
        clusters = CF.hits()
        for idxCluster in range(len(clusters)):
            clusterSize = clusters['size'][idxCluster]
            energy = clusters['energy'][idxCluster]
            _hists['h1_ClusterEnergy'].Fill(energy)
            xs = clusters['cols'][idxCluster]
            ys = clusters['rows'][idxCluster]
            enes = clusters['enes'][idxCluster]
            xs = clusters['cols'][idxCluster][:clusterSize]
            ys = clusters['rows'][idxCluster][:clusterSize]
            enes = clusters['enes'][idxCluster][:clusterSize]

            if energy < 5:
                continue

            if 2 * Energy - selectionRange < energy < 2 * Energy + selectionRange:
                for i in range(len(xs)):
                    _hists['h2_DoubleHitsSumFrame'].Fill(xs[i]+Roi[0], ys[i]+Roi[2], enes[i])
                if 'writeClusters' in _cfg and _cfg['writeClusters'] == True:
                    ref_x = (min(xs) + max(xs)) // 2
                    ref_y = (min(ys) + max(ys)) // 2
                    cluster_2Photon = np.zeros((6, 6), dtype=np.float32)
                    for i in range(len(xs)):
                        x_rel = xs[i] - int(ref_x) + 3
                        y_rel = ys[i] - int(ref_y) + 3
                        if 0 <= x_rel < 6 and 0 <= y_rel < 6:
                            cluster_2Photon[y_rel, x_rel] = enes[i]
                    cluster_2Photon_list.append(cluster_2Photon)
                    refpoint_2Photon_list.append([int(ref_x)+Roi[0], int(ref_y)+Roi[2]])

            _hists['h2_ClusterSizeVsEnergy'].Fill(energy, clusterSize)
            _diameterX = max(xs) - min(xs) + 1
            _diameterY = max(ys) - min(ys) + 1
            _diameter = max(_diameterX, _diameterY)
            _hists['h2_DiameterVsEnergy'].Fill(energy, _diameter)
            for i in range(len(xs)):
                _hists['h1_PixelEnergy'].Fill(enes[i])
                _hists['h2_HitsSumFrame'].Fill(xs[i]+Roi[0], ys[i]+Roi[2], enes[i])

    if 'writeClusters' in _cfg and _cfg['writeClusters'] == True:
        flush_h5_buffer()
        h5_file.close()
        h5_1Photon_file.close()
        h5_2Photon_file.close()
    return _hists


def process():
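For clusters whose total energy falls within selectionRange of 2 * Energy, the loop above re-centres the hit pixels on the midpoint of the cluster's bounding box and rasterises them into a fixed 6x6 window before appending to the 2-photon buffers. A standalone numpy sketch of that re-centring step (the function name and sample values are illustrative only):

import numpy as np

def toWindow(xs, ys, enes, size=6):
    # Reference point: midpoint of the cluster's bounding box (integer division,
    # as in the diff above), so the window is anchored the same way every time.
    ref_x = (min(xs) + max(xs)) // 2
    ref_y = (min(ys) + max(ys)) // 2
    window = np.zeros((size, size), dtype=np.float32)
    for x, y, e in zip(xs, ys, enes):
        # Shift each pixel so the reference point lands at index size // 2.
        x_rel = int(x) - int(ref_x) + size // 2
        y_rel = int(y) - int(ref_y) + size // 2
        if 0 <= x_rel < size and 0 <= y_rel < size:
            window[y_rel, x_rel] = e   # row = y, column = x, as in the diff
    return window, (int(ref_x), int(ref_y))

# Example: two 3-pixel blobs a few pixels apart, i.e. a double-photon candidate.
xs   = [10, 11, 11, 13, 14, 14]
ys   = [20, 20, 21, 23, 23, 24]
enes = [1.2, 4.1, 0.7, 3.8, 1.5, 0.4]
window, ref = toWindow(xs, ys, enes)
print(ref)           # (12, 22)
print(window.sum())  # total deposited energy kept inside the window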
@@ -198,14 +234,8 @@ def process():
        h.SetDirectory(0)

    for _hists in results:
        hists['h1_ClusterEnergy'].Add(_hists['h1_ClusterEnergy'])
        hists['h1_PixelEnergy'].Add(_hists['h1_PixelEnergy'])
        hists['h1_CenterPixelEnergy'].Add(_hists['h1_CenterPixelEnergy'])
        hists['h2_Qc'].Add(_hists['h2_Qc'])
        hists['h2_interpolated'].Add(_hists['h2_interpolated'])
        hists['h2_interpolatedSubpixel'].Add(_hists['h2_interpolatedSubpixel'])
        hists['h2_counts'].Add(_hists['h2_counts'])
        hists['h2_HitsSumFrame'].Add(_hists['h2_HitsSumFrame'])
        for key in hists.keys():
            hists[key].Add(_hists[key])
        del _hists


def getPedestalAndNoise():
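Replacing the explicit per-histogram Add() calls with a loop over hists.keys() means every histogram booked in bookHistograms() is merged automatically, so histograms added later (such as the 2-photon ones in this commit) cannot be silently left out of the sum; the implicit assumption is that each worker returns the same set of keys as the master dictionary.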