diff --git a/UsefulFuncs.py b/UsefulFuncs.py
index e8c9fe7..2bbdaf9 100644
--- a/UsefulFuncs.py
+++ b/UsefulFuncs.py
@@ -1,7 +1,7 @@
 import os
 import numpy as np
-from ROOT import TH1D, TH2D, TCanvas, TF1
+from ROOT import TH1D, TH2D, TCanvas, TF1, TFile
 from multiprocessing import Pool
 from array import array
 import h5py
 
@@ -13,6 +13,7 @@ _aduToKev3DMap = None ### global caliFile
 _pedestalAduFrame = None ### global pedestalAduFrame
 _noiseEneFrame = None ### global noiseEneFrame
 nChunks = 16
+clusterSize2Photon = 7
 
 def init(cfg):
     global _cfg
@@ -127,9 +128,9 @@ def _processFrames(idxChunk): ### for both single and double photon events, usin
     cluster_1Photon_list = []
     refpoint_1Photon_list = []
 
-    h5_2Photon_file = h5py.File(f'{_cfg["outputFolder"]}/Clusters_2Photon_chunk{idxChunk}.h5', 'w')
+    h5_2Photon_file = h5py.File(f'{_cfg["outputFolder"]}/Clusters_2Photon_CS{clusterSize2Photon}_chunk{idxChunk}.h5', 'w')
     dset_2Photon_clusters = h5_2Photon_file.create_dataset(
-        'clusters', (0, 6, 6), maxshape=(None, 6, 6), dtype='f4',
+        'clusters', (0, clusterSize2Photon, clusterSize2Photon), maxshape=(None, clusterSize2Photon, clusterSize2Photon), dtype='f4',
         chunks=True, compression='gzip'
     )
     dset_2Photon_refs = h5_2Photon_file.create_dataset(
@@ -151,7 +152,7 @@ def _processFrames(idxChunk): ### for both single and double photon events, usin
 
             old_len = dset_2Photon_clusters.shape[0]
             new_len = old_len + len(cluster_2Photon_list)
-            dset_2Photon_clusters.resize((new_len, 6, 6))
+            dset_2Photon_clusters.resize((new_len, clusterSize2Photon, clusterSize2Photon))
             dset_2Photon_refs.resize((new_len, 2))
             dset_2Photon_clusters[old_len:new_len] = cluster_2Photon_list
             dset_2Photon_refs[old_len:new_len] = refpoint_2Photon_list
@@ -194,17 +195,18 @@ def _processFrames(idxChunk): ### for both single and double photon events, usin
             if energy < 5:
                 continue
 
+            ### detect double photon events
             if 2 * Energy - selectionRange < energy < 2 * Energy + selectionRange:
                 for i in range(len(xs)):
                     _hists['h2_DoubleHitsSumFrame'].Fill(xs[i]+Roi[0], ys[i]+Roi[2], enes[i])
                 if 'writeClusters' in _cfg and _cfg['writeClusters'] == True:
-                    ref_x = (min(xs) + max(xs)) // 2
-                    ref_y = (min(ys) + max(ys)) // 2
-                    cluster_2Photon = np.zeros((6, 6), dtype=np.float32)
+                    ref_x = (min(xs) + max(xs)) // 2 - (clusterSize2Photon // 2) ### referred to the lower-left corner of the cluster
+                    ref_y = (min(ys) + max(ys)) // 2 - (clusterSize2Photon // 2)
+                    cluster_2Photon = np.zeros((clusterSize2Photon, clusterSize2Photon), dtype=np.float32)
                     for i in range(len(xs)):
-                        x_rel = xs[i] - int(ref_x) + 3
-                        y_rel = ys[i] - int(ref_y) + 3
-                        if 0 <= x_rel < 6 and 0 <= y_rel < 6:
+                        x_rel = xs[i] - int(ref_x)
+                        y_rel = ys[i] - int(ref_y)
+                        if 0 <= x_rel < clusterSize2Photon and 0 <= y_rel < clusterSize2Photon:
                             cluster_2Photon[y_rel, x_rel] = enes[i]
                     cluster_2Photon_list.append(cluster_2Photon)
                     refpoint_2Photon_list.append([int(ref_x)+Roi[0], int(ref_y)+Roi[2]])
@@ -237,6 +239,12 @@ def process():
         for key in hists.keys():
             hists[key].Add(_hists[key])
         del _hists
+
+    outputTFileName = f'/home/xie_x1/MLXID/DataProcess/Results/{_cfg["runName"]}.root'
+    outputTFile = TFile(outputTFileName, 'RECREATE')
+    for hist in hists.values():
+        hist.Write()
+    outputTFile.Close()
 
 def getPedestalAndNoise():
     NX, NY = _cfg['NX'], _cfg['NY']
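
Note: below is a minimal sketch, not part of the patch, of how the per-chunk 2-photon cluster files written by _processFrames could be read back and merged downstream. The output folder path is a placeholder, and the 'refpoints' dataset name is an assumption (the name passed to the second create_dataset call is not visible in this hunk); both must match the producing run.

import numpy as np
import h5py

outputFolder = '/path/to/output'  ### placeholder; must match _cfg["outputFolder"]
clusterSize2Photon = 7            ### must match the value used when writing
nChunks = 16                      ### must match the producer's chunk count

clusters, refs = [], []
for idxChunk in range(nChunks):
    fname = f'{outputFolder}/Clusters_2Photon_CS{clusterSize2Photon}_chunk{idxChunk}.h5'
    with h5py.File(fname, 'r') as f:
        clusters.append(f['clusters'][:])  ### (N, 7, 7) float32 cluster images
        refs.append(f['refpoints'][:])     ### assumed dataset name; (N, 2) lower-left corners in full-frame coordinates

clusters = np.concatenate(clusters)
refs = np.concatenate(refs)
print(clusters.shape, refs.shape)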