diff --git a/Configs.py b/Configs.py
index ef57a8f..1936b43 100644
--- a/Configs.py
+++ b/Configs.py
@@ -17,9 +17,9 @@ Configs['SiemenStarLowerLeft'] = {
     'writeClusters': True,
     # 'outputFolder': '/mnt/sls_det_storage/moench_data/MLXID/Samples/Measurement/2504_SOLEIL_SiemenStarClusters_MOENCH040_150V/',
     'outputFolder': '/home/xie_x1/MLXID/DataProcess/Samples/SiemenStarLowerLeft/',
-    'NFrame': 2_000_0, ### 20_000_000 in total
+    'NFrame': 20_000_000, ### 20_000_000 in total
     'NThread': 16,
-    'NChunks': 16,
+    'NChunks': 160,
     'runName': 'SiemenStarLowerLeft',
 }
 
diff --git a/DataAnalysis.py b/DataAnalysis.py
index d1c948a..e37aa42 100644
--- a/DataAnalysis.py
+++ b/DataAnalysis.py
@@ -1,12 +1,11 @@
 import UsefulFuncs
 import numpy as np
 from Configs import Configs
-
-measurementConfig = Configs['SiemenStarLowerLeft'] ### SiemenStarLowerLeft, SiemenStarLowerRight
-
-# measurementConfig['writeClusters'] = True ### default is False
-# measurementConfig['NFrame'] = 20_000_000 ### 20_000_000 in total
-
+from argparse import ArgumentParser
+argParser = ArgumentParser()
+argParser.add_argument('--runName', type=str, default='SiemenStarLowerLeft')
+args = argParser.parse_args()
+measurementConfig = Configs[args.runName]
 UsefulFuncs.init(measurementConfig)
 UsefulFuncs.getPedestalAndNoise_simplified()
 
diff --git a/UsefulFuncs.py b/UsefulFuncs.py
index 772eb78..65a4aed 100644
--- a/UsefulFuncs.py
+++ b/UsefulFuncs.py
@@ -14,12 +14,13 @@ _pedestalAduFrame = None ### global pedestalAduFrame
 _noiseEneFrame = None ### global noiseEneFrame
 nChunks = 16
 clusterSize2Photon = 7
+clusterSize1Photon = 3
 
 def init(cfg):
     global _cfg
     global _aduToKev3DMap
     _cfg = cfg
-    _aduToKev3DMap = cfg['caliFile']
+    _aduToKev3DMap = np.load(cfg['caliFileName'])
     Roi = cfg['Roi']
     global nChunks
     nChunks = _cfg.get('NChunks', 16)
@@ -104,6 +105,11 @@ def bookHistograms(energy, suffix='', energyBinWidth=0.1, isMC=False):
     )
     histograms['h1_1PhotonEta_Y'].SetTitle('1-Photon Eta Y;Eta Y;Counts')
 
+    histograms['h1_2PhotonClusterEnergy'] = TH1D(
+        f'h1_2PhotonClusterEnergy{suffix}', f'h1_2PhotonClusterEnergy{suffix}', nEnergyBins, -1, energy*4
+    )
+    histograms['h1_2PhotonClusterEnergy'].SetTitle('2-Photon Cluster Energy;Energy [keV];Counts')
+
     histograms['h1_2PhotonClusterSize'] = TH1D(
         f'h1_2PhotonClusterSize{suffix}', f'h1_2PhotonClusterSize{suffix}', 30, 0, 30
     )
@@ -139,6 +145,11 @@ def bookHistograms(energy, suffix='', energyBinWidth=0.1, isMC=False):
     )
     histograms['h2_1PhotonSumFrame'].SetTitle('1-Photon Hits Sum Frame;X [pixel];Y [pixel];Energy Sum [keV]')
 
+    histograms['h2_1PhotonSumSample'] = TH2D(
+        f'h2_1PhotonSumSample{suffix}', f'h2_1PhotonSumSample{suffix}', clusterSize1Photon, 0, clusterSize1Photon, clusterSize1Photon, 0, clusterSize1Photon
+    )
+    histograms['h2_1PhotonSumSample'].SetTitle('1-Photon Sum Sample;X [pixel];Y [pixel];Energy Sum [keV]')
+
     histograms['h2_2PhotonSumSample'] = TH2D(
         f'h2_2PhotonSumSample{suffix}', f'h2_2PhotonSumSample{suffix}', clusterSize2Photon, 0, clusterSize2Photon, clusterSize2Photon, 0, clusterSize2Photon
     )
@@ -186,9 +197,9 @@ def _processFrames(idxChunk): ### for both single and double photon events, usin
     CF.set_noiseMap(_noiseEneFrame)
 
     if 'writeClusters' in _cfg and _cfg['writeClusters'] == True:
-        h5_1Photon_file = h5py.File(f'{_cfg["outputFolder"]}/1Photon_CS3_chunk{idxChunk}.h5', 'w')
+        h5_1Photon_file = h5py.File(f'{_cfg["outputFolder"]}/1Photon_CS{clusterSize1Photon}_chunk{idxChunk}.h5', 'w')
         dset_1Photon_clusters = h5_1Photon_file.create_dataset(
-            'clusters', (0, 3, 3), maxshape=(None, 3, 3), dtype='f4',
+            'clusters', (0, clusterSize1Photon, clusterSize1Photon), maxshape=(None, clusterSize1Photon, clusterSize1Photon), dtype='f4',
             chunks=True, compression='gzip'
         )
         dset_1photon_refs = h5_1Photon_file.create_dataset(
@@ -210,11 +221,12 @@ def _processFrames(idxChunk): ### for both single and double photon events, usin
         )
         cluster_2Photon_list = []
         refpoint_2Photon_list = []
-
+        BUFFER_THRESHOLD = 10000
        def flush_h5_buffer():
+            if not cluster_1Photon_list: return # no new clusters to write; FIXME(review): this also skips flushing cluster_2Photon_list when only 2-photon clusters are buffered
            old_len = dset_1Photon_clusters.shape[0]
            new_len = old_len + len(cluster_1Photon_list)
-            dset_1Photon_clusters.resize((new_len, 3, 3))
+            dset_1Photon_clusters.resize((new_len, clusterSize1Photon, clusterSize1Photon))
            dset_1photon_refs.resize((new_len, 2))
            dset_1Photon_clusters[old_len:new_len] = cluster_1Photon_list
            dset_1photon_refs[old_len:new_len] = refpoint_1Photon_list
@@ -252,7 +264,8 @@ def _processFrames(idxChunk): ### for both single and double photon events, usin
         signalEneFrame = convertAduFrameToEnergyFrame(signalAduFrame)
         sumFrame += signalEneFrame
 
-        CF.find_clusters_X(signalEneFrame[Roi[2]:Roi[3], Roi[0]:Roi[1]])
+        signalEneFrame = signalEneFrame[Roi[2]:Roi[3], Roi[0]:Roi[1]]
+        CF.find_clusters_X(signalEneFrame)
         clusters = CF.hits()
         for idxCluster in range(len(clusters)):
             clusterSize = clusters['size'][idxCluster]
@@ -264,6 +277,10 @@ def _processFrames(idxChunk): ### for both single and double photon events, usin
 
             ### single photon events
             if Energy - selectionRange < energy < Energy + selectionRange:
+                ref_x = clusters['col'][idxCluster] - 1 ### referred to the lower-left corner of the cluster
+                ref_y = clusters['row'][idxCluster] - 1
+                if int(ref_x) < 0 or int(ref_x)+clusterSize1Photon > signalEneFrame.shape[1] or int(ref_y) < 0 or int(ref_y)+clusterSize1Photon > signalEneFrame.shape[0]:
+                    continue ### skip clusters too close to the border
                 for i in range(len(xs)):
                     _hists['h2_1PhotonSumFrame'].Fill(xs[i]+Roi[0], ys[i]+Roi[2], enes[i])
                 _hists['h1_1PhotonClusterSize'].Fill(clusterSize)
@@ -271,18 +288,19 @@ def _processFrames(idxChunk): ### for both single and double photon events, usin
                 diameterY = max(ys) - min(ys) + 1
                 _hists['h1_1PhotonClusterDiameterX'].Fill(diameterX)
                 _hists['h1_1PhotonClusterDiameterY'].Fill(diameterY)
-                _hists['h1_1PhotonClusterEnergy'].Fill(energy)
-                maxX, maxY = clusters['col'][idxCluster], clusters['row'][idxCluster]
-                ref_x = maxX - 1 ### refered to the lower-left corner of the cluster
-                ref_y = maxY - 1
-                cluster_1Photon = np.zeros((3, 3), dtype=np.float32)
-                for i in range(len(xs)):
-                    x_rel = xs[i] - int(ref_x)
-                    y_rel = ys[i] - int(ref_y)
-                    _hists['h1_1PhotonClusterPixelEnergy'].Fill(enes[i])
-                    if 0 <= x_rel < 3 and 0 <= y_rel < 3:
-                        cluster_1Photon[y_rel, x_rel] = enes[i]
+                cluster_1Photon = np.zeros((clusterSize1Photon, clusterSize1Photon), dtype=np.float32)
+                # for i in range(len(xs)):
+                #     x_rel = xs[i] - int(ref_x)
+                #     y_rel = ys[i] - int(ref_y)
+                #     _hists['h1_1PhotonClusterPixelEnergy'].Fill(enes[i])
+                #     if 0 <= x_rel < clusterSize1Photon and 0 <= y_rel < clusterSize1Photon:
+                #         cluster_1Photon[y_rel, x_rel] = enes[i]
+                cluster_1Photon = signalEneFrame[int(ref_y):int(ref_y)+clusterSize1Photon, int(ref_x):int(ref_x)+clusterSize1Photon].copy() # .copy(): the slice is a view; buffering views would keep every full frame alive until flush
+                _hists['h1_1PhotonClusterEnergy'].Fill(np.sum(cluster_1Photon)) # use the sum of 3x3 cluster as the cluster energy for single photon events, which is more accurate than the original cluster energy given by the cluster finder, since the cluster finder may miss some pixels with low energy far from the center pixel
+                for i in range(clusterSize1Photon):
+                    for j in range(clusterSize1Photon):
+                        _hists['h2_1PhotonSumSample'].Fill(j, i, cluster_1Photon[i, j]) # Fill(x, y, w): column index j is x, row index i is y
                 if 'writeClusters' in _cfg and _cfg['writeClusters'] == True:
                     cluster_1Photon_list.append(cluster_1Photon)
                     refpoint_1Photon_list.append([int(ref_x)+Roi[0], int(ref_y)+Roi[2]])
 
@@ -297,7 +315,14 @@ def _processFrames(idxChunk): ### for both single and double photon events, usin
                     _hists['h1_1PhotonEta_Y'].Fill(etaY)
 
             ### double photon events
-            if 2 * Energy - selectionRange < energy < 2 * Energy + selectionRange:
+            if 2 * Energy - 2 * selectionRange < energy < 2 * Energy + 2 * selectionRange:
+                x_center = np.sum(xs * enes) / np.sum(enes)
+                y_center = np.sum(ys * enes) / np.sum(enes)
+                ref_x = int(x_center - clusterSize2Photon / 2) + 1 ### referred to the lower-left corner of the cluster
+                ref_y = int(y_center - clusterSize2Photon / 2) + 1
+
+                if int(ref_x) < 0 or int(ref_x)+clusterSize2Photon > signalEneFrame.shape[1] or int(ref_y) < 0 or int(ref_y)+clusterSize2Photon > signalEneFrame.shape[0]:
+                    continue ### skip clusters too close to the border
                 for i in range(len(xs)):
                     _hists['h2_2PhotonSumFrame'].Fill(xs[i]+Roi[0], ys[i]+Roi[2], enes[i])
                     _hists['h1_2PhotonClusterPixelEnergy'].Fill(enes[i])
@@ -307,8 +332,6 @@ def _processFrames(idxChunk): ### for both single and double photon events, usin
                 _hists['h1_2PhotonClusterDiameterX'].Fill(diameterX)
                 _hists['h1_2PhotonClusterDiameterY'].Fill(diameterY)
 
-                ref_x = (min(xs) + max(xs)) // 2 - (clusterSize2Photon // 2) ### refered to the lower-left corner of the cluster
-                ref_y = (min(ys) + max(ys)) // 2 - (clusterSize2Photon // 2)
                 cluster_2Photon = np.zeros((clusterSize2Photon, clusterSize2Photon), dtype=np.float32)
                 for i in range(len(xs)):
                     x_rel = xs[i] - int(ref_x)
@@ -316,7 +339,11 @@ def _processFrames(idxChunk): ### for both single and double photon events, usin
                     if 0 <= x_rel < clusterSize2Photon and 0 <= y_rel < clusterSize2Photon:
                         cluster_2Photon[y_rel, x_rel] = enes[i]
                         _hists['h2_2PhotonSumSample'].Fill(x_rel, y_rel, enes[i])
-
+                # cluster_2Photon = signalEneFrame[int(ref_y):int(ref_y)+clusterSize2Photon, int(ref_x):int(ref_x)+clusterSize2Photon]
+                _hists['h1_2PhotonClusterEnergy'].Fill(np.sum(cluster_2Photon))
+                # NOTE(review): h2_2PhotonSumSample is already filled per pixel in the loop
+                # above (Fill(x_rel, y_rel, enes[i])); filling it again from cluster_2Photon
+                # here would double-count every cluster, so the duplicate nested loop was removed.
                 if 'writeClusters' in _cfg and _cfg['writeClusters'] == True:
                     cluster_2Photon_list.append(cluster_2Photon)
                     refpoint_2Photon_list.append([int(ref_x)+Roi[0], int(ref_y)+Roi[2]])
@@ -329,10 +356,12 @@ def _processFrames(idxChunk): ### for both single and double photon events, usin
             for i in range(len(xs)):
                 _hists['h1_PixelEnergy'].Fill(enes[i])
                 _hists['h2_HitsSumFrame'].Fill(xs[i]+Roi[0], ys[i]+Roi[2], enes[i])
+        if 'writeClusters' in _cfg and _cfg['writeClusters'] == True:
+            if len(cluster_1Photon_list) > BUFFER_THRESHOLD or len(cluster_2Photon_list) > BUFFER_THRESHOLD:
+                flush_h5_buffer()
 
     for _x in range(Roi[0], Roi[1]):
         for _y in range(Roi[2], Roi[3]):
-            signalEneFrame[_x, _y]
             _hists['h2_SumFrame'].Fill(_x, _y, sumFrame[_y, _x])
 
     if 'writeClusters' in _cfg and _cfg['writeClusters'] == True:
         flush_h5_buffer()