This commit is contained in:
2025-11-05 11:08:42 +01:00
commit 8caa048da4
2 changed files with 296 additions and 0 deletions

52
DataAnalysis.py Normal file
View File

@@ -0,0 +1,52 @@
"""Interpolate Siemens-star X-ray frames using the backside-pulsing ADU->keV
calibration and save the interpolated frame as a NumPy array."""
import importlib

import UsefulFuncs
from glob import glob
from ROOT import *
import numpy as np
from array import array
import matplotlib.pyplot as plt

# `imp` was deprecated since 3.4 and removed in Python 3.12;
# importlib.reload is the supported replacement.
importlib.reload(UsefulFuncs)

### Measurement parameters
exptime = 50           # exposure time [us]
gain = 'g4'
moenchCode = 'Moench040'
date = '250423'
hv = 150               # bias voltage [V]
Folder = f'/mnt/sls_det_storage/moench_data/xiangyu/{date}_BP_cali_{moenchCode}_{hv}V_{gain}_{exptime}us'
RootFileName = f'{moenchCode}_AduToKevMapping_{gain}_{exptime}us_{hv}V_{date}.root'
CaliLut3DFileName = f'/home/xie_x1/MLED/data-process/utils/BacksidePulsing_Calibration/Moench040/{moenchCode}_AduToKevMapping_{gain}_{exptime}us_{hv}V_{date}.npy'
XrayFolder = f'/mnt/sls_det_storage/moench_data/2504_SoleilBeamtime/{moenchCode}_g4_SimonStars_150V_50us'
Roi = [140, 230, 120, 210]   # [x0, x1, y0, y1] in detector pixels
Energy = 15                  # photon energy [keV]
measurementConfig = {
    'caliFile': np.load(CaliLut3DFileName),
    'pedestalFileName': f'{XrayFolder}/{Energy}keV_pedestal_d0_f0_0.raw',
    'signalFileNames': glob(f'{XrayFolder}/{Energy}keV_signal_d0_f*_0.raw'),
    'etaxLUT': '/mnt/sls_det_storage/moench_data/MLXID/EtaInterpolation/Moench040_EtaxLUT_g4_50us_150V_250423.npy',
    'etayLUT': '/mnt/sls_det_storage/moench_data/MLXID/EtaInterpolation/Moench040_EtayLUT_g4_50us_150V_250423.npy',
    'Roi': Roi,
    'NX': 400, 'NY': 400,
    'energy': Energy, # keV
    'selectionRange': 2,
    'NFrame': 20_000_000, ### 20_000_000 in total
    'NThread': 16,
    'headerSize': 56,
    'clusterWidth': 3, ### cluster width, must be odd number
    'write3x3Cluster': True,
    'NChunks': 200,
}
UsefulFuncs.init(measurementConfig)
UsefulFuncs.getPedestalAndNoise_simplified()
# NOTE(review): getHists() is not defined in the visible UsefulFuncs.py
# (which exposes process()) -- confirm it exists in the deployed version.
UsefulFuncs.getHists()

# Dump the interpolated 2D histogram into a NumPy array (ROOT bins are 1-based).
nBinX = UsefulFuncs.h2_interpolated.GetXaxis().GetNbins()
nBinY = UsefulFuncs.h2_interpolated.GetYaxis().GetNbins()
frame_interpolated = np.zeros((nBinY, nBinX))
for ix in range(nBinX):
    for iy in range(nBinY):
        frame_interpolated[iy, ix] = UsefulFuncs.h2_interpolated.GetBinContent(ix+1, iy+1)
np.save('SiemenStar_interpolatedFrame.npy', frame_interpolated)

244
UsefulFuncs.py Normal file
View File

@@ -0,0 +1,244 @@
import os
import numpy as np
from ROOT import TH1D, TH2D, TCanvas, TF1
from multiprocessing import Pool
from array import array
import h5py
from multiprocessing import Pool
import aare
### Module-level state shared between init()/process() and the worker functions.
_cfg = {} ### global config: measurement dictionary passed to init()
_aduToKev3DMap = None ### global caliFile: 3D LUT indexed (aduBin, y, x) -> keV
_pedestalAduFrame = None ### global pedestalAduFrame: per-pixel pedestal mean [ADU]
_noiseEneFrame = None ### global noiseEneFrame: per-pixel noise sigma [keV]
# Merged result histograms; populated by process().
h1_ClusterEnergy, h1_PixelEnergy, h1_CenterPixelEnergy, h2_Qc = None, None, None, None
sumFrame = np.zeros((400, 400), dtype=np.float32) ### global sumFrame for all frames (cropped to the ROI in init())
EtaxLUT, EtayLUT = None, None ### global LUT for charge weighted center (eta) interpolation
nChunks = 16  # number of processing chunks; overridden by cfg['NChunks'] in init()
def init(cfg):
    """Store the measurement configuration and prepare the module-level state.

    Keeps a reference to the calibration LUT, crops the running sum frame to
    the region of interest, optionally loads the eta look-up tables, and sets
    the number of processing chunks (cfg['NChunks'], default 16).
    """
    global _cfg, _aduToKev3DMap, sumFrame, nChunks
    _cfg = cfg
    _aduToKev3DMap = cfg['caliFile']
    # Restrict the accumulated frame to the region of interest.
    x0, x1, y0, y1 = cfg['Roi']
    sumFrame = sumFrame[y0:y1, x0:x1]
    # Eta LUTs are optional; only load them when both files are configured.
    if 'etaxLUT' in cfg and 'etayLUT' in cfg:
        global EtaxLUT, EtayLUT
        EtaxLUT = np.load(cfg['etaxLUT'])
        EtayLUT = np.load(cfg['etayLUT'])
    nChunks = _cfg.get('NChunks', 16)
def convertAduFrameToEnergyFrame(aduFrame):
    """Convert a raw ADU frame to energy (keV) via the per-pixel 3D LUT.

    _aduToKev3DMap[idxBin, iy, ix] gives the keV value at 10-ADU bin
    boundaries for every pixel; values between two bins are recovered by
    linear interpolation.  Returns the input unchanged when no calibration
    map has been loaded.
    """
    if _aduToKev3DMap is None:
        # No calibration loaded: pass the ADU frame through untouched.
        return aduFrame
    NX, NY = _cfg['NX'], _cfg['NY']  # NOTE(review): overwritten below from the LUT shape
    if _aduToKev3DMap.shape[0] == 1640:
        # Presumably the legacy LUT layout covering |ADU| only (1640 bins of
        # 10 ADU) -- confirm.  Interpolate on |ADU| and restore the sign after.
        idxFrame = (np.abs(aduFrame//10)).astype(np.int32)
        # Clamp so idxFrame+1 below stays inside the LUT.
        idxFrame[idxFrame > _aduToKev3DMap.shape[0]-2] = _aduToKev3DMap.shape[0]-2
        NY, NX = _aduToKev3DMap.shape[1:3]
        # Fancy indexing picks the per-pixel LUT entry at the bin below/above.
        _energyFrame0 = _aduToKev3DMap[idxFrame, np.arange(NY).reshape(-1, 1), np.arange(NX)]
        _energyFrame1 = _aduToKev3DMap[idxFrame+1, np.arange(NY).reshape(-1, 1), np.arange(NX)]
        energyFrame = _energyFrame0 + (_energyFrame1 - _energyFrame0)/10 * (np.abs(aduFrame) - idxFrame*10)
        energyFrame *= np.sign(aduFrame)
    else:
        # Signed LUT layout: the first nExtraBins bins cover -1000..0 ADU, so
        # negative ADU values index valid bins without taking abs().
        aduPerBin = 10
        nExtraBins = 1000 // aduPerBin
        idxFrame = (aduFrame//aduPerBin).astype(np.int32) + nExtraBins
        idxFrame[idxFrame > _aduToKev3DMap.shape[0]-2] = _aduToKev3DMap.shape[0]-2
        NY, NX = _aduToKev3DMap.shape[1:3]
        _energyFrame0 = _aduToKev3DMap[idxFrame, np.arange(NY).reshape(-1, 1), np.arange(NX)]
        _energyFrame1 = _aduToKev3DMap[idxFrame+1, np.arange(NY).reshape(-1, 1), np.arange(NX)]
        energyFrame = _energyFrame0 + (_energyFrame1 - _energyFrame0)/aduPerBin * (aduFrame - (idxFrame-nExtraBins)*aduPerBin)
    # Optional global energy-scale correction.
    if 'energyScalingFactor' in _cfg:
        energyFrame *= _cfg['energyScalingFactor']
    return energyFrame
def bookHistograms(energy, suffix = '', energyBinWidth = 0.1, isMC = False):
    """Book the histogram set for one processing chunk.

    Parameters
    ----------
    energy : nominal photon energy in keV; 1D spectra span [-1, 5.5*energy].
    suffix : appended to every histogram name (e.g. '_<chunk>') so objects
        booked by parallel workers do not collide.
    energyBinWidth : bin width of the 1D energy spectra, in keV.
    isMC : when True, also book a charge-collection-efficiency histogram and
        return clones of the 5 MC histograms instead of the 8 data ones.

    Returns
    -------
    (h1_ClusterEnergy, h1_PixelEnergy, h1_CenterPixelEnergy, h2_Qc, h2_counts,
     h2_HitsSumFrame, h2_interpolated, h2_interpolatedSubpixel) for data, or
    the 5-tuple of clones described above when isMC is True.
    """
    histMaxEnergy = energy * 5.5
    nEnergyBins = int(histMaxEnergy // energyBinWidth)

    def _bookEnergyH1(name, title):
        # All 1D energy spectra share the same binning; detach from the
        # current ROOT directory so the objects survive file/process changes.
        h = TH1D(f'{name}{suffix}', f'{name}{suffix}', nEnergyBins, -1, histMaxEnergy)
        h.SetDirectory(0)
        h.SetTitle(title)
        return h

    h1_ClusterEnergy = _bookEnergyH1('h1_ClusterEnergy', 'Cluster Energy;Energy [keV];Counts')
    h1_PixelEnergy = _bookEnergyH1('h1_PixelEnergy', 'Pixel Energy;Energy [keV];Counts')
    h1_CenterPixelEnergy = _bookEnergyH1('h1_CenterPixelEnergy', 'Center Pixel Energy;Energy [keV];Counts')

    # Charge-weighted-center (eta) distribution over one pixel cell.
    h2_Qc = TH2D(f'h2_Qc{suffix}', f'h2_Qc{suffix}', 201, 0, 1, 201, 0, 1)
    h2_Qc.SetDirectory(0)
    h2_Qc.SetTitle('Charge weighted center;#eta_x;#eta_y;Counts')

    x0, x1, y0, y1 = _cfg['Roi']

    def _bookRoiH2(name, nxBins, nyBins):
        # 2D map spanning the ROI in detector pixel coordinates.
        h = TH2D(f'{name}{suffix}', f'{name}{suffix}', nxBins, x0, x1, nyBins, y0, y1)
        h.SetDirectory(0)
        return h

    h2_counts = _bookRoiH2('h2_counts', x1 - x0, y1 - y0)
    h2_HitsSumFrame = _bookRoiH2('h2_HitsSumFrame', x1 - x0, y1 - y0)
    interpolationBins = 10  # sub-pixel bins per physical pixel
    h2_interpolated = _bookRoiH2('h2_interpolated', (x1 - x0) * interpolationBins, (y1 - y0) * interpolationBins)
    h2_interpolatedSubpixel = TH2D(f'h2_interpolatedSubpixel{suffix}', f'h2_interpolatedSubpixel{suffix}', interpolationBins, 0, 1, interpolationBins, 0, 1)
    h2_interpolatedSubpixel.SetDirectory(0)
    if isMC:
        h1_ChargeCollectionEfficiency = TH1D(f'h1_ChargeCollectionEfficiency{suffix}', f'h1_ChargeCollectionEfficiency{suffix}', 400, 0.8, 1.01)
        return h1_ClusterEnergy.Clone(), h1_PixelEnergy.Clone(), h1_CenterPixelEnergy.Clone(), h2_Qc.Clone(), h1_ChargeCollectionEfficiency.Clone()
    return h1_ClusterEnergy, h1_PixelEnergy, h1_CenterPixelEnergy, h2_Qc, h2_counts, h2_HitsSumFrame, h2_interpolated, h2_interpolatedSubpixel
def _processFrames(idxChunk): ### worker for both single and double photon events
    """Worker for process(): handle frames [startFrame, endFrame) of one chunk.

    Reads raw frames, subtracts the pedestal, converts ADU to keV, runs the
    aare cluster finder on the ROI and fills the per-chunk histograms, which
    are returned for merging in the parent process.  When
    _cfg['writeClusters'] is True, found clusters can also be streamed to an
    HDF5 file.
    """
    startFrame, endFrame = _cfg['NFrame'] * idxChunk // nChunks, min(_cfg['NFrame'] * (idxChunk + 1) // nChunks, _cfg['NFrame'])
    print(f'Processing frames from {startFrame} to {endFrame}...')
    Energy = _cfg['energy']
    selectionRange = _cfg['selectionRange']  # NOTE(review): read but unused in this function
    signalFileNames = _cfg['signalFileNames']
    NX = _cfg['NX']
    NY = _cfg['NY']
    headerSize = _cfg['headerSize']
    Roi = _cfg['Roi']
    # One frame on disk is (NX*NY + headerSize) uint16 words.  Use the
    # configured header size here (was a hard-coded 56, inconsistent with the
    # offset computations below).
    nFramePerFile = os.path.getsize(f'{signalFileNames[0]}') // (NX * NY + headerSize) // 2
    # Unpack in the order bookHistograms() returns; the original swapped the
    # first two names, which mislabelled the underlying ROOT object names.
    _h1_ClusterEnergy, _h1_PixelEnergy, _h1_CenterPixelEnergy, _h2_Qc, _h2_counts, _h2_HitsSumFrame, _h2_interpolated, _h2_interpolatedSubpixel = bookHistograms(Energy, f'_{idxChunk}')
    ### create a cluster finder over the ROI
    nxROI = Roi[1] - Roi[0]
    nyROI = Roi[3] - Roi[2]
    CF = aare.VarClusterFinder((nxROI, nyROI), 5) ### arg1: frame size, arg2: seed threshold
    CF.set_peripheralThresholdFactor(2)
    # NOTE(review): _noiseEneFrame is full-frame while the finder is ROI-sized;
    # confirm aare accepts/crops this.
    CF.set_noiseMap(_noiseEneFrame)
    if 'writeClusters' in _cfg and _cfg['writeClusters'] == True:
        h5_file = h5py.File(f'clusters_chunk{idxChunk}.h5', 'w')
        # Create resizable datasets so clusters can be appended in batches.
        dset_clusters = h5_file.create_dataset(
            'clusters', (0, 3, 3), maxshape=(None, 3, 3), dtype='f4',
            chunks=True, compression='gzip'
        )
        dset_refs = h5_file.create_dataset(
            'referencePoint', (0, 2), maxshape=(None, 2), dtype='i4',
            chunks=True
        )
        buffer_size = 50_000  # NOTE(review): nothing appends to these buffers in this loop yet
        cluster_list = []
        refpoint_list = []
        def flush_h5_buffer():
            # Append buffered clusters to the HDF5 datasets and clear the buffers.
            if not cluster_list:
                return
            old_len = dset_clusters.shape[0]
            new_len = old_len + len(cluster_list)
            dset_clusters.resize((new_len, 3, 3))
            dset_refs.resize((new_len, 2))
            dset_clusters[old_len:new_len] = cluster_list
            dset_refs[old_len:new_len] = refpoint_list
            cluster_list.clear()
            refpoint_list.clear()
    for idxFrame in range(startFrame, endFrame):
        if idxFrame % 10000 == 0:
            print(f'Processing frame {idxFrame}...')
        # Rebinding idxFrame to the in-file index is safe: range() restores
        # the loop variable on the next iteration.
        idxFile, idxFrame = divmod(idxFrame, nFramePerFile)
        try:
            if 'nFiber' in _cfg and _cfg['nFiber'] == 2:
                # Two-fiber readout: the d0/d1 files each hold half the rows
                # of every frame; stitch them back together.
                offset = (headerSize + idxFrame * (NX * NY//2 + headerSize)) * 2
                fileName1 = signalFileNames[idxFile]
                fileName2 = fileName1.replace('d0', 'd1')
                _signalAduFrame1 = np.fromfile(f'{fileName1}', dtype=np.uint16, offset=offset, count=NX * NY //2).astype(np.int32).reshape(NY//2, NX)
                _signalAduFrame2 = np.fromfile(f'{fileName2}', dtype=np.uint16, offset=offset, count=NX * NY //2).astype(np.int32).reshape(NY//2, NX)
                signalAduFrame = np.concatenate([_signalAduFrame1, _signalAduFrame2], axis=0)
                del _signalAduFrame1, _signalAduFrame2
            else:
                offset = (headerSize + idxFrame * (NX * NY + headerSize)) * 2
                # NOTE(review): reshape(NX, NY) while the two-fiber path uses
                # (NY, NX); identical for the square 400x400 sensor -- confirm.
                signalAduFrame = np.fromfile(f'{signalFileNames[idxFile]}', dtype=np.uint16, offset=offset, count=NX * NY).astype(np.int32).reshape(NX, NY)
        except Exception as e:
            # Truncated/missing file: return the partially filled histograms.
            print(f'Error reading frame {idxFrame} from file {signalFileNames[idxFile]}: {e}, stop processing this chunk ({startFrame}-{endFrame})')
            return _h1_ClusterEnergy.Clone(), _h1_PixelEnergy.Clone(), _h1_CenterPixelEnergy.Clone(), _h2_Qc.Clone(), _h2_counts.Clone(), _h2_HitsSumFrame.Clone(), _h2_interpolated.Clone(), _h2_interpolatedSubpixel.Clone()
        signalAduFrame = signalAduFrame - _pedestalAduFrame
        signalEneFrame = convertAduFrameToEnergyFrame(signalAduFrame)
        CF.find_clusters_X(signalEneFrame[Roi[2]:Roi[3], Roi[0]:Roi[1]])
        clusters = CF.hits()
        for idxCluster in range(len(clusters)):
            energy = clusters['energy'][idxCluster]
            _h1_ClusterEnergy.Fill(energy)
            xs = clusters['cols'][idxCluster]
            ys = clusters['rows'][idxCluster]
            enes = clusters['enes'][idxCluster]
            if energy < 5:
                continue  # skip low-energy clusters (presumably a noise cut -- confirm)
            for i in range(len(xs)):
                _h1_PixelEnergy.Fill(enes[i])
                # Map ROI-local pixel coordinates back to detector coordinates.
                _h2_HitsSumFrame.Fill(xs[i]+Roi[0], ys[i]+Roi[2], enes[i])
    if 'writeClusters' in _cfg and _cfg['writeClusters'] == True:
        flush_h5_buffer()
        h5_file.close()
    return _h1_ClusterEnergy, _h1_PixelEnergy, _h1_CenterPixelEnergy, _h2_Qc, _h2_counts, _h2_HitsSumFrame, _h2_interpolated, _h2_interpolatedSubpixel
def process():
    """Run _processFrames over every chunk in a worker pool and merge the
    per-chunk histograms into the module-level result histograms."""
    with Pool(_cfg['NThread'], maxtasksperchild=1) as pool:
        partialResults = pool.map(_processFrames, range(nChunks))
    global h1_ClusterEnergy, h1_PixelEnergy, h1_CenterPixelEnergy, h2_Qc, h2_counts, h2_HitsSumFrame, h2_interpolated, h2_interpolatedSubpixel
    # Book empty totals (no suffix) to accumulate into, detached from ROOT's
    # directory bookkeeping.
    totals = bookHistograms(_cfg['energy'])
    h1_ClusterEnergy, h1_PixelEnergy, h1_CenterPixelEnergy, h2_Qc, h2_counts, h2_HitsSumFrame, h2_interpolated, h2_interpolatedSubpixel = totals
    for histogram in totals:
        histogram.SetDirectory(0)
    global sumFrame  # declared for parity with the original; not rebound here
    # Each worker returns its histograms in the same order bookHistograms()
    # books them, so the totals can be merged positionally.
    for chunkHistograms in partialResults:
        for total, part in zip(totals, chunkHistograms):
            total.Add(part)
def getPedestalAndNoise():
    """Compute the per-pixel pedestal (ADU) and noise (keV) from pedestal data.

    Reads all pedestal frames (stitching the d0/d1 files when
    _cfg['nFiber'] == 2), drops the first 10% of frames, averages the rest for
    the pedestal, and estimates the noise as the per-pixel standard deviation
    of the energy-converted frames.  Results are stored in the module globals
    _pedestalAduFrame and _noiseEneFrame.
    """
    NX, NY = _cfg['NX'], _cfg['NY']
    pedestalFileName = _cfg['pedestalFileName']
    # Per-frame header length in uint16 words; previously hard-coded as 56,
    # now taken from the config for consistency with _processFrames().
    headerSize = _cfg.get('headerSize', 56)
    if 'nFiber' in _cfg and _cfg['nFiber'] == 2:
        # Two-fiber readout: each file holds half the rows of every frame.
        pedestalFileName2 = pedestalFileName.replace('d0', 'd1')
        _pedestalAduFrames1 = np.fromfile(f'{pedestalFileName}', dtype=np.uint16).astype(np.int32)
        _pedestalAduFrames1 = _pedestalAduFrames1.reshape(-1, NX * NY //2 + headerSize)
        _pedestalAduFrames1 = _pedestalAduFrames1[:, headerSize:].reshape(-1, NY//2, NX)
        _pedestalAduFrames2 = np.fromfile(f'{pedestalFileName2}', dtype=np.uint16).astype(np.int32)
        _pedestalAduFrames2 = _pedestalAduFrames2.reshape(-1, NX * NY //2 + headerSize)
        _pedestalAduFrames2 = _pedestalAduFrames2[:, headerSize:].reshape(-1, NY//2, NX)
        pedestalAduFrames = np.concatenate([_pedestalAduFrames1, _pedestalAduFrames2], axis=1)
        del _pedestalAduFrames1, _pedestalAduFrames2
    else:
        pedestalAduFrames = np.fromfile(f'{pedestalFileName}', dtype=np.uint16).astype(np.int32)
        pedestalAduFrames = pedestalAduFrames.reshape(-1, NX * NY + headerSize)
        # NOTE(review): reshape(NX, NY) vs the (NY, NX) order above; identical
        # for the square sensor -- confirm for non-square geometries.
        pedestalAduFrames = pedestalAduFrames[:, headerSize:].reshape(-1, NX, NY)
    pedestalAduFrames = pedestalAduFrames[pedestalAduFrames.shape[0]//10:] # skip the first 10% frames (settling)
    global _pedestalAduFrame, _noiseEneFrame
    _pedestalAduFrame = np.mean(pedestalAduFrames, axis=0)
    noiseAduFrame = np.std(pedestalAduFrames, axis=0, ddof=1)
    # Convert every pedestal frame to energy in parallel, then take the sigma
    # in energy space.  Pool size follows the configured thread count
    # (previously a hard-coded 16).
    with Pool(_cfg.get('NThread', 16)) as p:
        pedestalEneFrames = p.map(convertAduFrameToEnergyFrame, pedestalAduFrames)
    _noiseEneFrame = np.std(pedestalEneFrames, axis=0, ddof=1)
    print(f'Average noise = {np.mean(_noiseEneFrame):.3f} keV; {np.mean(noiseAduFrame):.3f} ADU')
    del pedestalAduFrames, pedestalEneFrames
def getPedestalAndNoise_simplified():
    """Fast pedestal/noise estimate from a single-fiber pedestal file.

    Unlike getPedestalAndNoise(), the noise is estimated by passing the
    ADU-space standard deviation frame through the calibration once, instead
    of converting every pedestal frame to energy first.  Results are stored in
    the module globals _pedestalAduFrame and _noiseEneFrame.
    """
    NX, NY = _cfg['NX'], _cfg['NY']
    pedestalFileName = _cfg['pedestalFileName']
    # Per-frame header length in uint16 words; previously hard-coded as 56,
    # now taken from the config for consistency with _processFrames().
    headerSize = _cfg.get('headerSize', 56)
    pedestalAduFrames = np.fromfile(f'{pedestalFileName}', dtype=np.uint16).astype(np.int32)
    pedestalAduFrames = pedestalAduFrames.reshape(-1, NX * NY + headerSize)
    # NOTE(review): reshape(NX, NY) -- row order differs from the two-fiber
    # reader's (NY, NX); harmless while NX == NY, but confirm.
    pedestalAduFrames = pedestalAduFrames[:, headerSize:].reshape(-1, NX, NY)
    pedestalAduFrames = pedestalAduFrames[pedestalAduFrames.shape[0]//10:] # skip the first 10% frames (settling)
    global _pedestalAduFrame, _noiseEneFrame
    _pedestalAduFrame = np.mean(pedestalAduFrames, axis=0)
    noiseAduFrame = np.std(pedestalAduFrames, axis=0, ddof=1)
    # Approximate: maps the ADU-sigma frame through the (piecewise-linear) LUT.
    _noiseEneFrame = convertAduFrameToEnergyFrame(noiseAduFrame)
    print(f'Average noise = {np.mean(_noiseEneFrame):.3f} keV; {np.mean(noiseAduFrame):.3f} ADU')
    del pedestalAduFrames