Add profiling to tests and separate read_individual_data into sub-methods

This commit is contained in:
2024-03-05 10:19:21 +01:00
parent 1780916a12
commit 7274e1bc85
2 changed files with 173 additions and 141 deletions

View File

@@ -123,62 +123,127 @@ class AmorData:
#return matr
#-------------------------------------------------------------------------------------------------
def read_individual_data(self, fileName, norm=False):
pixelLookUp = self.resolve_pixels()
self.hdf = h5py.File(fileName, 'r', swmr=True)
if self.readHeaderInfo:
# read general information and first data set
logging.info(f'# meta data from: {self.file_list[0]}')
self.hdf = h5py.File(self.file_list[0], 'r', swmr=True)
title = self.hdf['entry1/title'][0].decode('utf-8')
proposal_id = self.hdf['entry1/proposal_id'][0].decode('utf-8')
user_name = self.hdf['entry1/user/name'][0].decode('utf-8')
user_affiliation = 'unknown'
user_email = self.hdf['entry1/user/email'][0].decode('utf-8')
user_orcid = None
sampleName = self.hdf['entry1/sample/name'][0].decode('utf-8')
model = self.hdf['entry1/sample/model'][0].decode('utf-8')
instrumentName = 'Amor'
source = self.hdf['entry1/Amor/source/name'][0].decode('utf-8')
sourceProbe = 'neutron'
start_time = self.hdf['entry1/start_time'][0].decode('utf-8')
self.start_date = start_time.split(' ')[0]
if self.config.sampleModel:
model = self.config.sampleModel
# assembling orso header information
self.header.owner = fileio.Person(
name = user_name,
affiliation = user_affiliation,
contact = user_email,
)
if user_orcid:
self.header.owner.orcid = user_orcid
self.header.experiment = fileio.Experiment(
title = title,
instrument = instrumentName,
start_date = self.start_date,
probe = sourceProbe,
facility = source,
proposalID = proposal_id
)
self.header.sample = fileio.Sample(
name = sampleName,
model = model,
sample_parameters = None,
)
self.header.measurement_scheme = 'angle- and energy-dispersive'
self.chopperDistance = float(np.take(self.hdf['entry1/Amor/chopper/pair_separation'], 0))
self.detectorDistance = float(np.take(self.hdf['entry1/Amor/detector/transformation/distance'], 0))
self.chopperDetectorDistance = self.detectorDistance - float(np.take(self.hdf['entry1/Amor/chopper/distance'], 0))
self.tofCut = const.lamdaCut * self.chopperDetectorDistance / const.hdm * 1.e-13
self.read_header_info()
logging.info(f'# data from file: {fileName}')
self.read_individual_header()
# add header content
if self.readHeaderInfo:
self.readHeaderInfo = False
self.header.measurement_instrument_settings = fileio.InstrumentSettings(
incident_angle = fileio.ValueRange(round(self.mu+self.kap+self.kad-0.5*self.div, 3),
round(self.mu+self.kap+self.kad+0.5*self.div, 3),
'deg'),
wavelength = fileio.ValueRange(const.lamdaCut, self.config.lambdaRange[1], 'angstrom'),
polarization = fileio.Polarization.unpolarized,
)
self.header.measurement_instrument_settings.mu = fileio.Value(round(self.mu, 3), 'deg', comment='sample angle to horizon')
self.header.measurement_instrument_settings.nu = fileio.Value(round(self.nu, 3), 'deg', comment='detector angle to horizon')
if norm:
self.header.measurement_additional_files.append(fileio.File(file=fileName.split('/')[-1], timestamp=self.fileDate))
else:
self.header.measurement_data_files.append(fileio.File(file=fileName.split('/')[-1], timestamp=self.fileDate))
logging.info(f'# mu = {self.mu:6.3f}, nu = {self.nu:6.3f}, kap = {self.kap:6.3f}, kad = {self.kap:6.3f}')
# TODO: should extract monitor from counts or beam current times time
self.monitor1 = self.ctime
self.monitor2 = self.monitor1
self.read_event_stream()
totalNumber = np.shape(self.tof_e)[0]
self.extract_walltime(norm, totalNumber)
if True:
self.filter_strange_times()
self.marge_frames()
self.filter_project_x()
# correct tof for beam size effect at chopper: t_cor = (delta / 180 deg) * tau
# TODO: check for correctness
if not self.config.offSpecular:
self.tof_e -= ( self.delta_e / 180. ) * self.tau
self.calculate_derived_properties()
self.filter_qz_range(norm)
logging.info(f'# number of events: total = {totalNumber:7d}, filtered = {np.shape(self.lamda_e)[0]:7d}')
def filter_qz_range(self, norm):
    """Tighten the event mask to the configured q_z window, then apply the mask.

    The q_z restriction is skipped for normalisation runs and for wide-open
    q_z ranges; the accumulated mask (y range, lambda range, ...) is applied
    to the per-event arrays in every case.
    """
    q_lo, q_hi = self.config.qzRange
    if q_hi < 0.3 and not norm:
        in_window = (q_lo <= self.qz_e) & (self.qz_e <= q_hi)
        self.mask_e = np.logical_and(self.mask_e, in_window)
    # apply the combined mask to all per-event quantities
    self.detZ_e = self.detZ_e[self.mask_e]
    self.lamda_e = self.lamda_e[self.mask_e]
    self.wallTime_e = self.wallTime_e[self.mask_e]
def calculate_derived_properties(self):
    """Compute per-event wavelength and momentum transfer, updating the mask.

    Sets ``lamda_e`` (wavelength from time of flight), ``lamdaMax``,
    ``qz_e`` (and ``qx_e`` in off-specular mode), restricts ``mask_e``
    to the configured wavelength range, and records the measurement
    scheme in the orso header.
    """
    # lambda from tof: factor 1e13 converts units with hdm = h/m_n (project constant)
    self.lamda_e = 1.e13*self.tof_e*const.hdm/(self.chopperDetectorDistance+self.detXdist_e)
    # the +124. is presumably an instrument-specific path length in mm — TODO confirm
    self.lamdaMax = const.lamdaCut+1.e13*self.tau*const.hdm/(self.chopperDetectorDistance+124.)
    self.mask_e = np.logical_and(self.mask_e, (self.config.lambdaRange[0]<=self.lamda_e) & (
        self.lamda_e<=self.config.lambdaRange[1]))
    # alpha_f: final angle of each event relative to the sample surface
    alphaF_e = self.nu-self.mu+self.delta_e
    # q_z (and q_x for off-specular geometry)
    if self.config.offSpecular:
        alphaI = self.kap+self.kad+self.mu
        self.qz_e = 2*np.pi*(np.sin(np.deg2rad(alphaF_e))+np.sin(np.deg2rad(alphaI)))/self.lamda_e
        self.qx_e = 2*np.pi*(np.cos(np.deg2rad(alphaF_e))-np.cos(np.deg2rad(alphaI)))/self.lamda_e
        # bug fix: a trailing comma here previously assigned the 1-tuple
        # ('energy-dispersive',) instead of the string
        self.header.measurement_scheme = 'energy-dispersive'
    else:
        self.qz_e = 4*np.pi*np.sin(np.deg2rad(alphaF_e))/self.lamda_e
        # qx_e = 0.
        self.header.measurement_scheme = 'angle- and energy-dispersive'
def filter_project_x(self):
    """Resolve event pixel IDs to detector geometry and start the event mask.

    Fills ``detZ_e``, ``detXdist_e`` and ``delta_e`` from the pixel lookup
    table and initialises ``mask_e`` with the configured y-range filter.
    """
    lookup = self.resolve_pixels()
    # one lookup row per event: (y index, z index, x position, angle delta)
    rows = lookup[np.int_(self.pixelID_e)-1, :]
    detY_e, self.detZ_e, self.detXdist_e, self.delta_e = rows.T
    # keep only events inside the configured detector y range
    y_lo, y_hi = self.config.yRange
    self.mask_e = (y_lo <= detY_e) & (detY_e <= y_hi)
def marge_frames(self):
    """Fold all event times of flight into one chopper period.

    NOTE(review): the name looks like a typo of "merge_frames" — kept
    because callers use it.
    """
    # shift tof into the window [tofCut, tofCut + tau)
    shifted = self.tof_e - self.tofCut + self.tau
    self.tof_e = np.remainder(shifted, self.tau) + self.tofCut
    # compensate the time offset between chopper pulse and tof zero
    self.tof_e = self.tof_e + self.tau * self.config.chopperPhaseOffset / 180.
def filter_strange_times(self):
    """Drop events whose time of flight exceeds two chopper periods.

    Filters ``tof_e``, ``pixelID_e`` and ``wallTime_e`` in lockstep and
    logs a warning when any events were removed.
    """
    keep = (self.tof_e <= 2*self.tau)
    n_before = np.shape(keep)[0]
    self.tof_e = self.tof_e[keep]
    self.pixelID_e = self.pixelID_e[keep]
    self.wallTime_e = self.wallTime_e[keep]
    n_dropped = n_before - np.shape(self.tof_e)[0]
    if n_dropped > 0.5:
        logging.warning(f'# strange times: {n_dropped}')
def extract_walltime(self, norm, totalNumber):
    """Assign each event the wall-clock time of the data packet it belongs to.

    ``dataPacket_p`` holds the start index of every packet within the event
    stream; each slice of events gets the corresponding packet timestamp.
    The first event time of a (non-normalisation) measurement defines the
    experiment start, and all times are reported relative to it.
    """
    times = np.empty(totalNumber)
    packets = self.dataPacket_p
    for idx in range(len(packets)-1):
        times[packets[idx]:packets[idx+1]] = self.dataPacketTime_p[idx]
    # events after the last packet boundary belong to the last packet
    times[packets[-1]:] = self.dataPacketTime_p[-1]
    self.wallTime_e = times
    if not self.startTime and not norm:
        self.startTime = self.wallTime_e[0]
    self.wallTime_e -= self.startTime
    logging.debug(f'wall time from {self.wallTime_e[0]} to {self.wallTime_e[-1]}')
def read_event_stream(self):
    """Load the raw detector event streams from the open hdf file into arrays."""
    # per-event time of flight; divided by 1e9 — presumably ns -> s, TODO confirm
    self.tof_e = np.array(self.hdf['/entry1/Amor/detector/data/event_time_offset'][:])/1.e9
    # per-event detector pixel id
    self.pixelID_e = np.array(self.hdf['/entry1/Amor/detector/data/event_id'][:], dtype=int)
    # start index of each data packet within the event arrays
    self.dataPacket_p = np.array(self.hdf['/entry1/Amor/detector/data/event_index'][:], dtype=np.uint64)
    # wall-clock timestamp of each packet, scaled by 1e9 (presumably ns -> s)
    self.dataPacketTime_p = np.array(self.hdf['/entry1/Amor/detector/data/event_time_zero'][:], dtype=np.uint64)/1e9
def read_individual_header(self):
self.chopperDistance = float(np.take(self.hdf['entry1/Amor/chopper/pair_separation'], 0))
self.detectorDistance = float(np.take(self.hdf['entry1/Amor/detector/transformation/distance'], 0))
self.chopperDetectorDistance = self.detectorDistance-float(np.take(self.hdf['entry1/Amor/chopper/distance'], 0))
self.tofCut = const.lamdaCut*self.chopperDetectorDistance/const.hdm*1.e-13
try:
self.mu = float(np.take(self.hdf['/entry1/Amor/master_parameters/mu/value'], 0))
self.nu = float(np.take(self.hdf['/entry1/Amor/master_parameters/nu/value'], 0))
@@ -216,96 +281,47 @@ class AmorData:
# TODO: figure out real stop time....
self.ctime=(self.hdf['/entry1/Amor/detector/data/event_time_zero'][-1]
- self.hdf['/entry1/Amor/detector/data/event_time_zero'][0]) / 1.e9
fileDate = datetime.fromisoformat( self.hdf['/entry1/start_time'][0].decode('utf-8') )
self.fileDate = datetime.fromisoformat( self.hdf['/entry1/start_time'][0].decode('utf-8') )
# add header content
if self.readHeaderInfo:
self.readHeaderInfo = False
self.header.measurement_instrument_settings = fileio.InstrumentSettings(
incident_angle = fileio.ValueRange(round(self.mu+self.kap+self.kad-0.5*self.div, 3),
round(self.mu+self.kap+self.kad+0.5*self.div, 3),
'deg'),
wavelength = fileio.ValueRange(const.lamdaCut, self.config.lambdaRange[1], 'angstrom'),
polarization = fileio.Polarization.unpolarized,
def read_header_info(self):
# read general information and first data set
logging.info(f'# meta data from: {self.file_list[0]}')
self.hdf = h5py.File(self.file_list[0], 'r', swmr=True)
title = self.hdf['entry1/title'][0].decode('utf-8')
proposal_id = self.hdf['entry1/proposal_id'][0].decode('utf-8')
user_name = self.hdf['entry1/user/name'][0].decode('utf-8')
user_affiliation = 'unknown'
user_email = self.hdf['entry1/user/email'][0].decode('utf-8')
user_orcid = None
sampleName = self.hdf['entry1/sample/name'][0].decode('utf-8')
model = self.hdf['entry1/sample/model'][0].decode('utf-8')
instrumentName = 'Amor'
source = self.hdf['entry1/Amor/source/name'][0].decode('utf-8')
sourceProbe = 'neutron'
start_time = self.hdf['entry1/start_time'][0].decode('utf-8')
self.start_date = start_time.split(' ')[0]
if self.config.sampleModel:
model = self.config.sampleModel
# assembling orso header information
self.header.owner = fileio.Person(
name=user_name,
affiliation=user_affiliation,
contact=user_email,
)
self.header.measurement_instrument_settings.mu = fileio.Value(round(self.mu, 3), 'deg', comment='sample angle to horizon')
self.header.measurement_instrument_settings.nu = fileio.Value(round(self.nu, 3), 'deg', comment='detector angle to horizon')
if norm:
self.header.measurement_additional_files.append(fileio.File(file=fileName.split('/')[-1], timestamp=fileDate))
else:
self.header.measurement_data_files.append(fileio.File(file=fileName.split('/')[-1], timestamp=fileDate))
logging.info(f'# mu = {self.mu:6.3f}, nu = {self.nu:6.3f}, kap = {self.kap:6.3f}, kad = {self.kap:6.3f}')
# TODO: should extract monitor from counts or beam current times time
self.monitor1 = self.ctime
self.monitor2 = self.monitor1
# read data event streams
tof_e = np.array(self.hdf['/entry1/Amor/detector/data/event_time_offset'][:])/1.e9
pixelID_e = np.array(self.hdf['/entry1/Amor/detector/data/event_id'][:], dtype=int)
totalNumber = np.shape(tof_e)[0]
wallTime_e = np.empty(totalNumber)
dataPacket_p = np.array(self.hdf['/entry1/Amor/detector/data/event_index'][:], dtype=np.uint64)
dataPacketTime_p = np.array(self.hdf['/entry1/Amor/detector/data/event_time_zero'][:], dtype=np.uint64)/1e9
#for i, index in enumerate(dataPacket_p):
# wallTime_e[index:] = dataPacketTime_p[i]
for i in range(len(dataPacket_p)-1):
wallTime_e[dataPacket_p[i]:dataPacket_p[i+1]] = dataPacketTime_p[i]
wallTime_e[dataPacket_p[-1]:] = dataPacketTime_p[-1]
if not self.startTime and not norm:
self.startTime = wallTime_e[0]
wallTime_e -= self.startTime
logging.debug(f'wall time from {wallTime_e[0]} to {wallTime_e[-1]}')
# filter 'strange' tof times > 2 tau
if True:
filter_e = (tof_e <= 2*self.tau)
tof_e = tof_e[filter_e]
pixelID_e = pixelID_e[filter_e]
wallTime_e = wallTime_e[filter_e]
if np.shape(filter_e)[0]-np.shape(tof_e)[0] > 0.5 :
logging.warning(f'# strange times: {np.shape(filter_e)[0]-np.shape(tof_e)[0]}')
tof_e = np.remainder( tof_e - self.tofCut + self.tau, self.tau) + self.tofCut # tof shifted to 1 frame
tof_e = tof_e + self.tau * self.config.chopperPhaseOffset / 180. # correction for time offset between chopper pulse and tof zero
# resolve pixel ID into y and z indices, x position and angle
(detY_e, detZ_e, detXdist_e, delta_e) = pixelLookUp[np.int_(pixelID_e)-1,:].T
# define mask and filter y range
mask_e = (self.config.yRange[0] <= detY_e) & (detY_e <= self.config.yRange[1])
# correct tof for beam size effect at chopper: t_cor = (delta / 180 deg) * tau
# TODO: check for correctness
if not self.config.offSpecular:
tof_e -= ( delta_e / 180. ) * self.tau
# lambda
lamda_e = 1.e13 * tof_e * const.hdm / (self.chopperDetectorDistance + detXdist_e)
self.lamdaMax = const.lamdaCut + 1.e13 * self.tau * const.hdm / (self.chopperDetectorDistance + 124.)
mask_e = np.logical_and(mask_e, (self.config.lambdaRange[0] <= lamda_e) & (lamda_e <= self.config.lambdaRange[1]))
# alpha_f
alphaF_e = self.nu - self.mu + delta_e
# q_z
if self.config.offSpecular:
alphaI = self.kap + self.kad + self.mu
qz_e = 2 * np.pi * ( np.sin( np.deg2rad(alphaF_e) ) + np.sin( np.deg2rad( alphaI ) ) ) / lamda_e
qx_e = 2 * np.pi * ( np.cos( np.deg2rad(alphaF_e) ) - np.cos( np.deg2rad( alphaI ) ) ) / lamda_e
self.header.measurement_scheme = 'energy-dispersive',
else:
qz_e = 4 * np.pi * np.sin( np.deg2rad(alphaF_e) ) / lamda_e
# qx_e = 0.
self.header.measurement_scheme = 'angle- and energy-dispersive'
# filter q_z range
if self.config.qzRange[1] < 0.3 and not norm:
mask_e = np.logical_and(mask_e, (self.config.qzRange[0] <= qz_e) & (qz_e <= self.config.qzRange[1]))
self.detZ_e = detZ_e[mask_e]
self.lamda_e = lamda_e[mask_e]
self.wallTime_e = wallTime_e[mask_e]
logging.info(f'# number of events: total = {totalNumber:7d}, filtered = {np.shape(self.lamda_e)[0]:7d}')
if user_orcid:
self.header.owner.orcid = user_orcid
self.header.experiment = fileio.Experiment(
title=title,
instrument=instrumentName,
start_date=self.start_date,
probe=sourceProbe,
facility=source,
proposalID=proposal_id
)
self.header.sample = fileio.Sample(
name=sampleName,
model=model,
sample_parameters=None,
)
self.header.measurement_scheme = 'angle- and energy-dispersive'

View File

@@ -1,4 +1,5 @@
import os
import cProfile
from unittest import TestCase
from libeos import options, reduction, logconfig
@@ -6,12 +7,22 @@ logconfig.setup_logging()
logconfig.update_loglevel(True, False)
class FullAmorTest(TestCase):
@classmethod
def setUpClass(cls):
    # One shared profiler for the whole test class; individual tests
    # enable/disable it around their bodies.
    cls.pr = cProfile.Profile()
@classmethod
def tearDownClass(cls):
    # Persist the accumulated profiling data for offline inspection.
    cls.pr.dump_stats("profile_test.prof")
def setUp(self):
    # Start profiling before each test; tearDown disables it again.
    self.pr.enable()
    # Reader configuration shared by the tests; dataPath points at the
    # sibling test_data directory.
    self.reader_config = options.ReaderConfig(
        year=2023,
        dataPath=os.path.join('..', "test_data"))
def tearDown(self):
self.pr.disable()
for fi in ['test.Rqz.ort', '614.norm']:
try:
os.unlink(os.path.join(self.reader_config.dataPath, fi))
@@ -46,6 +57,11 @@ class FullAmorTest(TestCase):
outputName='test'
)
config=options.EOSConfig(self.reader_config, experiment_config, reduction_config, output_config)
# run three times to get similar timing to noslicing runs
reducer = reduction.AmorReduction(config)
reducer.reduce()
reducer = reduction.AmorReduction(config)
reducer.reduce()
reducer = reduction.AmorReduction(config)
reducer.reduce()