separate hdf file header reading, start events2histogram and fix test

This commit is contained in:
2025-10-07 11:20:56 +02:00
parent aacbe3ed6f
commit 99af021b3e
9 changed files with 254 additions and 78 deletions

View File

@@ -8,7 +8,7 @@ Author: Jochen Stahn (algorithms, python draft),
import logging
# need to do absolute import here as pyinstaller requires it
from eos.options import EOSConfig, ReaderConfig, ExperimentConfig, ReductionConfig, OutputConfig
from eos.options import ReflectivityConfig, ReaderConfig, ExperimentConfig, ReflectivityReductionConfig, ReflectivityOutputConfig
from eos.command_line import commandLineArgs
from eos.logconfig import setup_logging, update_loglevel
@@ -16,15 +16,15 @@ def main():
setup_logging()
# read command line arguments and generate classes holding configuration parameters
clas = commandLineArgs([ReaderConfig, ExperimentConfig, ReductionConfig, OutputConfig],
clas = commandLineArgs([ReaderConfig, ExperimentConfig, ReflectivityReductionConfig, ReflectivityOutputConfig],
'eos')
update_loglevel(clas.verbose)
reader_config = ReaderConfig.from_args(clas)
experiment_config = ExperimentConfig.from_args(clas)
reduction_config = ReductionConfig.from_args(clas)
output_config = OutputConfig.from_args(clas)
config = EOSConfig(reader_config, experiment_config, reduction_config, output_config)
reduction_config = ReflectivityReductionConfig.from_args(clas)
output_config = ReflectivityOutputConfig.from_args(clas)
config = ReflectivityConfig(reader_config, experiment_config, reduction_config, output_config)
logging.warning('######## eos - data reduction for Amor ########')

32
eos/e2h.py Normal file
View File

@@ -0,0 +1,32 @@
import logging
# need to do absolute import here as pyinstaller requires it
from eos.options import E2HConfig, ReaderConfig, ExperimentConfig
from eos.command_line import commandLineArgs
from eos.logconfig import setup_logging, update_loglevel
def main():
setup_logging()
# read command line arguments and generate classes holding configuration parameters
clas = commandLineArgs([ReaderConfig, ExperimentConfig],
'events2histogram')
update_loglevel(clas.verbose)
reader_config = ReaderConfig.from_args(clas)
experiment_config = ExperimentConfig.from_args(clas)
config = E2HConfig(reader_config, experiment_config, )
logging.warning('######## events2histogram - data vizualization for Amor ########')
# only import heavy module if sufficient command line parameters were provided
from eos.reduction_reflectivity import ReflectivityReduction
# Create reducer with these arguments
reducer = ReflectivityReduction(config)
# Perform actual reduction
reducer.reduce()
logging.info('######## events2histogram - finished ########')
if __name__ == '__main__':
main()

View File

@@ -83,14 +83,10 @@ class AssociatePulseWithMonitor(EventDataAction):
def perform_action(self, dataset: EventDatasetProtocol)->None:
logging.debug(f' using monitor type {self.monitorType}')
if self.monitorType in [MonitorType.proton_charge or MonitorType.debug]:
monitorPerPulse = self.get_current_per_pulse(dataset.data.pulses.time,
dataset.data.pulses.monitor = self.get_current_per_pulse(dataset.data.pulses.time,
dataset.data.proton_current.time,
dataset.data.proton_current.current)\
* 2*dataset.timing.tau * 1e-3
# filter low-current pulses
dataset.data.pulses.monitor = np.where(
monitorPerPulse > 2*dataset.timing.tau * self.lowCurrentThreshold * 1e-3,
monitorPerPulse, 0)
elif self.monitorType==MonitorType.time:
dataset.data.pulses.monitor = 2*dataset.timing.tau
else: # pulses
@@ -125,8 +121,11 @@ class FilterMonitorThreshold(EventDataAction):
if not 'wallTime' in dataset.data.events.dtype.names:
raise ValueError(
"FilterMonitorThreshold requires walltTime to be extracted, please run ExtractWalltime first")
goodTimeS = dataset.data.pulses.time[dataset.data.pulses.monitor!=0]
low_current_filter = dataset.data.pulses.monitor>2*dataset.timing.tau*self.lowCurrentThreshold*1e-3
dataset.data.pulses.monitor[np.logical_not(low_current_filter)] = 0.
goodTimeS = dataset.data.pulses.time[low_current_filter]
filter_e = np.logical_not(np.isin(dataset.data.events.wallTime, goodTimeS))
dataset.data.events.mask += EVENT_BITMASKS['MonitorThreshold']*filter_e
logging.info(f' low-beam (<{self.lowCurrentThreshold} mC) rejected pulses: '
f'{dataset.data.pulses.monitor.shape[0]-goodTimeS.shape[0]} '

View File

@@ -34,47 +34,21 @@ if platform.node().startswith('amor'):
else:
NICOS_CACHE_DIR = None
class AmorEventData:
class AmorHeader:
"""
Read one amor NeXus datafile and extract relevant header information.
Implements EventDatasetProtocol
Collects header information from Amor NeXus file without reading event data.
"""
file_list: List[str]
first_index: int
last_index: int = -1
EOF: bool = False
max_events: int
owner: fileio.Person
experiment: fileio.Experiment
sample: fileio.Sample
instrument_settings: fileio.InstrumentSettings
geometry: AmorGeometry
timing: AmorTiming
data: AmorEventStream
eventStartTime: np.int64
def __init__(self, fileName:Union[str, h5py.File, BinaryIO], first_index:int=0, max_events:int=100_000_000):
def __init__(self, fileName:Union[str, h5py.File, BinaryIO]):
if type(fileName) is str:
self.file_list = [fileName]
self.hdf = h5py.File(fileName, 'r', swmr=True)
elif type(fileName) is h5py.File:
self.file_list = [fileName.filename]
self.hdf = fileName
else:
self.file_list = [repr(fileName)]
self.hdf = h5py.File(fileName, 'r')
self.first_index = first_index
self.max_events = max_events
self.read_header_info()
self.read_instrument_configuration()
self.read_event_stream()
# actions applied to any dataset
self.read_chopper_trigger_stream()
self.read_proton_current_stream()
if type(fileName) is str:
# close the input file to free memory, only if the file was opened in this object
@@ -223,6 +197,55 @@ class AmorEventData:
header.sample = self.sample
header.measurement_instrument_settings = self.instrument_settings
class AmorEventData(AmorHeader):
"""
Read one amor NeXus datafile and extract relevant header information.
Implements EventDatasetProtocol
"""
file_list: List[str]
first_index: int
last_index: int = -1
EOF: bool = False
max_events: int
owner: fileio.Person
experiment: fileio.Experiment
sample: fileio.Sample
instrument_settings: fileio.InstrumentSettings
geometry: AmorGeometry
timing: AmorTiming
data: AmorEventStream
eventStartTime: np.int64
def __init__(self, fileName:Union[str, h5py.File, BinaryIO], first_index:int=0, max_events:int=100_000_000):
if type(fileName) is str:
self.file_list = [fileName]
hdf = h5py.File(fileName, 'r', swmr=True)
elif type(fileName) is h5py.File:
self.file_list = [fileName.filename]
hdf = fileName
else:
self.file_list = [repr(fileName)]
hdf = h5py.File(fileName, 'r')
self.first_index = first_index
self.max_events = max_events
super().__init__(hdf)
self.hdf = hdf
self.read_event_stream()
# actions applied to any dataset
self.read_chopper_trigger_stream()
self.read_proton_current_stream()
if type(fileName) is str:
# close the input file to free memory, only if the file was opened in this object
self.hdf.close()
del(self.hdf)
def read_event_stream(self):
"""
Read the actual event data from file. If file is too large, find event index from packets

View File

@@ -5,6 +5,7 @@ Class to handle Orso header information that changes gradually during the reduct
import platform
import sys
from datetime import datetime
from typing import List, Literal
from orsopy import fileio
@@ -13,13 +14,16 @@ from . import __version__
class Header:
"""orso compatible output file header content"""
owner: fileio.Person
experiment: fileio.Experiment
sample: fileio.Sample
measurement_instrument_settings: fileio.InstrumentSettings
measurement_scheme: Literal["angle- and energy-dispersive", "angle-dispersive", "energy-dispersive"]
measurement_data_files: List[fileio.File]
measurement_additional_files: List[fileio.File]
def __init__(self):
self.owner = None
self.experiment = None
self.sample = None
self.measurement_instrument_settings = None
self.measurement_scheme = None
self.measurement_data_files = []
self.measurement_additional_files = []
@@ -64,10 +68,3 @@ class Header:
if columns is None:
columns = self.columns()
return fileio.Orso(ds, red, columns+extra_columns)
#-------------------------------------------------------------------------------------------------
def create_call_string(self):
callString = ' '.join(sys.argv)
if '-Y' not in callString:
callString += f' -Y {datetime.now().year}'
return callString
#-------------------------------------------------------------------------------------------------

View File

@@ -10,6 +10,7 @@ import numpy as np
import logging
try:
from enum import StrEnum
except ImportError:
@@ -316,7 +317,7 @@ class NormalisationMethod(StrEnum):
under_illuminated = 'u'
@dataclass
class ReductionConfig(ArgParsable):
class ReflectivityReductionConfig(ArgParsable):
fileIdentifier: List[str] = field(
metadata={
'short': 'f',
@@ -423,7 +424,7 @@ class PlotColormaps(StrEnum):
nipy_spectral = "nipy_spectral"
@dataclass
class OutputConfig(ArgParsable):
class ReflectivityOutputConfig(ArgParsable):
outputFormats: List[OutputFomatOption] = field(
default_factory=lambda: ['Rqz.ort'],
metadata={
@@ -492,11 +493,11 @@ class OutputConfig(ArgParsable):
# ===================================
@dataclass
class EOSConfig:
class ReflectivityConfig:
reader: ReaderConfig
experiment: ExperimentConfig
reduction: ReductionConfig
output: OutputConfig
reduction: ReflectivityReductionConfig
output: ReflectivityOutputConfig
_call_string_overwrite=None
@@ -589,7 +590,81 @@ class EOSConfig:
logging.debug(f'Argument list build in EOSConfig.call_string: {mlst}')
return mlst
class E2HPlotSelection(StrEnum):
All = 'all'
YZ = 'Iyz'
LT = 'Ilt'
TZ = 'Itz'
Q = 'Iq'
L = 'Il'
T = 'It'
ToF = 'tof'
class E2HPlotArguments(StrEnum):
Default = 'def'
OutputFile = 'file'
Logarithmic = 'log'
@dataclass
class E2HReductionConfig(ArgParsable):
fileIdentifier: str = field(
metadata={
'short': 'f',
'priority': 100,
'group': 'input data',
'help': 'file number(s) or offset (if < 1), events2histogram only accepts one short code',
},
)
show_plot: bool = field(
default=False,
metadata={
'short': 'sp',
'group': 'output',
'help': 'show matplotlib graphs of results',
},
)
plot: E2HPlotSelection = field(
default=E2HPlotSelection.All,
metadata={
'short': 'p',
'group': 'output',
'help': 'select what to plot or write',
},
)
plotArgs: E2HPlotArguments = field(
default=E2HPlotArguments.Default,
metadata={
'short': 'pa',
'group': 'output',
'help': 'select configuration for plot',
},
)
update: bool = field(
default=False,
metadata={
'short': 'u',
'group': 'output',
'help': 'keep running in the background and update plot when file is modified, implies --plot',
},
)
fast: bool = field(
default=False,
metadata={
'group': 'input data',
'help': 'skip some reduction steps to speed up calculations',
},
)
@dataclass
class E2HConfig:
reader: ReaderConfig
experiment: ExperimentConfig
reduction: E2HReductionConfig

50
eos/reduction_e2h.py Normal file
View File

@@ -0,0 +1,50 @@
"""
Events 2 histogram, quick reduction of single file to display during experiment.
Can be used as a live preview with automatic update when files are modified.
"""
import logging
from .file_reader import AmorEventData
from .header import Header
from .instrument import LZGrid
from .normalization import LZNormalisation
from .options import E2HConfig, IncidentAngle, MonitorType, NormalisationMethod, MONITOR_UNITS, ExperimentConfig
from . import event_handling as eh, event_analysis as ea
class E2HReduction:
config: E2HConfig
header: Header
event_actions: eh.EventDataAction
def __init__(self, config: E2HConfig):
self.config = config
self.header = Header()
self.prepare_actions()
def prepare_actions(self):
"""
Does not do any actual reduction.
"""
# Actions on datasets not used for normalization
self.event_actions = eh.ApplyPhaseOffset(self.config.experiment.chopperPhaseOffset)
self.event_actions |= eh.CorrectChopperPhase()
self.event_actions |= ea.ExtractWalltime()
self.event_actions |= eh.AssociatePulseWithMonitor(self.config.experiment.monitorType)
if self.config.experiment.monitorType in [MonitorType.proton_charge or MonitorType.debug]:
# the filtering only makes sense if using actual monitor data, not time
self.event_actions |= eh.FilterMonitorThreshold(self.config.experiment.lowCurrentThreshold)
self.event_actions |= eh.FilterStrangeTimes()
self.event_actions |= ea.MergeFrames()
self.event_actions |= ea.AnalyzePixelIDs(self.config.experiment.yRange)
self.event_actions |= eh.TofTimeCorrection(self.config.experiment.incidentAngle==IncidentAngle.alphaF)
self.event_actions |= ea.CalculateWavelength(self.config.experiment.lambdaRange)
self.event_actions |= eh.ApplyMask()
self.grid = LZGrid([0.01], [0.0, 0.25])
def reduce(self):
self.norm = LZNormalisation.unity(self.grid)

View File

@@ -8,7 +8,7 @@ from orsopy import fileio
from .file_reader import AmorEventData
from .header import Header
from .path_handling import PathResolver
from .options import EOSConfig, IncidentAngle, MonitorType, NormalisationMethod, MONITOR_UNITS
from .options import ReflectivityConfig, IncidentAngle, MonitorType, NormalisationMethod, MONITOR_UNITS
from .instrument import LZGrid
from .normalization import LZNormalisation
from . import event_handling as eh, event_analysis as ea
@@ -16,12 +16,12 @@ from .projection import LZProjection
class ReflectivityReduction:
config: EOSConfig
config: ReflectivityConfig
header: Header
normevent_actions: eh.EventDataAction
dataevent_actions: eh.EventDataAction
def __init__(self, config: EOSConfig):
def __init__(self, config: ReflectivityConfig):
self.config = config
self.header = Header()
@@ -44,7 +44,7 @@ class ReflectivityReduction:
self.normevent_actions |= eh.AssociatePulseWithMonitor(self.config.experiment.monitorType)
if self.config.experiment.monitorType in [MonitorType.proton_charge, MonitorType.debug]:
self.normevent_actions |= ea.ExtractWalltime()
self.dataevent_actions |= eh.FilterMonitorThreshold(self.config.experiment.lowCurrentThreshold)
self.normevent_actions |= eh.FilterMonitorThreshold(self.config.experiment.lowCurrentThreshold)
self.normevent_actions |= eh.FilterStrangeTimes()
self.normevent_actions |= ea.MergeFrames()
self.normevent_actions |= ea.AnalyzePixelIDs(self.config.experiment.yRange)

View File

@@ -14,7 +14,7 @@ class FullAmorTest(TestCase):
def setUpClass(cls):
# generate map for option defaults
cls._field_defaults = {}
for opt in [options.ExperimentConfig, options.ReductionConfig, options.OutputConfig]:
for opt in [options.ExperimentConfig, options.ReflectivityReductionConfig, options.ReflectivityOutputConfig]:
defaults = {}
for field in fields(opt):
if field.default not in [None, MISSING]:
@@ -58,28 +58,28 @@ class FullAmorTest(TestCase):
muOffset=0.0,
sampleModel='air | 10 H2O | D2O'
)
reduction_config = options.ReductionConfig(
normalisationMethod=self._field_defaults['ReductionConfig']['normalisationMethod'],
reduction_config = options.ReflectivityReductionConfig(
normalisationMethod=self._field_defaults['ReflectivityReductionConfig']['normalisationMethod'],
qResolution=0.01,
qzRange=self._field_defaults['ReductionConfig']['qzRange'],
qzRange=self._field_defaults['ReflectivityReductionConfig']['qzRange'],
thetaRange=(-0.75, 0.75),
fileIdentifier=["6003-6005"],
scale=[1],
normalisationFileIdentifier=[],
timeSlize=[300.0]
)
output_config = options.OutputConfig(
output_config = options.ReflectivityOutputConfig(
outputFormats=[options.OutputFomatOption.Rqz_ort],
outputName='test',
outputPath='test_results',
)
config=options.EOSConfig(self.reader_config, experiment_config, reduction_config, output_config)
config=options.ReflectivityConfig(self.reader_config, experiment_config, reduction_config, output_config)
# run three times to get similar timing to noslicing runs
reducer = reduction.ReflectivityReduction(config)
reducer = reduction_reflectivity.ReflectivityReduction(config)
reducer.reduce()
reducer = reduction.ReflectivityReduction(config)
reducer = reduction_reflectivity.ReflectivityReduction(config)
reducer.reduce()
reducer = reduction.ReflectivityReduction(config)
reducer = reduction_reflectivity.ReflectivityReduction(config)
reducer.reduce()
def test_noslicing(self):
@@ -96,24 +96,24 @@ class FullAmorTest(TestCase):
nu=0,
muOffset=0.0,
)
reduction_config = options.ReductionConfig(
normalisationMethod=self._field_defaults['ReductionConfig']['normalisationMethod'],
reduction_config = options.ReflectivityReductionConfig(
normalisationMethod=self._field_defaults['ReflectivityReductionConfig']['normalisationMethod'],
qResolution=0.01,
qzRange=self._field_defaults['ReductionConfig']['qzRange'],
qzRange=self._field_defaults['ReflectivityReductionConfig']['qzRange'],
thetaRange=(-0.75, 0.75),
fileIdentifier=["6003", "6004", "6005"],
scale=[1],
normalisationFileIdentifier=["5952"],
autoscale=(0.0, 0.05),
)
output_config = options.OutputConfig(
output_config = options.ReflectivityOutputConfig(
outputFormats=[options.OutputFomatOption.Rqz_ort],
outputName='test',
outputPath='test_results',
)
config=options.EOSConfig(self.reader_config, experiment_config, reduction_config, output_config)
reducer = reduction.ReflectivityReduction(config)
config=options.ReflectivityConfig(self.reader_config, experiment_config, reduction_config, output_config)
reducer = reduction_reflectivity.ReflectivityReduction(config)
reducer.reduce()
# run second time to reuse norm file
reducer = reduction.ReflectivityReduction(config)
reducer = reduction_reflectivity.ReflectivityReduction(config)
reducer.reduce()