2 Commits
v3.2.8 ... main

Author SHA1 Message Date
6951b15630 Add new NeXus file mapping, extra metadata and facility for legacy file mapping compatibility
All checks were successful
Unit Testing / test (3.11) (push) Successful in 48s
Unit Testing / test (3.10) (push) Successful in 54s
Unit Testing / test (3.8) (push) Successful in 48s
Unit Testing / test (3.12) (push) Successful in 54s
Unit Testing / test (3.9) (push) Successful in 54s
2026-03-19 08:59:57 +01:00
12dc8730d1 Implement compatibility with older nexus file mappings
All checks were successful
Unit Testing / test (3.10) (push) Successful in 48s
Unit Testing / test (3.11) (push) Successful in 47s
Unit Testing / test (3.12) (push) Successful in 49s
Unit Testing / test (3.8) (push) Successful in 48s
Unit Testing / test (3.9) (push) Successful in 47s
2026-03-18 14:58:42 +01:00
3 changed files with 186 additions and 66 deletions

View File

@@ -2,5 +2,5 @@
Package to handle data reduction at AMOR instrument to be used by __main__.py script.
"""
__version__ = '3.2.8'
__date__ = '2026-03-18'
__version__ = '3.3.0'
__date__ = '2026-03-19'

44
eos/compat.py Normal file
View File

@@ -0,0 +1,44 @@
"""
Presever compatibility with previous versions of AMOR datafiles.
"""
from datetime import datetime
# old hdf parameter mappings, key is last date of validity
legacy_hdf_paths = {
datetime(2026, 3, 18).timestamp():
dict(
title=('entry1/title', str),
proposal_id=('entry1/proposal_id', str),
user_name=('entry1/user/name', str),
user_email=('entry1/user/email', str),
sample_name=('entry1/sample/name', str),
source_name=('entry1/Amor/source/name', str),
sample_model=('entry1/sample/model', str),
start_time=('entry1/start_time', str),
start_time_fallback=('entry1/Amor/instrument_control_parameters/start_time', str),
chopper_separation=('entry1/Amor/chopper/pair_separation', float),
detector_distance=('entry1/Amor/detector/transformation/distance', float),
chopper_distance=('entry1/Amor/chopper/distance', float),
sample_temperature=('entry1/sample/temperature', float),
sample_magnetic_field=('entry1/sample/magnetic_field', float),
mu=('entry1/Amor/instrument_control_parameters/mu', float, 'mu'),
nu=('entry1/Amor/instrument_control_parameters/nu', float, 'nu'),
kap=('entry1/Amor/instrument_control_parameters/kappa', float, 'kappa'),
kad=('entry1/Amor/instrument_control_parameters/kappa_offset', float, 'kappa_offset'),
div=('entry1/Amor/instrument_control_parameters/div', float, 'div'),
ch1_trigger_phase=('entry1/Amor/chopper/ch1_trigger_phase', float, 'ch1_trigger_phase'),
ch2_trigger_phase=('entry1/Amor/chopper/ch2_trigger_phase', float, 'ch2_trigger_phase'),
chopper_speed=('entry1/Amor/chopper/rotation_speed', float, 'chopper_phase'),
chopper_phase=('entry1/Amor/chopper/phase', float, 'chopper_phase'),
polarization_config_label=('entry1/Amor/polarization/configuration', int, 'polarization_config_label', '/*'),
data=('entry1/Amor/detector/data', None), # data group used to load events from
trigger=('entry1/Amor/chopper/ch2_trigger', float),
proton_current=('entry1/Amor/detector/proton_current', None),
),
}
# create a sorted list of validity timestamps for quick comparison
legacy_cutoffs = list(sorted(legacy_hdf_paths.keys()))

View File

@@ -14,7 +14,7 @@ from datetime import datetime
from orsopy import fileio
from orsopy.fileio.model_language import SampleModel
from . import const
from . import const, compat
from .header import Header
from .event_data_types import AmorGeometry, AmorTiming, AmorEventStream, LOG_TYPE, PACKET_TYPE, EVENT_TYPE, PULSE_TYPE, \
PC_TYPE
@@ -29,6 +29,7 @@ except ImportError:
# Time zone used to interpret time strings
AMOR_LOCAL_TIMEZONE = zoneinfo.ZoneInfo(key='Europe/Zurich')
UTC = zoneinfo.ZoneInfo(key='UTC')
NO_DEFAULT_VALUE = object() # just for allowing None default value in AmorHeader.rv
class AmorHeader:
"""
@@ -40,30 +41,50 @@ class AmorHeader:
proposal_id=('entry1/proposal_id', str),
user_name=('entry1/user/name', str),
user_email=('entry1/user/email', str),
user_affiliation=('entry1/user/affiliation', str),
sample_name=('entry1/sample/name', str),
source_name=('entry1/Amor/source/name', str),
sample_model=('entry1/sample/model', str),
sample_geometry=('entry1/sample/geometry', dict),
source_name=('entry1/Amor/source/name', str),
start_time=('entry1/start_time', str),
start_time_fallback=('entry1/Amor/instrument_control_parameters/start_time', str),
start_time_fallback=('entry1/Amor/measurement_configuration/start_time', str),
end_time=('entry1/end_time', str),
chopper_separation=('entry1/Amor/chopper/pair_separation', float),
detector_distance=('entry1/Amor/detector/transformation/distance', float),
chopper_distance=('entry1/Amor/chopper/distance', float),
sample_temperature=('entry1/sample/temperature', float),
sample_magnetic_field=('entry1/sample/magnetic_field', float),
detector_distance=('entry1/Amor/detector/transformation/distance', float),
sample_temperature=('entry1/sample/environment/temperature', float),
sample_magnetic_field=('entry1/sample/environment/magnetic_field', float),
sample_current=('entry1/sample/environment/current', float),
sample_voltage=('entry1/sample/environment/voltage', float),
sample_gas_pressure=('entry1/sample/environment/gas_pressure', float),
mu=('entry1/Amor/instrument_control_parameters/mu', float, 'mu'),
nu=('entry1/Amor/instrument_control_parameters/nu', float, 'nu'),
kap=('entry1/Amor/instrument_control_parameters/kappa', float, 'kappa'),
kad=('entry1/Amor/instrument_control_parameters/kappa_offset', float, 'kappa_offset'),
div=('entry1/Amor/instrument_control_parameters/div', float, 'div'),
mu=('entry1/measurement_configuration/mu', float, 'mu'),
nu=('entry1/measurement_configuration/nu', float, 'nu'),
kap=('entry1/measurement_configuration/kappa', float, 'kappa'),
kad=('entry1/measurement_configuration/kappa_offset', float, 'kappa_offset'),
div=('entry1/measurement_configuration/div', float, 'div'),
virtual_source_horizontal=('entry1/Amor/virtual_source/horizontal', float),
virtual_source_vertical=('entry1/Amor/virtual_source/vertical', float),
ch1_trigger_phase=('entry1/Amor/chopper/ch1_trigger_phase', float, 'ch1_trigger_phase'),
ch2_trigger_phase=('entry1/Amor/chopper/ch2_trigger_phase', float, 'ch2_trigger_phase'),
chopper_speed=('entry1/Amor/chopper/rotation_speed', float, 'chopper_phase'),
chopper_phase=('entry1/Amor/chopper/phase', float, 'chopper_phase'),
polarization_config_label=('entry1/Amor/polarization/configuration', int, 'polarization_config_label', '/*'),
data=('entry1/detector/data', None), # data group used to load events from
trigger=('entry1/detector/trigger', float),
monitor=('entry1/detector/monitor', float),
proton_current=('entry1/detector/proton_current', float),
acquisition_filter=('entry1/detector/acquisition_filter', int),
)
hdf: h5py.File
# environment parameters from above used for sample header (sample_{param} key)
SEE_PARAMS = ['temperature', 'magnetic_field', 'current', 'voltage', 'gas_pressure']
def __init__(self, fileName:Union[str, h5py.File, BinaryIO]):
if type(fileName) is str:
logging.warning(f' {fileName.split("/")[-1]}')
@@ -88,11 +109,24 @@ class AmorHeader:
year = self.fileDate.strftime('%Y')
return lookup_nicos_value(key, nicos_key, dtype, suffix, year)
def rv(self, key):
def rv(self, key, with_unit=False, default=NO_DEFAULT_VALUE):
"""
Generic read value method based on key in hdf_paths dictionary.
If with_unit is True, return tuple of value, unit.
"""
hdf_path, dtype, *nicos = self.hdf_paths[key]
# TODO: refactor method for better readability/reliability
try:
hdf_path, dtype, *nicos = self.hdf_paths[key]
except KeyError:
if default is not NO_DEFAULT_VALUE:
logging.debug(f' Key {key} not present in file version, using default value')
# if default is given, ignore missing hdf_path for backward compatibility
if with_unit:
return default, ''
else:
return default
else:
raise
try:
hdfgrp = self.hdf[hdf_path]
if hdfgrp.attrs.get('NX_class', None) == 'NXlog':
@@ -108,19 +142,38 @@ class AmorHeader:
output = dtype(hdfgrp['value'][start_index, 0])
# make sure key is only appended if no exception was raised
self._log_keys.append(key)
return output
if with_unit:
unit = hdfgrp['value'].attrs.get('units', '')
return output, unit
else:
return output
elif dtype is str:
return self.read_string(hdf_path)
else:
if len(hdfgrp.shape)==1:
return dtype(hdfgrp[0])
output = dtype(hdfgrp[0])
else:
return dtype(hdfgrp[()])
output = dtype(hdfgrp[()])
if with_unit:
unit = hdfgrp.attrs.get('units', '')
return output, unit
else:
return output
except (KeyError, IndexError):
if nicos:
nicos_key = nicos[0]
suffix = nicos[1] if len(nicos)>1 else ''
return self._replace_if_missing(key, nicos_key, dtype, suffix)
output = self._replace_if_missing(key, nicos_key, dtype, suffix)
if with_unit:
return output, ''
else:
return output
elif default is not NO_DEFAULT_VALUE:
logging.debug(f' Using default value for {key}')
if with_unit:
return default, ''
else:
return default
else:
raise
@@ -138,29 +191,41 @@ class AmorHeader:
start_time = self.rv('start_time')
except KeyError:
start_time = self.rv('start_time_fallback')
end_time = self.rv('end_time', default=None)
# extract start time as unix time, adding UTC offset of 1h to time string
if start_time.endswith('Z') and sys.version_info.minor<11:
# older python versions did not support Z format
start_time = start_time[:-1]
TZ = UTC
if end_time:
end_time = end_time[:-1]
else:
TZ = AMOR_LOCAL_TIMEZONE
start_date = datetime.fromisoformat(start_time)
end_date = datetime.fromisoformat(end_time) if end_time else None
self.fileDate = start_date.replace(tzinfo=TZ)
self._start_time_ns = np.uint64(self.fileDate.timestamp()*1e9)
start_timestamp = self.fileDate.timestamp()
if start_timestamp<=compat.legacy_cutoffs[-1]:
for cutoff in compat.legacy_cutoffs:
if start_timestamp<=cutoff:
self.hdf_paths = compat.legacy_hdf_paths[cutoff]
break
cutoff_date = datetime.fromtimestamp(cutoff)
logging.info(f" Detected legacy file format, using older mapping from before {cutoff_date.strftime('%Y-%m-%d')}")
self._start_time_ns = np.uint64(start_timestamp*1e9)
# read general information and first data set
title = self.rv('title')
proposal_id = self.rv('proposal_id')
user_name = self.rv('user_name')
user_affiliation = 'unknown'
user_affiliation = self.rv('user_affiliation', default='unknown')
user_email = self.rv('user_email')
user_orcid = None
sampleName = self.rv('sample_name')
instrumentName = 'Amor'
source = self.rv('source_name')
sourceProbe = 'neutron'
model = self.rv('sample_model')
if 'stack' in model:
import yaml
@@ -178,12 +243,14 @@ class AmorHeader:
self.experiment = fileio.Experiment(
title=title,
instrument=instrumentName,
instrument='Amor',
start_date=start_date,
probe=sourceProbe,
probe='neutron',
facility=source,
proposalID=proposal_id
)
if end_date:
self.experiment.end_date = end_date
if model['stack'] == '':
om = None
else:
@@ -193,19 +260,15 @@ class AmorHeader:
model=om,
sample_parameters={},
)
# while event times are not evaluated, use average_value reported in file for SEE
if self.hdf['entry1/sample'].get('temperature', None) is not None:
try:
sample_temperature = self.rv('sample_temperature')
except IndexError: pass
else:
self.sample.sample_parameters['temperature'] = fileio.Value(sample_temperature, unit='K')
if self.hdf['entry1/sample'].get('magnetic_field', None) is not None:
try:
sample_magnetic_field = self.rv('sample_magnetic_field')
except IndexError: pass
else:
self.sample.sample_parameters['magnetic_field'] = fileio.Value(sample_magnetic_field, unit='T')
sample_geometry = self.rv('sample_geometry', default=None)
if sample_geometry:
self.sample.geometry = sample_geometry
# while event times are not evaluated, use first value reported in file for SEE
for param in self.SEE_PARAMS:
key = f'sample_{param}'
value, unit = self.rv(key, with_unit=True, default=None)
if value is not None:
self.sample.sample_parameters[param] = fileio.Value(value, unit=unit)
def read_instrument_configuration(self):
chopperSeparation = self.rv('chopper_separation')
@@ -218,13 +281,16 @@ class AmorHeader:
kap = self.rv('kap')
kad = self.rv('kad')
div = self.rv('div')
VS_H = self.rv('virtual_source_horizontal', default=-1.)
VS_V = self.rv('virtual_source_vertical', default=-1.)
ch1TriggerPhase = self.rv('ch1_trigger_phase')
ch2TriggerPhase = self.rv('ch2_trigger_phase')
try:
chopperTriggerTime = (float(self.hdf['entry1/Amor/chopper/ch2_trigger/event_time_zero'][7]) \
-float(self.hdf['entry1/Amor/chopper/ch2_trigger/event_time_zero'][0])) \
trigger_group = self.hdf[self.hdf_paths['trigger'][0]]
chopperTriggerTime = (float(trigger_group['event_time_zero'][7])
-float(trigger_group['event_time_zero'][0])) \
/7
chopperTriggerTimeDiff = float(self.hdf['entry1/Amor/chopper/ch2_trigger/event_time_offset'][2])
chopperTriggerTimeDiff = float(trigger_group['event_time_offset'][2])
except (KeyError, IndexError):
logging.debug(' chopper speed and phase taken from .hdf file')
chopperSpeed = self.rv('chopper_speed')
@@ -276,7 +342,9 @@ class AmorHeader:
round(kad, 3),
'deg',
comment='incoming beam angular offset')
if VS_H>0 and VS_V>0:
self.instrument_settings.virtual_source_horizontal = fileio.Value(VS_H, 'mm')
self.instrument_settings.virtual_source_vertical = fileio.Value(VS_V, 'mm')
def update_header(self, header:Header):
"""
@@ -345,20 +413,21 @@ class AmorEventData(AmorHeader):
Read the actual event data from file. If file is too large, find event index from packets
that allow splitting of file smaller than self.max_events.
"""
packets = np.recarray(self.hdf['/entry1/Amor/detector/data/event_index'].shape, dtype=PACKET_TYPE)
packets.start_index = self.hdf['/entry1/Amor/detector/data/event_index'][:]
packets.time = self.hdf['/entry1/Amor/detector/data/event_time_zero'][:]
data_group = self.hdf[self.hdf_paths['data'][0]]
packets = np.recarray(data_group['event_index'].shape, dtype=PACKET_TYPE)
packets.start_index = data_group['event_index'][:]
packets.time = data_group['event_time_zero'][:]
try:
# packet index that matches first event index
start_packet = int(np.where(packets.start_index==self.first_index)[0][0])
except IndexError:
raise EOFError(f'No event packet found starting at event #{self.first_index}, '
f'number of events is {self.hdf["/entry1/Amor/detector/data/event_time_offset"].shape[0]}')
f'number of events is {data_group["event_time_offset"].shape[0]}')
packets = packets[start_packet:]
if packets.shape[0]==0:
raise EOFError(f'No more packets left after start_packet filter')
nevts = self.hdf['/entry1/Amor/detector/data/event_time_offset'].shape[0]
nevts = data_group['event_time_offset'].shape[0]
if (nevts-self.first_index)>self.max_events:
end_packet = np.where(packets.start_index<=(self.first_index+self.max_events))[0][-1]
end_packet = max(1, end_packet)
@@ -381,12 +450,17 @@ class AmorEventData(AmorHeader):
packets.start_index -= self.first_index
events = np.recarray(nevts, dtype=EVENT_TYPE)
events.tof = np.array(self.hdf['/entry1/Amor/detector/data/event_time_offset'][self.first_index:self.last_index+1])/1.e9
events.pixelID = self.hdf['/entry1/Amor/detector/data/event_id'][self.first_index:self.last_index+1]
events.tof = np.array(data_group['event_time_offset'][self.first_index:self.last_index+1])/1.e9
events.pixelID = data_group['event_id'][self.first_index:self.last_index+1]
events.mask = 0
pulses = self.read_chopper_trigger_stream(packets)
current = self.read_proton_current_stream(packets)
# read parameter logs not present in old files to ensure they are in self._log_keys if they exist
_monitor = self.rv('monitor', default=None)
_acquisition_filter = self.rv('acquisition_filter', default=None)
self.data = AmorEventStream(events, packets, pulses, current)
if self.first_index>0 and not self.EOF:
@@ -410,9 +484,11 @@ class AmorEventData(AmorHeader):
self.data.device_logs[key] = data
def update_info_from_logs(self):
RELEVANT_ITEMS = ['sample_temperature', 'sample_magnetic_field', 'polarization_config_label']
relevant_items = ['polarization_config_label']
sample_keys = [f'sample_{param}' for param in self.SEE_PARAMS]
relevant_items += sample_keys
for key, log in self.data.device_logs.items():
if key not in RELEVANT_ITEMS:
if key not in relevant_items:
continue
if log.value.dtype in [np.int8, np.int16, np.int32, np.int64]:
# for integer items (flags) report the most common one
@@ -425,24 +501,23 @@ class AmorEventData(AmorHeader):
value = log.value.mean()
if key == 'polarization_config_label':
self.instrument_settings.polarization = fileio.Polarization(const.polarizationConfigs[value])
elif key == 'sample_temperature':
self.sample.sample_parameters['temperature'].magnitue = value
elif key == 'sample_magnetic_field':
self.sample.sample_parameters['magnetic_field'].magnitue = value
elif key in sample_keys:
param = key.split('_', 1)[1]
self.sample.sample_parameters[param].magnitue = value
def read_chopper_trigger_stream(self, packets):
chopper1TriggerTime = np.array(self.hdf['entry1/Amor/chopper/ch2_trigger/event_time_zero'][:-2], dtype=np.int64)
#self.chopper2TriggerTime = self.chopper1TriggerTime + np.array(self.hdf['entry1/Amor/chopper/ch2_trigger/event_time'][:-2], dtype=np.int64)
# + np.array(self.hdf['entry1/Amor/chopper/ch2_trigger/event_time_offset'][:], dtype=np.int64)
trigger_group = self.hdf[self.hdf_paths['trigger'][0]]
chopper1TriggerTime = np.array(trigger_group['event_time_zero'][:-2], dtype=np.int64)
if np.shape(chopper1TriggerTime)[0] > 2:
startTime = chopper1TriggerTime[0]
pulseTimeS = chopper1TriggerTime
else:
logging.critical(' No chopper trigger data available, using event steram instead, pulse filtering will fail!')
startTime = np.array(self.hdf['/entry1/Amor/detector/data/event_time_zero'][0], dtype=np.int64)
stopTime = np.array(self.hdf['/entry1/Amor/detector/data/event_time_zero'][-2], dtype=np.int64)
data_group = self.hdf[self.hdf_paths['data'][0]]
startTime = np.array(data_group['event_time_zero'][0], dtype=np.int64)
stopTime = np.array(data_group['event_time_zero'][-2], dtype=np.int64)
pulseTimeS = np.arange(startTime, stopTime, self.timing.tau*1e9, dtype=np.int64)
pulses = np.recarray(pulseTimeS.shape, dtype=PULSE_TYPE)
pulses.time = pulseTimeS
@@ -454,12 +529,13 @@ class AmorEventData(AmorHeader):
return pulses
def read_proton_current_stream(self, packets):
proton_current = np.recarray(self.hdf['entry1/Amor/detector/proton_current/time'].shape, dtype=PC_TYPE)
proton_current.time = self.hdf['entry1/Amor/detector/proton_current/time'][:]
if self.hdf['entry1/Amor/detector/proton_current/value'].ndim==1:
proton_current.current = self.hdf['entry1/Amor/detector/proton_current/value'][:]
pc_group = self.hdf[self.hdf_paths['proton_current'][0]]
proton_current = np.recarray(pc_group['time'].shape, dtype=PC_TYPE)
proton_current.time = pc_group['time'][:]
if pc_group['value'].ndim==1:
proton_current.current = pc_group['value'][:]
else:
proton_current.current = self.hdf['entry1/Amor/detector/proton_current/value'][:,0]
proton_current.current = pc_group['value'][:,0]
if self.first_index>0 or not self.EOF:
proton_current = proton_current[(proton_current.time>=packets.time[0])&