22 Commits

Author SHA1 Message Date
95a1ffade4 Add theta filter for issues with parts of incoming divergence
Some checks failed
Release / build-ubuntu-latest (push) Failing after 2s
Release / build-windows (push) Has been cancelled
Release / release (push) Has been cancelled
2025-10-10 15:35:21 +02:00
4ee1cf7ea7 Fix automatic file switching in events2histogram 2025-10-10 15:08:21 +02:00
c90bdd3316 Bump version 2025-10-10 09:31:55 +02:00
ac9babb9ad Fix unwanted matplotlib import in projection.py
Some checks failed
Release / build-ubuntu-latest (push) Failing after 2s
Release / build-windows (push) Has been cancelled
Release / release (push) Has been cancelled
2025-10-10 09:28:52 +02:00
aa5b540aed fix jochen colormap in eos call, as it's not registered there 2025-10-10 09:25:01 +02:00
86d676ccc8 Update release date for 3.0.1 2025-10-10 09:23:31 +02:00
f8daa93c48 Add update to scale for last mesh plot 2025-10-10 09:23:04 +02:00
4c2e344d78 Add raw data overview graph with fast reduction 2025-10-10 09:14:42 +02:00
469e1c0ab0 Add theta projection 2025-10-10 08:50:57 +02:00
a947dfd398 Add lambda projection 2025-10-09 16:50:19 +02:00
4e2269bdae Add Jochen colormap, LT and YT map automatic colorscale adjustment 2025-10-09 13:46:52 +02:00
3f93ec2017 Add Tof projection 2025-10-08 16:50:17 +02:00
9d788da2f1 Minor fixed and addition of linear color plots 2025-10-08 15:21:58 +02:00
f16feac748 Add QProjection and combined projection as well as normalization model 2025-10-08 14:14:02 +02:00
709a0c5392 Add TofZProjection 2025-10-08 09:26:14 +02:00
b71b72c6a3 Add YZProjection 2025-10-08 08:54:39 +02:00
2972ffd5d8 Add option to show line in LT plot with q-value 2025-10-07 16:50:33 +02:00
31201795b0 implement events2histogram for LZ graph and automatic update 2025-10-07 16:19:57 +02:00
99af021b3e separte hdf file header reading, start events2histogram and fix test 2025-10-07 11:20:56 +02:00
aacbe3ed6f separate AssociatePulseWithMonitor and FilterMonitorThreshold to allow monitor use without wallTime 2025-10-07 10:29:02 +02:00
6c0c2fcab8 rename AmorReduction to ReflectivityReduction and use single config object to stay comparable with future reductions 2025-10-07 08:48:15 +02:00
db5713ab56 bump revision 2025-10-06 18:18:57 +02:00
20 changed files with 1373 additions and 1370 deletions

View File

@@ -2,5 +2,5 @@
Package to handle data redction at AMOR instrument to be used by __main__.py script.
"""
__version__ = '3.0.0'
__date__ = '2025-10-06'
__version__ = '3.0.2'
__date__ = '2025-10-10'

View File

@@ -8,30 +8,31 @@ Author: Jochen Stahn (algorithms, python draft),
import logging
# need to do absolute import here as pyinstaller requires it
from eos.options import EOSConfig, ReaderConfig, ExperimentConfig, ReductionConfig, OutputConfig
from eos.options import ReflectivityConfig, ReaderConfig, ExperimentConfig, ReflectivityReductionConfig, ReflectivityOutputConfig
from eos.command_line import commandLineArgs
from eos.logconfig import setup_logging, update_loglevel
def main():
setup_logging()
# read command line arguments and generate classes holding configuration parameters
clas = commandLineArgs([ReaderConfig, ExperimentConfig, ReductionConfig, OutputConfig],
clas = commandLineArgs([ReaderConfig, ExperimentConfig, ReflectivityReductionConfig, ReflectivityOutputConfig],
'eos')
update_loglevel(clas.verbose)
reader_config = ReaderConfig.from_args(clas)
experiment_config = ExperimentConfig.from_args(clas)
reduction_config = ReductionConfig.from_args(clas)
output_config = OutputConfig.from_args(clas)
config = EOSConfig(reader_config, experiment_config, reduction_config, output_config)
reduction_config = ReflectivityReductionConfig.from_args(clas)
output_config = ReflectivityOutputConfig.from_args(clas)
config = ReflectivityConfig(reader_config, experiment_config, reduction_config, output_config)
logging.warning('######## eos - data reduction for Amor ########')
# only import heavy module if sufficient command line parameters were provided
from eos.reduction import AmorReduction
from eos.reduction_reflectivity import ReflectivityReduction
# Create reducer with these arguments
reducer = AmorReduction(config)
reducer = ReflectivityReduction(config)
# Perform actual reduction
reducer.reduce()

View File

@@ -1,10 +1,10 @@
import argparse
from typing import List
from typing import List, Type
from .options import ArgParsable
def commandLineArgs(config_items: List[ArgParsable], program_name=None):
def commandLineArgs(config_items: List[Type[ArgParsable]], program_name=None):
"""
Process command line argument.
The type of the default values is used for conversion and validation.

42
eos/e2h.py Normal file
View File

@@ -0,0 +1,42 @@
"""
events2histogram vizualising data from Amor@SINQ, PSI
Author: Jochen Stahn (algorithms, python draft),
Artur Glavic (structuring and optimisation of code)
"""
import logging
# need to do absolute import here as pyinstaller requires it
from eos.options import E2HConfig, ReaderConfig, ExperimentConfig, E2HReductionConfig
from eos.command_line import commandLineArgs
from eos.logconfig import setup_logging, update_loglevel
def main():
setup_logging()
logging.getLogger('matplotlib').setLevel(logging.WARNING)
# read command line arguments and generate classes holding configuration parameters
clas = commandLineArgs([ReaderConfig, ExperimentConfig, E2HReductionConfig],
'events2histogram')
update_loglevel(clas.verbose)
reader_config = ReaderConfig.from_args(clas)
experiment_config = ExperimentConfig.from_args(clas)
reduction_config = E2HReductionConfig.from_args(clas)
config = E2HConfig(reader_config, experiment_config, reduction_config)
logging.warning('######## events2histogram - data vizualization for Amor ########')
from eos.reduction_e2h import E2HReduction
# only import heavy module if sufficient command line parameters were provided
from eos.reduction_reflectivity import ReflectivityReduction
# Create reducer with these arguments
reducer = E2HReduction(config)
# Perform actual reduction
reducer.reduce()
logging.info('######## events2histogram - finished ########')
if __name__ == '__main__':
main()

View File

@@ -18,7 +18,7 @@ class ExtractWalltime(EventDataAction):
def perform_action(self, dataset: EventDatasetProtocol) ->None:
wallTime = extract_walltime(dataset.data.events.tof,
dataset.data.packets.start_index,
dataset.data.packets.Time)
dataset.data.packets.time)
logging.debug(f' expending event stream by wallTime')
new_events = append_fields(dataset.data.events, [('wallTime', wallTime.dtype)])
new_events.wallTime = wallTime

View File

@@ -31,7 +31,7 @@ class AmorTiming:
# Structured datatypes used for event streams
EVENT_TYPE = np.dtype([('tof', np.float64), ('pixelID', np.uint32), ('mask', np.int32)])
PACKET_TYPE = np.dtype([('start_index', np.uint32), ('Time', np.int64)])
PACKET_TYPE = np.dtype([('start_index', np.uint32), ('time', np.int64)])
PULSE_TYPE = np.dtype([('time', np.int64), ('monitor', np.float32)])
PC_TYPE = np.dtype([('current', np.float32), ('time', np.int64)])

View File

@@ -77,49 +77,28 @@ class CorrectSeriesTime(EventDataAction):
class AssociatePulseWithMonitor(EventDataAction):
def __init__(self, monitorType:MonitorType, lowCurrentThreshold:float):
def __init__(self, monitorType:MonitorType):
self.monitorType = monitorType
self.lowCurrentThreshold = lowCurrentThreshold
def perform_action(self, dataset: EventDatasetProtocol)->None:
logging.debug(f' using monitor type {self.monitorType}')
if self.monitorType in [MonitorType.proton_charge or MonitorType.debug]:
if not 'wallTime' in dataset.data.events.dtype.names:
raise ValueError(
"AssociatePulseWithMonitor requires walltTime to be extracted, please run ExtractWalltime first")
monitorPerPulse = self.get_current_per_pulse(dataset.data.pulses.time,
dataset.data.pulses.monitor = self.get_current_per_pulse(dataset.data.pulses.time,
dataset.data.proton_current.time,
dataset.data.proton_current.current)\
* 2*dataset.timing.tau * 1e-3
# filter low-current pulses
dataset.data.pulses.monitor = np.where(
monitorPerPulse > 2*dataset.timing.tau * self.lowCurrentThreshold * 1e-3,
monitorPerPulse, 0)
elif self.monitorType==MonitorType.time:
dataset.data.pulses.monitor = 2*dataset.timing.tau
else: # pulses
dataset.data.pulses.monitor = 1
if self.monitorType == MonitorType.debug:
if not 'wallTime' in dataset.data.events.dtype.names:
raise ValueError(
"AssociatePulseWithMonitor requires walltTime for debugging, please run ExtractWalltime first")
cpp, t_bins = np.histogram(dataset.data.events.wallTime, dataset.data.pulses.time)
np.savetxt('tme.hst', np.vstack((dataset.data.pulses.time[:-1], cpp, dataset.data.pulses.monitor[:-1])).T)
if self.monitorType in [MonitorType.proton_charge or MonitorType.debug]:
self.monitor_threshold(dataset)
def monitor_threshold(self, dataset):
goodTimeS = dataset.data.pulses.time[dataset.data.pulses.monitor!=0]
filter_e = np.logical_not(np.isin(dataset.data.events.wallTime, goodTimeS))
dataset.data.events.mask += EVENT_BITMASKS['MonitorThreshold']*filter_e
logging.info(f' low-beam (<{self.lowCurrentThreshold} mC) rejected pulses: '
f'{dataset.data.pulses.monitor.shape[0]-goodTimeS.shape[0]} '
f'out of {dataset.data.pulses.monitor.shape[0]}')
logging.info(f' with {filter_e.sum()} events')
if goodTimeS.shape[0]:
logging.info(f' average counts per pulse = {dataset.data.events.shape[0]/goodTimeS.shape[0]:7.1f}')
else:
logging.info(f' average counts per pulse = undefined')
@staticmethod
def get_current_per_pulse(pulseTimeS, currentTimeS, currents):
# add currents for early pulses and current time value after last pulse (j+1)
@@ -134,6 +113,28 @@ class AssociatePulseWithMonitor(EventDataAction):
pulseCurrentS[i] = currents[j]
return pulseCurrentS
class FilterMonitorThreshold(EventDataAction):
def __init__(self, lowCurrentThreshold:float):
self.lowCurrentThreshold = lowCurrentThreshold
def perform_action(self, dataset: EventDatasetProtocol) ->None:
if not 'wallTime' in dataset.data.events.dtype.names:
raise ValueError(
"FilterMonitorThreshold requires walltTime to be extracted, please run ExtractWalltime first")
low_current_filter = dataset.data.pulses.monitor>2*dataset.timing.tau*self.lowCurrentThreshold*1e-3
dataset.data.pulses.monitor[np.logical_not(low_current_filter)] = 0.
goodTimeS = dataset.data.pulses.time[low_current_filter]
filter_e = np.logical_not(np.isin(dataset.data.events.wallTime, goodTimeS))
dataset.data.events.mask += EVENT_BITMASKS['MonitorThreshold']*filter_e
logging.info(f' low-beam (<{self.lowCurrentThreshold} mC) rejected pulses: '
f'{dataset.data.pulses.monitor.shape[0]-goodTimeS.shape[0]} '
f'out of {dataset.data.pulses.monitor.shape[0]}')
logging.info(f' with {filter_e.sum()} events')
if goodTimeS.shape[0]:
logging.info(f' average counts per pulse = {dataset.data.events.shape[0]/goodTimeS.shape[0]:7.1f}')
else:
logging.info(f' average counts per pulse = undefined')
class FilterStrangeTimes(EventDataAction):
def perform_action(self, dataset: EventDatasetProtocol)->None:
@@ -148,6 +149,9 @@ class TofTimeCorrection(EventDataAction):
self.correct_chopper_opening = correct_chopper_opening
def perform_action(self, dataset: EventDatasetProtocol) ->None:
if not 'delta' in dataset.data.events.dtype.names:
raise ValueError(
"TofTimeCorrection requires delta to be extracted, please run AnalyzePixelIDs first")
d = dataset.data
if self.correct_chopper_opening:
d.events.tof -= ( d.events.delta / 180. ) * dataset.timing.tau

View File

@@ -30,69 +30,45 @@ AMOR_LOCAL_TIMEZONE = zoneinfo.ZoneInfo(key='Europe/Zurich')
if platform.node().startswith('amor'):
NICOS_CACHE_DIR = '/home/amor/nicosdata/amor/cache/'
GREP = '/usr/bin/grep "%s"'
GREP = '/usr/bin/grep "value"'
else:
NICOS_CACHE_DIR = None
class AmorEventData:
class AmorHeader:
"""
Read one amor NeXus datafile and extract relevant header information.
Implements EventDatasetProtocol
Collects header information from Amor NeXus fiel without reading event data.
"""
file_list: List[str]
first_index: int
last_index: int = -1
EOF: bool = False
max_events: int
owner: fileio.Person
experiment: fileio.Experiment
sample: fileio.Sample
instrument_settings: fileio.InstrumentSettings
geometry: AmorGeometry
timing: AmorTiming
data: AmorEventStream
eventStartTime: np.int64
def __init__(self, fileName:Union[str, h5py.File, BinaryIO], first_index:int=0, max_events:int=100_000_000):
def __init__(self, fileName:Union[str, h5py.File, BinaryIO]):
if type(fileName) is str:
self.file_list = [fileName]
logging.debug(f' opening file {fileName}')
self.hdf = h5py.File(fileName, 'r', swmr=True)
elif type(fileName) is h5py.File:
self.file_list = [fileName.filename]
self.hdf = fileName
else:
self.file_list = [repr(fileName)]
self.hdf = h5py.File(fileName, 'r')
self.first_index = first_index
self.max_events = max_events
self.read_header_info()
self.read_instrument_configuration()
self.read_event_stream()
# actions applied to any dataset
self.read_chopper_trigger_stream()
self.read_proton_current_stream()
if type(fileName) is str:
# close the input file to free memory, only if the file was opened in this object
self.hdf.close()
del(self.hdf)
def _replace_if_missing(self, key, nicos_key, dtype=float):
def _replace_if_missing(self, key, nicos_key, dtype=float, suffix=''):
try:
return dtype(self.hdf[f'/entry1/Amor/{key}'][0])
except(KeyError, IndexError):
if NICOS_CACHE_DIR:
try:
logging.warning(f" using parameter {nicos_key} from nicos cache")
logging.info(f" using parameter {nicos_key} from nicos cache")
year_date = self.fileDate.strftime('%Y')
value = str(subprocess.getoutput(f'{GREP} {NICOS_CACHE_DIR}nicos-{nicos_key}/{year_date}')).split('\t')[-1]
call = f'{GREP} {NICOS_CACHE_DIR}nicos-{nicos_key}/{year_date}{suffix}'
value = str(subprocess.getoutput(call)).split('\t')[-1]
return dtype(value)
except Exception:
logging.error("Couldn't get value from nicos cache", exc_info=True)
logging.error(f"Couldn't get value from nicos cache {nicos_key}, {call}")
return dtype(0)
else:
logging.warning(f" parameter {key} not found, relpace by zero")
@@ -177,7 +153,7 @@ class AmorEventData:
chopperSeparation, detectorDistance, chopperDetectorDistance)
self.timing = AmorTiming(ch1TriggerPhase, ch2TriggerPhase, chopperSpeed, chopperPhase, tau)
polarizationConfigLabel = self._replace_if_missing('polarization/configuration/average_value', 'polarizer_config_label', int)
polarizationConfigLabel = self._replace_if_missing('polarization/configuration/average_value', 'polarization_config_label', int, suffix='/*')
polarizationConfig = fileio.Polarization(polarizationConfigs[polarizationConfigLabel])
logging.debug(f' polarization configuration: {polarizationConfig} (index {polarizationConfigLabel})')
@@ -223,6 +199,52 @@ class AmorEventData:
header.sample = self.sample
header.measurement_instrument_settings = self.instrument_settings
class AmorEventData(AmorHeader):
"""
Read one amor NeXus datafile and extract relevant header information.
Implements EventDatasetProtocol
"""
file_list: List[str]
first_index: int
last_index: int = -1
EOF: bool = False
max_events: int
owner: fileio.Person
experiment: fileio.Experiment
sample: fileio.Sample
instrument_settings: fileio.InstrumentSettings
geometry: AmorGeometry
timing: AmorTiming
data: AmorEventStream
eventStartTime: np.int64
def __init__(self, fileName:Union[str, h5py.File, BinaryIO], first_index:int=0, max_events:int=100_000_000):
if type(fileName) is str:
logging.debug(f' opening file {fileName}')
self.file_list = [fileName]
hdf = h5py.File(fileName, 'r', swmr=True)
elif type(fileName) is h5py.File:
self.file_list = [fileName.filename]
hdf = fileName
else:
self.file_list = [repr(fileName)]
hdf = h5py.File(fileName, 'r')
self.first_index = first_index
self.max_events = max_events
super().__init__(hdf)
self.hdf = hdf
self.read_event_stream()
if type(fileName) is str:
# close the input file to free memory, only if the file was opened in this object
self.hdf.close()
del(self.hdf)
def read_event_stream(self):
"""
Read the actual event data from file. If file is too large, find event index from packets
@@ -230,12 +252,13 @@ class AmorEventData:
"""
packets = np.recarray(self.hdf['/entry1/Amor/detector/data/event_index'].shape, dtype=PACKET_TYPE)
packets.start_index = self.hdf['/entry1/Amor/detector/data/event_index'][:]
packets.Time = self.hdf['/entry1/Amor/detector/data/event_time_zero'][:]
packets.time = self.hdf['/entry1/Amor/detector/data/event_time_zero'][:]
try:
# packet index that matches first event index
start_packet = int(np.where(packets.start_index==self.first_index)[0][0])
except IndexError:
raise IndexError(f'No event packet found starting at event #{self.first_index}')
raise EOFError(f'No event packet found starting at event #{self.first_index}, '
f'number of events is {self.hdf["/entry1/Amor/detector/data/event_time_offset"].shape[0]}')
packets = packets[start_packet:]
nevts = self.hdf['/entry1/Amor/detector/data/event_time_offset'].shape[0]
@@ -256,15 +279,15 @@ class AmorEventData:
events.pixelID = self.hdf['/entry1/Amor/detector/data/event_id'][self.first_index:self.last_index+1]
events.mask = 0
pulses = self.read_chopper_trigger_stream()
current = self.read_proton_current_stream()
pulses = self.read_chopper_trigger_stream(packets)
current = self.read_proton_current_stream(packets)
self.data = AmorEventStream(events, packets, pulses, current)
if self.first_index>0 and not self.EOF:
# label the file name if not all events were used
self.file_list[0] += f'[{self.first_index}:{self.last_index}]'
def read_chopper_trigger_stream(self):
def read_chopper_trigger_stream(self, packets):
chopper1TriggerTime = np.array(self.hdf['entry1/Amor/chopper/ch2_trigger/event_time_zero'][:-2], dtype=np.int64)
#self.chopper2TriggerTime = self.chopper1TriggerTime + np.array(self.hdf['entry1/Amor/chopper/ch2_trigger/event_time'][:-2], dtype=np.int64)
# + np.array(self.hdf['entry1/Amor/chopper/ch2_trigger/event_time_offset'][:], dtype=np.int64)
@@ -281,17 +304,17 @@ class AmorEventData:
pulses.monitor = 1. # default is monitor pulses as it requires no calculation
# apply filter in case the events were filtered
if self.first_index>0 or not self.EOF:
pulses = pulses[(pulses.time>=self.data.packets.Time[0])&(pulses.time<=self.data.packets.Time[-1])]
pulses = pulses[(pulses.time>=packets.time[0])&(pulses.time<=packets.time[-1])]
self.eventStartTime = startTime
return pulses
def read_proton_current_stream(self):
def read_proton_current_stream(self, packets):
proton_current = np.recarray(self.hdf['entry1/Amor/detector/proton_current/time'].shape, dtype=PC_TYPE)
proton_current.time = self.hdf['entry1/Amor/detector/proton_current/time'][:]
proton_current.current = self.hdf['entry1/Amor/detector/proton_current/value'][:,0]
if self.first_index>0 or not self.EOF:
proton_current = proton_current[(proton_current.time>=self.data.packets.Time[0])&
(proton_current.time<=self.data.packets.Time[-1])]
proton_current = proton_current[(proton_current.time>=packets.time[0])&
(proton_current.time<=packets.time[-1])]
return proton_current
def info(self):

View File

@@ -5,6 +5,7 @@ Class to handle Orso header information that changes gradually during the reduct
import platform
import sys
from datetime import datetime
from typing import List, Literal
from orsopy import fileio
@@ -13,13 +14,16 @@ from . import __version__
class Header:
"""orso compatible output file header content"""
owner: fileio.Person
experiment: fileio.Experiment
sample: fileio.Sample
measurement_instrument_settings: fileio.InstrumentSettings
measurement_scheme: Literal["angle- and energy-dispersive", "angle-dispersive", "energy-dispersive"]
measurement_data_files: List[fileio.File]
measurement_additional_files: List[fileio.File]
def __init__(self):
self.owner = None
self.experiment = None
self.sample = None
self.measurement_instrument_settings = None
self.measurement_scheme = None
self.measurement_data_files = []
self.measurement_additional_files = []
@@ -64,10 +68,3 @@ class Header:
if columns is None:
columns = self.columns()
return fileio.Orso(ds, red, columns+extra_columns)
#-------------------------------------------------------------------------------------------------
def create_call_string(self):
callString = ' '.join(sys.argv)
if '-Y' not in callString:
callString += f' -Y {datetime.now().year}'
return callString
#-------------------------------------------------------------------------------------------------

View File

@@ -99,6 +99,7 @@ class LZGrid:
@cache
def z(self):
# TODO: shouldn't this be -0.5 to be the edges of each pixel?
return np.arange(Detector.nBlades*Detector.nWires+1)
@cache

View File

@@ -66,6 +66,30 @@ class LZNormalisation:
self.monitor = 1.
return self
@classmethod
def model(cls, grid:LZGrid) -> 'LZNormalisation':
# generate a normalization based on angular and wavelength distribution model
# TODO: add options for sample size for better absolute normalization
logging.warning(f'normalisation is model')
self = super().__new__(cls)
self.angle = 1.0
self.monitor = 4e6
lamda_l = grid.lamda()
lamda_c = (lamda_l[:-1]+lamda_l[1:])/2
delta = np.rad2deg(np.arctan2(grid.z(), Detector.distance))/2.0
delta_c = (delta[:-1]+delta[1:])/2-delta.mean()
# approximate spectrum by Maxwell-Boltzmann and intensity by linear footprint
a = 3.8
Ilambda = np.sqrt(2./np.pi)*lamda_c**2/a**3*np.exp(-lamda_c**2/(2.*a**2))
Idelta = np.where(abs(delta_c)<0.75, (self.angle-delta_c), np.nan)
self.norm = 1e6*Ilambda[:, np.newaxis]*Idelta[np.newaxis, :]
return self
def safe(self, filename, hash):
with open(filename, 'wb') as fh:
np.save(fh, hash, allow_pickle=False)

View File

@@ -10,6 +10,7 @@ import numpy as np
import logging
try:
from enum import StrEnum
except ImportError:
@@ -90,10 +91,12 @@ class ArgParsable:
if get_origin(typ) is list:
args['nargs'] = '+'
typ = get_args(typ)[0]
if get_origin(typ) is tuple:
# tuple of items are put together during evaluation
typ = get_args(typ)[0]
elif get_origin(typ) is tuple:
args['nargs'] = len(get_args(typ))
typ = get_args(typ)[0]
if issubclass(typ, StrEnum):
args['choices'] = [ci.value for ci in typ]
if field.default is not MISSING:
@@ -148,7 +151,12 @@ class ArgParsable:
if get_origin(field.type) is Union and type(None) in get_args(field.type):
# optional argument
typ = get_args(field.type)[0]
if get_origin(typ) is list:
item_typ = get_args(typ)[0]
if get_origin(item_typ) is tuple:
# tuple of items are put together during evaluation
tuple_length = len(get_args(item_typ))
value = [tuple(value[i*tuple_length+j] for j in range(tuple_length)) for i in range(len(value)//tuple_length)]
if isinstance(typ, type) and issubclass(typ, StrEnum):
# convert str to enum
try:
@@ -201,6 +209,14 @@ class MonitorType(StrEnum):
neutron_monitor = 'n'
debug = 'x'
MONITOR_UNITS = {
MonitorType.neutron_monitor: 'cnts',
MonitorType.proton_charge: 'mC',
MonitorType.time: 's',
MonitorType.auto: 'various',
MonitorType.debug: 'mC',
}
@dataclass
class ExperimentConfig(ArgParsable):
chopperPhase: float = field(
@@ -308,7 +324,7 @@ class NormalisationMethod(StrEnum):
under_illuminated = 'u'
@dataclass
class ReductionConfig(ArgParsable):
class ReflectivityReductionConfig(ArgParsable):
fileIdentifier: List[str] = field(
metadata={
'short': 'f',
@@ -350,6 +366,14 @@ class ReductionConfig(ArgParsable):
'help': 'theta region of interest w.r.t. beam center',
},
)
thetaFilters: List[Tuple[float, float]] = field(
default_factory=lambda: [],
metadata={
'short': 'TF',
'group': 'region of interest',
'help': 'add one or more theta ranges that will be filtered in reduction',
},
)
normalisationMethod: NormalisationMethod = field(
default=NormalisationMethod.over_illuminated,
metadata={
@@ -395,33 +419,6 @@ class ReductionConfig(ArgParsable):
},
)
def _expand_file_list(self, short_notation:str):
"""Evaluate string entry for file number lists"""
file_list=[]
for i in short_notation.split(','):
if '-' in i:
if ':' in i:
step = i.split(':', 1)[1]
file_list += range(int(i.split('-', 1)[0]),
int((i.rsplit('-', 1)[1]).split(':', 1)[0])+1,
int(step))
else:
step = 1
file_list += range(int(i.split('-', 1)[0]),
int(i.split('-', 1)[1])+1,
int(step))
else:
file_list += [int(i)]
file_list.sort()
return file_list
def data_files(self):
# get input files from expanding fileIdentifier
return list(map(self._expand_file_list, self.fileIdentifier))
def normal_files(self):
return list(map(self._expand_file_list, self.normalisationFileIdentifier))
class OutputFomatOption(StrEnum):
Rqz_ort = "Rqz.ort"
@@ -440,9 +437,10 @@ class PlotColormaps(StrEnum):
inferno = "inferno"
gist_rainbow = "gist_rainbow"
nipy_spectral = "nipy_spectral"
jochen_deluxe = "jochen_deluxe"
@dataclass
class OutputConfig(ArgParsable):
class ReflectivityOutputConfig(ArgParsable):
outputFormats: List[OutputFomatOption] = field(
default_factory=lambda: ['Rqz.ort'],
metadata={
@@ -511,11 +509,11 @@ class OutputConfig(ArgParsable):
# ===================================
@dataclass
class EOSConfig:
class ReflectivityConfig:
reader: ReaderConfig
experiment: ExperimentConfig
reduction: ReductionConfig
output: OutputConfig
reduction: ReflectivityReductionConfig
output: ReflectivityOutputConfig
_call_string_overwrite=None
@@ -608,4 +606,136 @@ class EOSConfig:
logging.debug(f'Argument list build in EOSConfig.call_string: {mlst}')
return mlst
class E2HPlotSelection(StrEnum):
All = 'all'
Raw = 'raw'
YZ = 'Iyz'
LT = 'Ilt'
YT = 'Iyt'
TZ = 'Itz'
Q = 'Iq'
L = 'Il'
T = 'It'
ToF = 'tof'
class E2HPlotArguments(StrEnum):
Default = 'def'
OutputFile = 'file'
Logarithmic = 'log'
Linear = 'lin'
@dataclass
class E2HReductionConfig(ArgParsable):
fileIdentifier: str = field(
default='0',
metadata={
'short': 'f',
'priority': 100,
'group': 'input data',
'help': 'file number(s) or offset (if < 1), events2histogram only accepts one short code',
},
)
show_plot: bool = field(
default=False,
metadata={
'short': 'sp',
'group': 'output',
'help': 'show matplotlib graphs of results',
},
)
plot: E2HPlotSelection = field(
default=E2HPlotSelection.All,
metadata={
'short': 'p',
'group': 'output',
'help': 'select what to plot or write',
},
)
plotArgs: E2HPlotArguments = field(
default=E2HPlotArguments.Default,
metadata={
'short': 'pa',
'group': 'output',
'help': 'select configuration for plot',
},
)
update: bool = field(
default=False,
metadata={
'short': 'u',
'group': 'output',
'help': 'keep running in the background and update plot when file is modified, implies --plot',
},
)
fast: bool = field(
default=False,
metadata={
'group': 'input data',
'help': 'skip some reduction steps to speed up calculations',
},
)
normalizationModel: bool = field(
default=False,
metadata={
'short': 'nm',
'group': 'input data',
'help': 'use model for incoming spectrum and divergence to normalize for reflectivity',
},
)
plot_colormap: PlotColormaps = field(
default=PlotColormaps.jochen_deluxe,
metadata={
'short': 'pcmap',
'group': 'output',
'help': 'matplotlib colormap used in lambda-theta graphs when plotting',
},
)
max_events: int = field(
default = 10_000_000,
metadata={
'group': 'input data',
'help': 'maximum number of events read at once',
},
)
thetaRangeR: Tuple[float, float] = field(
default_factory=lambda: [-0.75, 0.75],
metadata={
'short': 'T',
'group': 'region of interest',
'help': 'theta region of interest w.r.t. beam center',
},
)
thetaFilters: List[Tuple[float, float]] = field(
default_factory=lambda: [],
metadata={
'short': 'TF',
'group': 'region of interest',
'help': 'add one or more theta ranges that will be filtered in reduction',
},
)
fontsize: float = field(
default=8.,
metadata={
'short': 'pf',
'group': 'output',
'help': 'font size for graphs',
},
)
@dataclass
class E2HConfig:
reader: ReaderConfig
experiment: ExperimentConfig
reduction: E2HReductionConfig

View File

@@ -1,6 +1,7 @@
"""
Defines how file paths are resolved from short_notation, year and number to filename.
"""
import logging
import os
from typing import List
@@ -18,7 +19,7 @@ class PathResolver:
"""Evaluate string entry for file number lists"""
file_list = []
for i in short_notation.split(','):
if '-' in i:
if '-' in i and not i.startswith('-'):
if ':' in i:
step = i.split(':', 1)[1]
file_list += range(int(i.split('-', 1)[0]),
@@ -34,6 +35,8 @@ class PathResolver:
return list(sorted(file_list))
def get_path(self, number):
if number<=0:
number = self.search_latest(number)
fileName = f'amor{self.year}n{number:06d}.hdf'
path = ''
for rawd in self.rawPath:
@@ -41,9 +44,39 @@ class PathResolver:
path = rawd
break
if not path:
if os.path.exists(
f'/afs/psi.ch/project/sinqdata/{self.year}/amor/{int(number/1000)}/{fileName}'):
path = f'/afs/psi.ch/project/sinqdata/{self.year}/amor/{int(number/1000)}'
from glob import glob
potential_file = glob(f'/home/amor/data/{self.year}/*/{fileName}')
if len(potential_file)>0:
path = os.path.dirname(potential_file[0])
else:
raise FileNotFoundError(f'# ERROR: the file {fileName} can not be found in {self.rawPath}')
raise FileNotFoundError(f'# ERROR: the file {fileName} can not be found '
f'in {self.rawPath+["/home/amor/data"]}')
return os.path.join(path, fileName)
def search_latest(self, number):
if number>0:
raise ValueError('number needs to be relative index (negative)')
if os.path.exists(f'/home/amor/data/{self.year}/DataNumber'):
try:
with open(f'/home/amor/data/{self.year}/DataNumber', 'r') as fh:
current_index = int(fh.readline())-1
except Exception:
logging.error('Can not access DataNumber', exc_info=True)
else:
return current_index+number
# find all files from the given year, convert to number and return latest
from glob import glob
possible_files = []
for rawd in self.rawPath:
possible_files += glob(os.path.join(rawd, f'amor{self.year}n??????.hdf'))
possible_files += glob(f'/home/amor/data/{self.year}/*/amor{self.year}n??????.hdf')
possible_indices = list(set([int(os.path.basename(fi)[9:15]) for fi in possible_files]))
possible_indices.sort()
try:
return possible_indices[number-1]
except IndexError:
raise FileNotFoundError(f'# Could not find suitable file for relative index {number} '
f'in {self.rawPath+["/home/amor/data"]}, '
f'possible indices {possible_indices}')

View File

@@ -3,6 +3,10 @@ Classes used to calculate projections/binnings from event data onto given grids.
"""
import logging
import warnings
from abc import ABC, abstractmethod
from typing import List, Tuple, Union
import numpy as np
from dataclasses import dataclass
@@ -10,6 +14,19 @@ from .event_data_types import EventDatasetProtocol
from .instrument import Detector, LZGrid
from .normalization import LZNormalisation
class ProjectionInterface(ABC):
@abstractmethod
def project(self, dataset: EventDatasetProtocol, monitor: float): ...
@abstractmethod
def clear(self): ...
@abstractmethod
def plot(self, **kwargs): ...
@abstractmethod
def update_plot(self): ...
@dataclass
class ProjectedReflectivity:
R: np.ndarray
@@ -68,20 +85,24 @@ class ProjectedReflectivity:
self.R -= R
self.dR = np.sqrt(self.dR**2+dR**2)
def plot(self, **kwargs):
from matplotlib import pyplot as plt
plt.errorbar(self.Q, self.R, xerr=self.dQ, yerr=self.dR, **kwargs)
plt.yscale('log')
plt.xlabel('Q / $\\AA^{-1}$')
plt.ylabel('R')
class LZProjection:
class LZProjection(ProjectionInterface):
grid: LZGrid
lamda: np.ndarray
alphaF: np.ndarray
is_normalized: bool
data: np.recarray
_dtype = np.dtype([
('I', np.float64),
('mask', bool),
('ref', np.float64),
('err', np.float64),
('res', np.float64),
('qz', np.float64),
('qx', np.float64),
('norm', np.float64),
])
def __init__(self, tthh: float, grid: LZGrid):
self.grid = grid
@@ -95,16 +116,7 @@ class LZProjection:
self.lamda = lz_shape*lamda_c[:, np.newaxis]
self.alphaF = lz_shape*alphaF_z[np.newaxis, :]
self.data = np.zeros(self.alphaF.shape, dtype=[
('I', np.float64),
('mask', bool),
('ref', np.float64),
('err', np.float64),
('res', np.float64),
('qz', np.float64),
('qx', np.float64),
('norm', np.float64),
]).view(np.recarray)
self.data = np.zeros(self.alphaF.shape, dtype=self._dtype).view(np.recarray)
self.data.mask = True
self.monitor = 0.
@@ -165,6 +177,7 @@ class LZProjection:
Project dataset on grid and add to intensity.
Can be called multiple times to sequentially add events.
"""
# TODO: maybe move monitor calculation in here instead of reduction?
e = dataset.data.events
int_lz, *_ = np.histogram2d(e.lamda, e.detZ, bins = (self.grid.lamda(), self.grid.z()))
self.data.I += int_lz
@@ -172,6 +185,12 @@ class LZProjection:
# in case the intensity changed one needs to normalize again
self.is_normalized = False
def clear(self):
    """Reset the projection: zero all fields, re-enable the mask, zero the monitor."""
    self.monitor = 0.
    # zeroing the whole recarray also resets the 'mask' field, so set it again
    self.data[:] = 0
    self.data.mask = True
@property
def I(self):
output = self.data.I[:]
@@ -247,50 +266,6 @@ class LZProjection:
return ProjectedReflectivity(R_q, dR_q, (q_q[1:]+q_q[:-1])/2., dq_q)
############## potential speedup not used right now, needs to be tested ####################
@classmethod
def histogram2d_lz(cls, lamda_e, detZ_e, bins):
    """
    Binning operation equivalent to numpy.histogram2d for the special case
    that the second dimension is binned on integer positions
    (pre-defined pixels).

    The actual counting is delegated to the divide-and-conquer helper
    :meth:`devide_bin`.

    :param lamda_e: wavelength of each event
    :param detZ_e: detector pixel index of each event
    :param bins: tuple (lamda_edges, z_edges); z_edges must be 0..dimension
    :return: tuple of (2d counts array, lamda_edges, z_edges)
    """
    dimension = bins[1].shape[0]-1
    expected_edges = np.arange(dimension+1)
    if not (np.array(bins[1])==expected_edges).all():
        raise ValueError("histogram2d_lz requires second bin dimension to be contigous integer range")
    counts = cls.devide_bin(lamda_e, detZ_e.astype(np.int64), bins[0], dimension)
    return np.array(counts), bins[0], bins[1]
@classmethod
def devide_bin(cls, lambda_e, position_e, lamda_edges, dimension):
    """
    Divide-and-conquer binning of events.

    The event set is recursively split at the middle wavelength edge until a
    single wavelength bin remains; positions within that bin are then counted
    with numpy.bincount, which is much faster than histogram for counting of
    integer values.

    :param lambda_e: array of wavelength for each event
    :param position_e: array of positional indices for each event
    :param lamda_edges: the edges of bins to be used for the histogram
    :param dimension: number of position buckets in the output array
    :return: nested list of counts with shape (len(lamda_edges)-1, dimension)
    """
    n_lamda_bins = len(lamda_edges)-1
    if len(lambda_e)==0:
        # no more events in range, return empty bins for every wavelength slot
        return [np.zeros(dimension, dtype=np.int64).tolist()]*n_lamda_bins
    if n_lamda_bins==1:
        # deepest recursion reached, all items are within the two ToF edges
        return [np.bincount(position_e, minlength=dimension).tolist()]
    # split events at the middle wavelength edge and recurse into both halves
    middle = len(lamda_edges)//2
    in_lower = lambda_e<lamda_edges[middle]
    in_upper = ~in_lower
    lower_part = cls.devide_bin(lambda_e[in_lower], position_e[in_lower],
                                lamda_edges[:middle+1], dimension)
    upper_part = cls.devide_bin(lambda_e[in_upper], position_e[in_upper],
                                lamda_edges[middle:], dimension)
    return lower_part+upper_part
def plot(self, **kwargs):
from matplotlib import pyplot as plt
from matplotlib.colors import LogNorm
@@ -302,16 +277,543 @@ class LZProjection:
cmap=False
if self.is_normalized:
if not 'norm' in kwargs:
kwargs['norm'] = LogNorm(2e-3, 2.0)
plt.pcolormesh(self.lamda, self.alphaF, self.data.ref, **kwargs)
if cmap:
plt.colorbar(label='R')
I = self.data.ref
else:
if not 'norm' in kwargs:
kwargs['norm'] = LogNorm()
plt.pcolormesh(self.lamda, self.alphaF, self.data.I, **kwargs)
if cmap:
I = self.data.I
if not 'norm' in kwargs:
vmin = I[(I>0)].min()
vmax = np.nanmax(I)
kwargs['norm'] = LogNorm(vmin, vmax, clip=True)
# suppress warning for wrongly sorted y-axis pixels (blades overlap)
with warnings.catch_warnings(action='ignore', category=UserWarning):
self._graph = plt.pcolormesh(self.lamda, self.alphaF, I, **kwargs)
if cmap:
if self.is_normalized:
plt.colorbar(label='R')
else:
plt.colorbar(label='I / cpm')
plt.xlabel('$\\lambda$ / $\\AA$')
plt.ylabel('$\\Theta$ / °')
plt.xlim(3., 12.)
af = self.alphaF[self.data.mask]
plt.ylim(af.min(), af.max())
plt.title('Wavelength vs. Reflection Angle')
self._graph_axis = plt.gca()
plt.connect('button_press_event', self.draw_qline)
def update_plot(self):
"""
Inline update of previous plot by just updating the data.
"""
from matplotlib.colors import LogNorm
if self.is_normalized:
I = self.data.ref
else:
I = self.data.I
if isinstance(self._graph.norm, LogNorm):
vmin = I[(I>0)].min()*0.5
else:
vmin = 0
vmax = np.nanmax(I)
self._graph.set_array(I)
self._graph.norm.vmin = vmin
self._graph.norm.vmax = vmax
if self.is_normalized:
self._graph.set_array(self.data.ref)
else:
self._graph.set_array(self.data.I)
def draw_qline(self, event):
    """
    Mouse handler on the lambda-theta map: a left click draws a constant-q
    line through the origin and the clicked point, labelled with the q-value;
    a right click removes all drawn lines and labels. Clicks are ignored
    while a toolbar tool (zoom/pan) is active.
    """
    if event.inaxes is not self._graph_axis:
        return
    from matplotlib import pyplot as plt
    axis = self._graph_axis
    if axis.figure.canvas.manager.toolbar.mode!='':
        # a toolbar tool is active, do not react to the click
        return
    if event.button is plt.MouseButton.LEFT:
        slope = event.ydata/event.xdata
        xmax = 12.5
        axis.plot([0, xmax], [0, slope*xmax], '-', color='grey')
        axis.text(event.xdata, event.ydata,
                  f'q={np.deg2rad(slope)*4.*np.pi:.3f}', backgroundcolor='white')
        plt.draw()
    elif event.button is plt.MouseButton.RIGHT:
        for artist in list(axis.lines)+list(axis.texts):
            artist.remove()
        plt.draw()
ONLY_MAP = ['colorbar', 'cmap', 'norm']
class ReflectivityProjector(ProjectionInterface):
    """
    Wraps an LZProjection and reduces it to a 1d reflectivity curve R(Q)
    every time new events are projected.
    """
    lzprojection: LZProjection
    data: ProjectedReflectivity
    # TODO: maybe implement direct 1d projection in here

    def __init__(self, lzprojection, norm):
        self.lzprojection = lzprojection
        self.norm = norm

    def project(self, dataset: EventDatasetProtocol, monitor: float):
        """Add the events to the underlying map and re-project onto qz."""
        self.lzprojection.project(dataset, monitor)
        self.lzprojection.normalize_over_illuminated(self.norm)
        self.data = self.lzprojection.project_on_qz()

    def clear(self):
        """Reset the underlying lambda-theta projection."""
        self.lzprojection.clear()

    def plot(self, **kwargs):
        """Errorbar graph of R(Q); color-map-only kwargs are dropped."""
        from matplotlib import pyplot as plt
        # remove arguments that only apply to 2d color maps
        kwargs = {ki: vi for ki, vi in kwargs.items() if ki not in ONLY_MAP}
        self._graph = plt.errorbar(self.data.Q, self.data.R,
                                   xerr=self.data.dQ, yerr=self.data.dR, **kwargs)
        self._graph_axis = plt.gca()
        plt.title('Reflectivity (might be improperly normalized)')
        plt.yscale('log')
        plt.xlabel('Q / $\\AA^{-1}$')
        plt.ylabel('R')

    def update_plot(self):
        """Move the existing errorbar artists to the current data values."""
        line, _, (barsx, barsy) = self._graph
        upper = self.data.R+self.data.dR
        lower = self.data.R-self.data.dR
        right = self.data.Q+self.data.dQ
        left = self.data.Q-self.data.dQ
        barsx.set_segments([np.array([[xr, yi], [xl, yi]])
                            for xr, xl, yi in zip(right, left, self.data.R)])
        barsy.set_segments([np.array([[xi, yu], [xi, yl]])
                            for xi, yu, yl in zip(self.data.Q, upper, lower)])
        line.set_ydata(self.data.R)
class YZProjection(ProjectionInterface):
    """
    2d histogram of event counts on detector pixels: horizontal stripe (Y)
    vs. vertical pixel (Z), normalized by the accumulated monitor.
    """
    y: np.ndarray
    z: np.ndarray
    data: np.recarray
    # per-bin record: raw counts, monitor-normalized intensity and its error
    _dtype = np.dtype([
        ('cts', np.float64),
        ('I', np.float64),
        ('err', np.float64),
    ])
    def __init__(self):
        # bin edges centered on the integer pixel indices
        self.z = np.arange(Detector.nBlades*Detector.nWires+1)-0.5
        self.y = np.arange(Detector.nStripes+1)-0.5
        self.data = np.zeros((self.y.shape[0]-1, self.z.shape[0]-1), dtype=self._dtype).view(np.recarray)
        self.monitor = 0.
    def project(self, dataset: EventDatasetProtocol, monitor: float):
        """
        Project the dataset pixels on the y/z grid and add to intensity.
        Can be called multiple times to sequentially add events.
        """
        # look up per-event pixel coordinates; only detYi/detZi are used here
        detYi, detZi, detX, delta = Detector.pixelLookUp[dataset.data.events.pixelID-1].T
        cts , *_ = np.histogram2d(detYi, detZi, bins=(self.y, self.z))
        self.data.cts += cts
        self.monitor += monitor
        # re-derive normalized intensity and counting error from total counts
        self.data.I = self.data.cts / self.monitor
        self.data.err = np.sqrt(self.data.cts) / self.monitor
    def clear(self):
        """Reset all accumulated counts and the monitor."""
        self.data[:] = 0
        self.monitor = 0.
    def plot(self, **kwargs):
        """
        Create a color map of intensity vs. (Y, Z). Pass colorbar=True to add
        a colorbar; a 'norm' kwarg overrides the default log color scaling.
        """
        from matplotlib import pyplot as plt
        from matplotlib.colors import LogNorm
        if 'colorbar' in kwargs:
            cmap=True
            del(kwargs['colorbar'])
        else:
            cmap=False
        vmax = self.data.I.max()
        if not 'norm' in kwargs:
            # scale from half the smallest positive intensity up to the maximum
            vmin = self.data.I[(self.data.I>0)].min()*0.5
            kwargs['norm'] = LogNorm(vmin, vmax)
        self._graph = plt.pcolormesh(self.y, self.z, self.data.I.T, **kwargs)
        if cmap:
            plt.colorbar(label='I / cpm')
        plt.xlabel('Y')
        plt.ylabel('Z')
        plt.xlim(self.y[0], self.y[-1])
        # inverted z-axis: pixel 0 is drawn at the top
        plt.ylim(self.z[-1], self.z[0])
        plt.title('Horizontal Pixel vs. Vertical Pixel')
        self._graph_axis = plt.gca()
        plt.connect('button_press_event', self.draw_yzcross)
    def update_plot(self):
        """
        Inline update of previous plot by just updating the data.
        """
        from matplotlib.colors import LogNorm
        # for log scaling keep vmin above zero, otherwise start at zero
        if isinstance(self._graph.norm, LogNorm):
            vmin = self.data.I[(self.data.I>0)].min()*0.5
        else:
            vmin = 0
        vmax = self.data.I.max()
        self._graph.set_array(self.data.I.T)
        self._graph.norm.vmin = vmin
        self._graph.norm.vmax = vmax
    def draw_yzcross(self, event):
        """
        Mouse handler: left click draws a crosshair with a coordinate label,
        right click removes all markers. Ignored while zoom/pan is active.
        """
        if event.inaxes is not self._graph_axis:
            return
        from matplotlib import pyplot as plt
        tbm = self._graph_axis.figure.canvas.manager.toolbar.mode
        if event.button is plt.MouseButton.LEFT and tbm=='':
            self._graph_axis.plot([event.xdata, event.xdata], [self.z[0], self.z[-1]], '-', color='grey')
            self._graph_axis.plot([self.y[0], self.y[-1]], [event.ydata, event.ydata], '-', color='grey')
            self._graph_axis.text(event.xdata, event.ydata, f'({event.xdata:.1f}, {event.ydata:.1f})', backgroundcolor='white')
            plt.draw()
        if event.button is plt.MouseButton.RIGHT and tbm=='':
            for art in list(self._graph_axis.lines)+list(self._graph_axis.texts):
                art.remove()
            plt.draw()
class YTProjection(YZProjection):
    """
    Variant of YZProjection that labels the vertical axis with the
    reflection angle theta instead of the pixel number.
    """
    # angular bin edges corresponding to the vertical pixel edges
    theta: np.ndarray
    def __init__(self, tthh: float):
        # extend the per-pixel angular offsets by one edge and center on pixels;
        # tthh is the detector center angle the offsets are added to
        dd = Detector.delta_z[1]-Detector.delta_z[0]
        delta = np.hstack([Detector.delta_z, Detector.delta_z[-1]+dd])-dd/2.
        self.theta = tthh + delta
        super().__init__()
    def plot(self, **kwargs):
        """
        Color map of intensity vs. (Y, theta). Pass colorbar=True to add a
        colorbar; a 'norm' kwarg overrides the default log color scaling.
        """
        from matplotlib import pyplot as plt
        from matplotlib.colors import LogNorm
        if 'colorbar' in kwargs:
            cmap=True
            del(kwargs['colorbar'])
        else:
            cmap=False
        if not 'norm' in kwargs:
            kwargs['norm'] = LogNorm()
        self._graph = plt.pcolormesh(self.y, self.theta, self.data.I.T, **kwargs)
        if cmap:
            plt.colorbar(label='I / cpm')
        plt.xlabel('Y')
        plt.ylabel('Theta / °')
        plt.xlim(self.y[0], self.y[-1])
        # inverted axis to keep the same orientation as YZProjection
        plt.ylim(self.theta[-1], self.theta[0])
        plt.title('Horizontal Pixel vs. Angle')
        self._graph_axis = plt.gca()
        plt.connect('button_press_event', self.draw_tzcross)
    def draw_tzcross(self, event):
        """
        Mouse handler: left click draws a crosshair with a coordinate label,
        right click removes all markers. Ignored while zoom/pan is active.
        """
        if event.inaxes is not self._graph_axis:
            return
        from matplotlib import pyplot as plt
        tbm = self._graph_axis.figure.canvas.manager.toolbar.mode
        if event.button is plt.MouseButton.LEFT and tbm=='':
            self._graph_axis.plot([event.xdata, event.xdata], [self.theta[0], self.theta[-1]], '-', color='grey')
            self._graph_axis.plot([self.y[0], self.y[-1]], [event.ydata, event.ydata], '-', color='grey')
            self._graph_axis.text(event.xdata, event.ydata, f'({event.xdata:.1f}, {event.ydata:.1f})', backgroundcolor='white')
            plt.draw()
        if event.button is plt.MouseButton.RIGHT and tbm=='':
            for art in list(self._graph_axis.lines)+list(self._graph_axis.texts):
                art.remove()
            plt.draw()
class TofZProjection(ProjectionInterface):
    """
    2d histogram of event counts: time of flight vs. vertical detector
    pixel (Z), normalized by the accumulated monitor.
    """
    tof: np.ndarray
    z: np.ndarray
    data: np.recarray
    # per-bin record: raw counts, monitor-normalized intensity and its error
    _dtype = np.dtype([
        ('cts', np.float64),
        ('I', np.float64),
        ('err', np.float64),
    ])
    def __init__(self, tau, foldback=False):
        self.z = np.arange(Detector.nBlades*Detector.nWires+1)-0.5
        # with frame fold-back all ToF values fall within one pulse period tau,
        # otherwise events may arrive within two periods; bin width 0.0005
        if foldback:
            self.tof = np.arange(0, tau, 0.0005)
        else:
            self.tof = np.arange(0, 2*tau, 0.0005)
        self.data = np.zeros((self.tof.shape[0]-1, self.z.shape[0]-1), dtype=self._dtype).view(np.recarray)
        self.monitor = 0.
    def project(self, dataset: EventDatasetProtocol, monitor: float):
        """
        Project the dataset on the ToF/z grid and add to intensity.
        Can be called multiple times to sequentially add events.
        """
        # look up per-event pixel coordinates; only detZi is used here
        detYi, detZi, detX, delta = Detector.pixelLookUp[dataset.data.events.pixelID-1].T
        cts , *_ = np.histogram2d(dataset.data.events.tof, detZi, bins=(self.tof, self.z))
        self.data.cts += cts
        self.monitor += monitor
        # re-derive normalized intensity and counting error from total counts
        self.data.I = self.data.cts / self.monitor
        self.data.err = np.sqrt(self.data.cts) / self.monitor
    def clear(self):
        """Reset all accumulated counts and the monitor."""
        self.data[:] = 0
        self.monitor = 0.
    def plot(self, **kwargs):
        """
        Color map of intensity vs. (ToF, Z). Pass colorbar=True to add a
        colorbar; a 'norm' kwarg overrides the default log color scaling.
        """
        from matplotlib import pyplot as plt
        from matplotlib.colors import LogNorm
        if 'colorbar' in kwargs:
            cmap=True
            del(kwargs['colorbar'])
        else:
            cmap=False
        if not 'norm' in kwargs:
            kwargs['norm'] = LogNorm()
        # ToF axis scaled by 1e3 to match the ms axis label
        self._graph = plt.pcolormesh(self.tof*1e3, self.z, self.data.I.T, **kwargs)
        if cmap:
            plt.colorbar(label='I / cpm')
        plt.xlabel('Time of Flight / ms')
        plt.ylabel('Z')
        plt.xlim(self.tof[0]*1e3, self.tof[-1]*1e3)
        # inverted z-axis: pixel 0 is drawn at the top
        plt.ylim(self.z[-1], self.z[0])
        plt.title('Time of Flight vs. Vertical Pixel')
        self._graph_axis = plt.gca()
        plt.connect('button_press_event', self.draw_tzcross)
    def update_plot(self):
        """
        Inline update of previous plot by just updating the data.
        """
        from matplotlib.colors import LogNorm
        # for log scaling keep vmin above zero, otherwise start at zero
        if isinstance(self._graph.norm, LogNorm):
            vmin = self.data.I[(self.data.I>0)].min()*0.5
        else:
            vmin = 0
        vmax = self.data.I.max()
        self._graph.set_array(self.data.I.T)
        self._graph.norm.vmin = vmin
        self._graph.norm.vmax = vmax
    def draw_tzcross(self, event):
        """
        Mouse handler: left click draws a crosshair with a coordinate label,
        right click removes all markers. Ignored while zoom/pan is active.
        """
        if event.inaxes is not self._graph_axis:
            return
        from matplotlib import pyplot as plt
        tbm = self._graph_axis.figure.canvas.manager.toolbar.mode
        if event.button is plt.MouseButton.LEFT and tbm=='':
            self._graph_axis.plot([event.xdata, event.xdata], [self.z[0], self.z[-1]], '-', color='grey')
            self._graph_axis.plot([self.tof[0]*1e3, self.tof[-1]*1e3], [event.ydata, event.ydata], '-', color='grey')
            self._graph_axis.text(event.xdata, event.ydata, f'({event.xdata:.2f}, {event.ydata:.1f})', backgroundcolor='white')
            plt.draw()
        if event.button is plt.MouseButton.RIGHT and tbm=='':
            for art in list(self._graph_axis.lines)+list(self._graph_axis.texts):
                art.remove()
            plt.draw()
class TofProjection(ProjectionInterface):
    """
    1d histogram of event counts vs. time of flight, normalized by the
    accumulated monitor.
    """
    tof: np.ndarray
    data: np.recarray
    # per-bin record: raw counts, monitor-normalized intensity and its error
    _dtype = np.dtype([
        ('cts', np.float64),
        ('I', np.float64),
        ('err', np.float64),
    ])

    def __init__(self, tau, foldback=False):
        # with frame fold-back all ToF values fall within one pulse period tau,
        # otherwise events may arrive within two periods; bin width 0.0005
        tof_range = tau if foldback else 2*tau
        self.tof = np.arange(0, tof_range, 0.0005)
        self.data = np.zeros(self.tof.shape[0]-1, dtype=self._dtype).view(np.recarray)
        self.monitor = 0.

    def project(self, dataset: EventDatasetProtocol, monitor: float):
        """Histogram the event ToF values and update the normalized rate."""
        cts, *_ = np.histogram(dataset.data.events.tof, bins=self.tof)
        self.data.cts += cts
        self.monitor += monitor
        # re-derive normalized intensity and counting error from total counts
        self.data.I = self.data.cts/self.monitor
        self.data.err = np.sqrt(self.data.cts)/self.monitor

    def clear(self):
        """Reset all accumulated counts and the monitor."""
        self.data[:] = 0
        self.monitor = 0.

    def plot(self, **kwargs):
        """Line plot of intensity vs. ToF; color-map-only kwargs are dropped."""
        from matplotlib import pyplot as plt
        kwargs = {ki: vi for ki, vi in kwargs.items() if ki not in ONLY_MAP}
        self._graph = plt.plot(self.tof[:-1]*1e3, self.data.I, **kwargs)
        plt.xlabel('Time of Flight / ms')
        plt.ylabel('I / cpm')
        plt.xlim(self.tof[0]*1e3, self.tof[-1]*1e3)
        plt.title('Time of Flight')

    def update_plot(self):
        """
        Inline update of previous plot by just updating the data.
        """
        self._graph[0].set_ydata(self.data.I.T)
class LProjection(ProjectionInterface):
    """
    1d histogram of event counts vs. wavelength (3-12 A in 0.1 A steps),
    normalized by the accumulated monitor.
    """
    lamda: np.ndarray
    data: np.recarray
    # per-bin record: raw counts, monitor-normalized intensity and its error
    _dtype = np.dtype([
        ('cts', np.float64),
        ('I', np.float64),
        ('err', np.float64),
    ])

    def __init__(self):
        self.lamda = np.linspace(3.0, 12.0, 91)
        nbins = self.lamda.shape[0]-1
        self.data = np.zeros(nbins, dtype=self._dtype).view(np.recarray)
        self.monitor = 0.

    def project(self, dataset: EventDatasetProtocol, monitor: float):
        """Histogram the event wavelengths and update the normalized rate."""
        cts, *_ = np.histogram(dataset.data.events.lamda, bins=self.lamda)
        self.data.cts += cts
        self.monitor += monitor
        # re-derive normalized intensity and counting error from total counts
        self.data.I = self.data.cts/self.monitor
        self.data.err = np.sqrt(self.data.cts)/self.monitor

    def clear(self):
        """Reset all accumulated counts and the monitor."""
        self.data[:] = 0
        self.monitor = 0.

    def plot(self, **kwargs):
        """Line plot of intensity vs. wavelength; color-map-only kwargs are dropped."""
        from matplotlib import pyplot as plt
        kwargs = {ki: vi for ki, vi in kwargs.items() if ki not in ONLY_MAP}
        self._graph = plt.plot(self.lamda[:-1], self.data.I, **kwargs)
        plt.xlabel('Wavelength / Angstrom')
        plt.ylabel('I / cpm')
        plt.xlim(self.lamda[0], self.lamda[-1])
        plt.title('Wavelength')

    def update_plot(self):
        """
        Inline update of previous plot by just updating the data.
        """
        self._graph[0].set_ydata(self.data.I.T)
class TProjection(ProjectionInterface):
    """
    1d histogram of event counts vs. reflection angle theta, derived from
    the vertical detector pixel of each event.
    """
    theta: np.ndarray
    z: np.ndarray
    data: np.recarray
    # per-bin record: raw counts, monitor-normalized intensity and its error
    _dtype = np.dtype([
        ('cts', np.float64),
        ('I', np.float64),
        ('err', np.float64),
    ])

    def __init__(self, tthh):
        self.z = np.arange(Detector.nBlades*Detector.nWires+1)-0.5
        # extend the per-pixel angular offsets by one edge, center on pixels
        # and shift by the detector center angle tthh
        pixel_step = Detector.delta_z[1]-Detector.delta_z[0]
        edges = np.hstack([Detector.delta_z, Detector.delta_z[-1]+pixel_step])-pixel_step/2.
        self.theta = tthh+edges
        self.data = np.zeros(self.theta.shape[0]-1, dtype=self._dtype).view(np.recarray)
        self.monitor = 0.

    def project(self, dataset: EventDatasetProtocol, monitor: float):
        """Histogram the vertical pixel of each event and update the rate."""
        # column 1 of the pixel lookup table is the vertical pixel index
        detZi = Detector.pixelLookUp[dataset.data.events.pixelID-1][:, 1]
        cts, *_ = np.histogram(detZi, bins=self.z)
        self.data.cts += cts
        self.monitor += monitor
        # re-derive normalized intensity and counting error from total counts
        self.data.I = self.data.cts/self.monitor
        self.data.err = np.sqrt(self.data.cts)/self.monitor

    def clear(self):
        """Reset all accumulated counts and the monitor."""
        self.data[:] = 0
        self.monitor = 0.

    def plot(self, **kwargs):
        """Line plot of intensity vs. angle; color-map-only kwargs are dropped."""
        from matplotlib import pyplot as plt
        kwargs = {ki: vi for ki, vi in kwargs.items() if ki not in ONLY_MAP}
        self._graph = plt.plot(self.theta[:-1], self.data.I, **kwargs)
        plt.xlabel('Reflection Angle / °')
        plt.ylabel('I / cpm')
        # inverted axis to match the orientation of the map plots
        plt.xlim(self.theta[-1], self.theta[0])
        plt.title('Theta')

    def update_plot(self):
        """
        Inline update of previous plot by just updating the data.
        """
        self._graph[0].set_ydata(self.data.I.T)
class CombinedProjection(ProjectionInterface):
    """
    Allows to put multiple projections together to conveniently generate combined graphs.

    Each sub-projection is placed on a gridspec of grid_rows x grid_cols cells,
    either at a single (row, col) position or spanning
    (row_start, row_stop, col_start, col_stop).
    """
    projections: List[ProjectionInterface]
    projection_placements: List[Union[Tuple[int, int], Tuple[int, int, int, int]]]
    grid_size: Tuple[int, int]

    def __init__(self, grid_rows, grid_cols, projections, projection_placements):
        self.projections = projections
        self.projection_placements = projection_placements
        self.grid_size = grid_rows, grid_cols

    def project(self, dataset: EventDatasetProtocol, monitor: float):
        """Forward the events to every sub-projection."""
        for pi in self.projections:
            pi.project(dataset, monitor)

    def clear(self):
        """Reset every sub-projection."""
        for pi in self.projections:
            pi.clear()

    def plot(self, **kwargs):
        """Create one subplot per sub-projection according to its placement."""
        from matplotlib import pyplot as plt
        fig = plt.gcf()
        axs = fig.add_gridspec(self.grid_size[0], self.grid_size[1])
        # axs = fig.add_gridspec(self.grid_size[0]+1, self.grid_size[1],
        #                        height_ratios=[1.0 for i in range(self.grid_size[0])]+[0.2])
        self._axes = []
        for pi, placement in zip(self.projections, self.projection_placements):
            if len(placement) == 2:
                ax = fig.add_subplot(axs[placement[0], placement[1]])
            else:
                ax = fig.add_subplot(axs[placement[0]:placement[1], placement[2]:placement[3]])
            # BUGFIX: the axes were created but never stored, leaving
            # self._axes permanently empty
            self._axes.append(ax)
            # pass a copy so a sub-plot cannot modify the kwargs of the next one
            pi.plot(**dict(kwargs))
        # Create the RangeSlider
        # from matplotlib.widgets import RangeSlider
        # slider_ax = fig.add_subplot(axs[self.grid_size[0], :])
        # self._slider = RangeSlider(slider_ax, "Plot Range", 0., 1., valinit=(0., 1.))
        # self._slider.on_changed(self.update_range)

    def update_plot(self):
        """Refresh every sub-projection's graph in place."""
        for pi in self.projections:
            pi.update_plot()

    # def update_range(self, event):
    #     ...

301
eos/reduction_e2h.py Normal file
View File

@@ -0,0 +1,301 @@
"""
Events 2 histogram, quick reduction of single file to display during experiment.
Can be used as a live preview with automatic update when files are modified.
"""
import logging
import os
import numpy as np
import matplotlib.pyplot as plt
from matplotlib.colors import ListedColormap
from orsopy import fileio
from datetime import datetime
from .file_reader import AmorEventData, AmorHeader
from .header import Header
from .instrument import LZGrid
from .normalization import LZNormalisation
from .options import E2HConfig, E2HPlotArguments, IncidentAngle, MonitorType, E2HPlotSelection
from . import event_handling as eh
from .path_handling import PathResolver
from .projection import CombinedProjection, LProjection, LZProjection, ProjectionInterface, ReflectivityProjector, \
TofProjection, TofZProjection, TProjection, YTProjection, YZProjection
NEEDS_LAMDA = (E2HPlotSelection.All, E2HPlotSelection.LT, E2HPlotSelection.Q, E2HPlotSelection.L)
class E2HReduction:
    """
    Events to histogram: quick reduction of single file(s) to display during
    an experiment, optionally auto-updating while the data file is written.
    """
    config: E2HConfig
    header: Header
    event_actions: eh.EventDataAction
    # modification time of the file at the last live update
    _last_mtime = 0.
    projection: ProjectionInterface

    def __init__(self, config: E2HConfig):
        self.config = config
        self.header = Header()
        self.fig = plt.figure()
        self.register_colormap()
        self.prepare_actions()

    def prepare_actions(self):
        """
        Prepare the actions applied to each event dataset in dependence of
        the requested plot. Does not do any actual reduction.
        """
        self.path_resolver = PathResolver(self.config.reader.year, self.config.reader.rawPath)
        self.file_list = self.path_resolver.resolve(self.config.reduction.fileIdentifier)
        self.file_index = 0
        self.plot_kwds = {}
        plt.rcParams.update({'font.size': self.config.reduction.fontsize})
        if self.config.reduction.update:
            # live update implies plotting
            self.config.reduction.show_plot = True
        if self.config.reduction.plot==E2HPlotSelection.Raw:
            # Raw implies fast calculations
            self.config.reduction.fast = True
        if not self.config.reduction.fast or self.config.reduction.plot in NEEDS_LAMDA:
            from . import event_analysis as ea
        # Actions on datasets not used for normalization
        self.event_actions = eh.ApplyPhaseOffset(self.config.experiment.chopperPhaseOffset)
        if not self.config.reduction.fast:
            self.event_actions |= eh.CorrectChopperPhase()
            self.event_actions |= ea.ExtractWalltime()
        else:
            logging.info(' Fast reduction always uses time normalization')
            self.config.experiment.monitorType = MonitorType.time
        self.event_actions |= eh.AssociatePulseWithMonitor(self.config.experiment.monitorType)
        if self.config.experiment.monitorType in [MonitorType.proton_charge, MonitorType.debug]:
            # the filtering only makes sense if using actual monitor data, not time
            self.event_actions |= eh.FilterMonitorThreshold(self.config.experiment.lowCurrentThreshold)
        if not self.config.reduction.fast:
            self.event_actions |= eh.FilterStrangeTimes()
            if self.config.reduction.plot in [E2HPlotSelection.YT, E2HPlotSelection.YZ]:
                # perform time fold-back and apply yRange filter if not fast mode
                self.event_actions |= ea.MergeFrames()
                self.event_actions |= ea.AnalyzePixelIDs(self.config.experiment.yRange)
                if self.config.reduction.plot==E2HPlotSelection.YT:
                    # perform corrections for tof if not fast mode
                    self.event_actions |= eh.TofTimeCorrection(self.config.experiment.incidentAngle==IncidentAngle.alphaF)
        # select needed actions in dependence of plots
        if self.config.reduction.plot in NEEDS_LAMDA:
            self.event_actions |= ea.MergeFrames()
            self.event_actions |= ea.AnalyzePixelIDs(self.config.experiment.yRange)
            self.event_actions |= eh.TofTimeCorrection(self.config.experiment.incidentAngle==IncidentAngle.alphaF)
            self.event_actions |= ea.CalculateWavelength(self.config.experiment.lambdaRange)
            self.event_actions |= eh.ApplyMask()
        # plot dependent options
        if self.config.reduction.plot in [E2HPlotSelection.All, E2HPlotSelection.LT, E2HPlotSelection.Q]:
            self.grid = LZGrid(0.01, [0.0, 0.25])
        if self.config.reduction.plot in [E2HPlotSelection.All, E2HPlotSelection.Raw,
                                          E2HPlotSelection.LT, E2HPlotSelection.YT,
                                          E2HPlotSelection.YZ, E2HPlotSelection.TZ]:
            self.plot_kwds['colorbar'] = True
        # line plots remove map-only keys themselves (see ONLY_MAP)
        self.plot_kwds['cmap'] = str(self.config.reduction.plot_colormap)
        if self.config.reduction.plotArgs==E2HPlotArguments.Linear:
            self.plot_kwds['norm'] = None

    def reduce(self):
        """
        Run the reduction: build the normalization (when needed), project all
        files, create the graph and optionally start the live-update timer.
        """
        if self.config.reduction.plot in [E2HPlotSelection.All, E2HPlotSelection.LT, E2HPlotSelection.Q]:
            if self.config.reduction.normalizationModel:
                self.norm = LZNormalisation.model(self.grid)
            else:
                self.norm = LZNormalisation.unity(self.grid)
        self.prepare_graphs()
        while self.file_index < len(self.file_list):
            self.read_data()
            self.add_data()
        if self.config.reduction.plotArgs==E2HPlotArguments.OutputFile:
            self.create_file_output()
        if self.config.reduction.plotArgs!=E2HPlotArguments.OutputFile or self.config.reduction.show_plot:
            self.create_graph()
            if self.config.reduction.plotArgs==E2HPlotArguments.Default and not self.config.reduction.update:
                # save to image file if not auto-updating graph
                plt.savefig(f'e2h_{self.config.reduction.plot}.png', dpi=300)
        if self.config.reduction.update:
            self.timer = self.fig.canvas.new_timer(1000)
            self.timer.add_callback(self.update)
            self.timer.start()
        if self.config.reduction.show_plot:
            plt.show()

    def register_colormap(self):
        """Register the 'jochen_deluxe' colormap: turbo with whitened zero."""
        cmap = plt.colormaps['turbo'](np.arange(256))
        # map the lowest value to an almost-white background instead of dark blue
        cmap[:1, :] = np.array([256/256, 255/256, 236/256, 1])
        cmap = ListedColormap(cmap, name='jochen_deluxe', N=cmap.shape[0])
        #cmap.set_bad((1.,1.,0.9))
        plt.colormaps.register(cmap)

    def prepare_graphs(self):
        """
        Create the projection object(s) matching the selected plot type, using
        the geometry of the last file in the list.
        """
        last_file_header = AmorHeader(self.file_list[-1])
        tthh = last_file_header.geometry.nu - last_file_header.geometry.mu
        if not self.config.reduction.is_default('thetaRangeR'):
            # adjust range based on detector center
            thetaRange = [ti+tthh for ti in self.config.reduction.thetaRangeR]
        else:
            thetaRange = [tthh - last_file_header.geometry.div/2, tthh + last_file_header.geometry.div/2]
        if self.config.reduction.plot==E2HPlotSelection.LT:
            self.projection = LZProjection(tthh, self.grid)
            if not self.config.reduction.fast:
                self.projection.correct_gravity(last_file_header.geometry.detectorDistance)
            self.projection.apply_lamda_mask(self.config.experiment.lambdaRange)
            self.projection.apply_theta_mask(thetaRange)
            for thi in self.config.reduction.thetaFilters:
                self.projection.apply_theta_filter((thi[0]+tthh, thi[1]+tthh))
            self.projection.apply_norm_mask(self.norm)
        if self.config.reduction.plot==E2HPlotSelection.Q:
            plz = LZProjection(tthh, self.grid)
            if not self.config.reduction.fast:
                plz.correct_gravity(last_file_header.geometry.detectorDistance)
            plz.calculate_q()
            plz.apply_lamda_mask(self.config.experiment.lambdaRange)
            plz.apply_theta_mask(thetaRange)
            for thi in self.config.reduction.thetaFilters:
                # BUGFIX: was self.projection.apply_theta_filter(...), but
                # self.projection is not assigned yet in this branch; filter
                # the local projection like the 'All' branch does
                plz.apply_theta_filter((thi[0]+tthh, thi[1]+tthh))
            plz.apply_norm_mask(self.norm)
            self.projection = ReflectivityProjector(plz, self.norm)
        if self.config.reduction.plot==E2HPlotSelection.YZ:
            self.projection = YZProjection()
        if self.config.reduction.plot==E2HPlotSelection.YT:
            self.projection = YTProjection(tthh)
        if self.config.reduction.plot==E2HPlotSelection.T:
            self.projection = TProjection(tthh)
        if self.config.reduction.plot==E2HPlotSelection.L:
            self.projection = LProjection()
        if self.config.reduction.plot==E2HPlotSelection.TZ:
            self.projection = TofZProjection(last_file_header.timing.tau, foldback=not self.config.reduction.fast)
        if self.config.reduction.plot==E2HPlotSelection.ToF:
            self.projection = TofProjection(last_file_header.timing.tau, foldback=not self.config.reduction.fast)
        if self.config.reduction.plot==E2HPlotSelection.All:
            plz = LZProjection(tthh, self.grid)
            if not self.config.reduction.fast:
                plz.correct_gravity(last_file_header.geometry.detectorDistance)
            plz.calculate_q()
            plz.apply_lamda_mask(self.config.experiment.lambdaRange)
            plz.apply_theta_mask(thetaRange)
            for thi in self.config.reduction.thetaFilters:
                plz.apply_theta_filter((thi[0]+tthh, thi[1]+tthh))
            plz.apply_norm_mask(self.norm)
            pr = ReflectivityProjector(plz, self.norm)
            pyz = YZProjection()
            self.projection = CombinedProjection(3, 2, [plz, pyz, pr],
                                                 [(0, 2, 0, 1), (0, 2, 1, 2), (2, 3, 0, 2)])
        if self.config.reduction.plot==E2HPlotSelection.Raw:
            del(self.plot_kwds['colorbar'])
            # A combined graph that does not require longer calculations
            pyt = YTProjection(tthh)
            ptofz = TofZProjection(last_file_header.timing.tau, foldback=not self.config.reduction.fast)
            ptof = TofProjection(last_file_header.timing.tau, foldback=not self.config.reduction.fast)
            # renamed from 'plt' to avoid shadowing the module-level pyplot import
            pt = TProjection(tthh)
            self.projection = CombinedProjection(3, 3, [pyt, ptofz, pt, ptof],
                                                 [(0, 2, 0, 1), (0, 2, 1, 3), (2, 3, 0, 1), (2, 3, 1, 3)])

    def read_data(self):
        """Read the next event file (or chunk of it) and apply the actions."""
        fileName = self.file_list[self.file_index]
        self.dataset = AmorEventData(fileName, max_events=self.config.reduction.max_events)
        if self.dataset.EOF or fileName==self.file_list[-1]:
            # file fully consumed (or last file reached), advance to the next one
            self.file_index += 1
        self.event_actions(self.dataset)
        self.dataset.update_header(self.header)
        # use os.path.basename instead of split('/') so Windows paths work, too
        self.header.measurement_data_files.append(fileio.File(file=os.path.basename(fileName),
                                                              timestamp=self.dataset.fileDate))

    def add_data(self):
        """Project the current dataset; LT maps are re-normalized afterwards."""
        self.monitor = self.dataset.data.pulses.monitor.sum()
        self.projection.project(self.dataset, monitor=self.monitor)
        if self.config.reduction.plot==E2HPlotSelection.LT:
            self.projection.normalize_over_illuminated(self.norm)

    def create_file_output(self):
        raise NotImplementedError("Export to text output not yet implemented")

    def create_title(self):
        """Build the figure title from file numbers, angles and, when live, the time."""
        output = "Events to Histogram - "
        output += ",".join(["#"+os.path.basename(fi)[9:15].lstrip('0') for fi in self.file_list])
        output += f" ($\\mu$={self.dataset.geometry.mu:.2f} ;"
        output += f" $\\nu$={self.dataset.geometry.nu:.2f})"
        if self.config.reduction.update:
            output += f"\n at "+datetime.now().strftime("%m/%d/%Y %H:%M:%S")
        return output

    def create_graph(self):
        """Create the figure for the configured projection."""
        plt.suptitle(self.create_title())
        self.projection.plot(**self.plot_kwds)
        plt.tight_layout(pad=0.5)

    def replace_dataset(self, latest):
        """Switch the live view to a newly appeared measurement file."""
        new_files = self.path_resolver.resolve(f'{latest}')
        if not os.path.exists(new_files[-1]):
            return
        try:
            # check that events exist in the new file
            AmorEventData(new_files[-1], 0, max_events=1000)
        except Exception:
            logging.debug("Problem when trying to load new dataset", exc_info=True)
            return
        logging.warning(f"Proceeding to next file {latest}")
        self.file_list = new_files
        self.file_index = 0
        self.prepare_actions()
        self.prepare_graphs()
        self.read_data()
        self.add_data()
        self.fig.clear()
        self.create_graph()
        plt.draw()

    def update(self):
        """Timer callback: poll for new data or a new file and refresh the graph."""
        logging.debug(" check for update")
        if self.config.reduction.fileIdentifier=='0':
            # if latest file was chosen, check if a new one is available and switch to it
            current = int(os.path.basename(self.file_list[-1])[9:15])
            latest = self.path_resolver.search_latest(0)
            if latest>current:
                self.replace_dataset(latest)
                return
        # if all events were read last time, only load more if file was modified
        if self.dataset.EOF and os.path.getmtime(self.file_list[-1])<=self._last_mtime:
            return
        self._last_mtime = os.path.getmtime(self.file_list[-1])
        try:
            update_data = AmorEventData(self.file_list[-1], self.dataset.last_index+1,
                                        max_events=self.config.reduction.max_events)
        except EOFError:
            return
        logging.info(" updating with new data")
        self.event_actions(update_data)
        self.dataset = update_data
        self.monitor = self.dataset.data.pulses.monitor.sum()
        self.projection.project(update_data, self.monitor)
        self.projection.update_plot()
        plt.suptitle(self.create_title())
        plt.draw()

View File

@@ -8,27 +8,21 @@ from orsopy import fileio
from .file_reader import AmorEventData
from .header import Header
from .path_handling import PathResolver
from .options import EOSConfig, IncidentAngle, MonitorType, NormalisationMethod
from .instrument import Detector, LZGrid
from .options import ReflectivityConfig, IncidentAngle, MonitorType, NormalisationMethod, MONITOR_UNITS
from .instrument import LZGrid
from .normalization import LZNormalisation
from . import event_handling as eh, event_analysis as ea
from .projection import LZProjection
MONITOR_UNITS = {
MonitorType.neutron_monitor: 'cnts',
MonitorType.proton_charge: 'mC',
MonitorType.time: 's',
MonitorType.auto: 'various',
MonitorType.debug: 'mC',
}
class AmorReduction:
def __init__(self, config: EOSConfig):
self.experiment_config = config.experiment
self.reader_config = config.reader
self.reduction_config = config.reduction
self.output_config = config.output
class ReflectivityReduction:
config: ReflectivityConfig
header: Header
normevent_actions: eh.EventDataAction
dataevent_actions: eh.EventDataAction
def __init__(self, config: ReflectivityConfig):
self.config = config
self.header = Header()
self.header.reduction.call = config.call_string()
@@ -37,61 +31,63 @@ class AmorReduction:
def prepare_actions(self):
"""
Does not do any actual reduction.
Prepare the actions applied to each event dataset, does not do any actual reduction.
"""
self.path_resolver = PathResolver(self.reader_config.year, self.reader_config.rawPath)
self.path_resolver = PathResolver(self.config.reader.year, self.config.reader.rawPath)
# setup all actions performed on event datasets before projection on the grid
# The order of these corrections matter as some rely on parameters modified before
if self.reduction_config.normalisationFileIdentifier:
if self.config.reduction.normalisationFileIdentifier:
# explicit steps performed on AmorEventDataset for normalization files
self.normevent_actions = eh.ApplyPhaseOffset(self.experiment_config.chopperPhaseOffset)
self.normevent_actions = eh.ApplyPhaseOffset(self.config.experiment.chopperPhaseOffset)
self.normevent_actions |= eh.CorrectChopperPhase()
if self.experiment_config.monitorType in [MonitorType.proton_charge, MonitorType.debug]:
self.normevent_actions |= eh.AssociatePulseWithMonitor(self.config.experiment.monitorType)
if self.config.experiment.monitorType in [MonitorType.proton_charge, MonitorType.debug]:
self.normevent_actions |= ea.ExtractWalltime()
self.normevent_actions |= eh.AssociatePulseWithMonitor(self.experiment_config.monitorType,
self.experiment_config.lowCurrentThreshold)
self.normevent_actions |= eh.FilterMonitorThreshold(self.config.experiment.lowCurrentThreshold)
self.normevent_actions |= eh.FilterStrangeTimes()
self.normevent_actions |= ea.MergeFrames()
self.normevent_actions |= ea.AnalyzePixelIDs(self.experiment_config.yRange)
self.normevent_actions |= eh.TofTimeCorrection(self.experiment_config.incidentAngle==IncidentAngle.alphaF)
self.normevent_actions |= ea.CalculateWavelength(self.experiment_config.lambdaRange)
self.normevent_actions |= ea.AnalyzePixelIDs(self.config.experiment.yRange)
self.normevent_actions |= eh.TofTimeCorrection(self.config.experiment.incidentAngle==IncidentAngle.alphaF)
self.normevent_actions |= ea.CalculateWavelength(self.config.experiment.lambdaRange)
self.normevent_actions |= eh.ApplyMask()
# Actions on datasets not used for normalization
self.dataevent_actions = eh.ApplyPhaseOffset(self.experiment_config.chopperPhaseOffset)
self.dataevent_actions |= eh.ApplyParameterOverwrites(self.experiment_config) # some actions use instrument parameters, change before that
self.dataevent_actions = eh.ApplyPhaseOffset(self.config.experiment.chopperPhaseOffset)
self.dataevent_actions |= eh.ApplyParameterOverwrites(self.config.experiment) # some actions use instrument parameters, change before that
self.dataevent_actions |= eh.CorrectChopperPhase()
self.dataevent_actions |= ea.ExtractWalltime()
self.dataevent_time_correction = eh.CorrectSeriesTime(0) # will be set from first dataset
self.dataevent_actions |= self.dataevent_time_correction
self.dataevent_actions |= eh.AssociatePulseWithMonitor(self.experiment_config.monitorType,
self.experiment_config.lowCurrentThreshold)
self.dataevent_actions |= eh.AssociatePulseWithMonitor(self.config.experiment.monitorType)
if self.config.experiment.monitorType in [MonitorType.proton_charge, MonitorType.debug]:
# the filtering only makes sense if using actual monitor data, not time
self.dataevent_actions |= eh.FilterMonitorThreshold(self.config.experiment.lowCurrentThreshold)
self.dataevent_actions |= eh.FilterStrangeTimes()
self.dataevent_actions |= ea.MergeFrames()
self.dataevent_actions |= ea.AnalyzePixelIDs(self.experiment_config.yRange)
self.dataevent_actions |= eh.TofTimeCorrection(self.experiment_config.incidentAngle==IncidentAngle.alphaF)
self.dataevent_actions |= ea.CalculateWavelength(self.experiment_config.lambdaRange)
self.dataevent_actions |= ea.CalculateQ(self.experiment_config.incidentAngle)
self.dataevent_actions |= ea.FilterQzRange(self.reduction_config.qzRange)
self.dataevent_actions |= ea.AnalyzePixelIDs(self.config.experiment.yRange)
self.dataevent_actions |= eh.TofTimeCorrection(self.config.experiment.incidentAngle==IncidentAngle.alphaF)
self.dataevent_actions |= ea.CalculateWavelength(self.config.experiment.lambdaRange)
self.dataevent_actions |= ea.CalculateQ(self.config.experiment.incidentAngle)
self.dataevent_actions |= ea.FilterQzRange(self.config.reduction.qzRange)
self.dataevent_actions |= eh.ApplyMask()
self.grid = LZGrid(self.reduction_config.qResolution, self.reduction_config.qzRange)
self.grid = LZGrid(self.config.reduction.qResolution, self.config.reduction.qzRange)
def reduce(self):
if not os.path.exists(f'{self.output_config.outputPath}'):
logging.debug(f'Creating destination path {self.output_config.outputPath}')
os.system(f'mkdir {self.output_config.outputPath}')
if not os.path.exists(f'{self.config.output.outputPath}'):
logging.debug(f'Creating destination path {self.config.output.outputPath}')
os.system(f'mkdir {self.config.output.outputPath}')
# load or create normalisation matrix
if self.reduction_config.normalisationFileIdentifier:
if self.config.reduction.normalisationFileIdentifier:
# TODO: change option definition to single normalization short_code
self.create_normalisation_map(self.reduction_config.normalisationFileIdentifier[0])
self.create_normalisation_map(self.config.reduction.normalisationFileIdentifier[0])
else:
self.norm = LZNormalisation.unity(self.grid)
# load R(q_z) curve to be subtracted:
if self.reduction_config.subtract:
self.sq_q, self.sR_q, self.sdR_q, self.sFileName = self.loadRqz(self.reduction_config.subtract)
if self.config.reduction.subtract:
self.sq_q, self.sR_q, self.sdR_q, self.sFileName = self.loadRqz(self.config.reduction.subtract)
logging.warning(f'loaded background file: {self.sFileName}')
self.header.reduction.corrections.append(f'background from \'{self.sFileName}\' subtracted')
self.subtract = True
@@ -101,21 +97,21 @@ class AmorReduction:
# load measurement data and do the reduction
self.datasetsRqz = []
self.datasetsRlt = []
for i, short_notation in enumerate(self.reduction_config.fileIdentifier):
for i, short_notation in enumerate(self.config.reduction.fileIdentifier):
self.read_file_block(i, short_notation)
# output
logging.warning('output:')
if 'Rqz.ort' in self.output_config.outputFormats:
if 'Rqz.ort' in self.config.output.outputFormats:
self.save_Rqz()
if 'Rlt.ort' in self.output_config.outputFormats:
if 'Rlt.ort' in self.config.output.outputFormats:
self.save_Rtl()
if self.output_config.plot:
if self.config.output.plot:
import matplotlib.pyplot as plt
if 'Rqz.ort' in self.output_config.outputFormats:
if 'Rqz.ort' in self.config.output.outputFormats:
plt.figure(num=99)
plt.legend()
plt.show()
@@ -127,18 +123,18 @@ class AmorReduction:
self.header.measurement_data_files = []
self.dataset = AmorEventData(file_list[0])
if self.experiment_config.monitorType==MonitorType.auto:
if self.config.experiment.monitorType==MonitorType.auto:
if self.dataset.data.proton_current.current.sum()>1:
self.experiment_config.monitorType = MonitorType.proton_charge
self.config.experiment.monitorType = MonitorType.proton_charge
logging.debug(' monitor type set to "proton current"')
else:
self.experiment_config.monitorType = MonitorType.time
self.config.experiment.monitorType = MonitorType.time
logging.debug(' monitor type set to "time"')
# update actions to use selected monitor
self.prepare_actions()
# reload normalization to make sure the monitor matches
if self.reduction_config.normalisationFileIdentifier:
self.create_normalisation_map(self.reduction_config.normalisationFileIdentifier[0])
if self.config.reduction.normalisationFileIdentifier:
self.create_normalisation_map(self.config.reduction.normalisationFileIdentifier[0])
self.dataevent_time_correction.seriesStartTime = self.dataset.eventStartTime
self.dataevent_actions(self.dataset)
@@ -154,7 +150,7 @@ class AmorReduction:
timestamp=self.dataset.fileDate))
if self.reduction_config.timeSlize:
if self.config.reduction.timeSlize:
if i>0:
logging.warning(" time slizing should only be used for one set of datafiles, check parameters")
self.analyze_timeslices(i)
@@ -162,26 +158,26 @@ class AmorReduction:
self.analyze_unsliced(i)
def analyze_unsliced(self, i):
self.monitor = np.sum(self.dataset.data.pulses.monitor)
logging.warning(f' monitor = {self.monitor:8.2f} {MONITOR_UNITS[self.experiment_config.monitorType]}')
self.monitor = self.dataset.data.pulses.monitor.sum()
logging.warning(f' monitor = {self.monitor:8.2f} {MONITOR_UNITS[self.config.experiment.monitorType]}')
proj:LZProjection = self.project_on_lz()
try:
scale = self.reduction_config.scale[i]
scale = self.config.reduction.scale[i]
except IndexError:
scale = self.reduction_config.scale[-1]
scale = self.config.reduction.scale[-1]
proj.scale(scale)
if 'Rqz.ort' in self.output_config.outputFormats:
if 'Rqz.ort' in self.config.output.outputFormats:
headerRqz = self.header.orso_header()
headerRqz.data_set = f'Nr {i} : mu = {self.dataset.geometry.mu:6.3f} deg'
# projection on qz-grid
result = proj.project_on_qz()
if self.reduction_config.autoscale:
if self.config.reduction.autoscale:
if i==0:
result.autoscale(self.reduction_config.autoscale)
result.autoscale(self.config.reduction.autoscale)
else:
result.stitch(self.last_result)
@@ -196,12 +192,12 @@ class AmorReduction:
self.last_result = result
self.datasetsRqz.append(orso_data)
if self.output_config.plot:
if self.config.output.plot:
import matplotlib.pyplot as plt
# plot all reflectivity results in same graph
plt.figure(num=99)
result.plot(label=f'{self.reduction_config.fileIdentifier[i]}')
if 'Rlt.ort' in self.output_config.outputFormats:
result.plot(label=f'{self.config.reduction.fileIdentifier[i]}')
if 'Rlt.ort' in self.config.output.outputFormats:
columns = [
fileio.Column('Qz', '1/angstrom', 'normal momentum transfer'),
fileio.Column('R', '', 'specular reflectivity'),
@@ -243,22 +239,22 @@ class AmorReduction:
self.datasetsRlt.append(orso_data)
j += 1
if self.output_config.plot:
if self.config.output.plot:
import matplotlib.pyplot as plt
plt.figure()
proj.plot(colorbar=True, cmap=str(self.output_config.plot_colormap))
plt.title(f'{self.reduction_config.fileIdentifier[i]}')
proj.plot(colorbar=True, cmap=str(self.config.output.plot_colormap))
plt.title(f'{self.config.reduction.fileIdentifier[i]}')
def analyze_timeslices(self, i):
wallTime_e = np.float64(self.dataset.data.events.wallTime)/1e9
pulseTimeS = np.float64(self.dataset.data.pulses.time)/1e9
interval = self.reduction_config.timeSlize[0]
interval = self.config.reduction.timeSlize[0]
try:
start = self.reduction_config.timeSlize[1]
start = self.config.reduction.timeSlize[1]
except IndexError:
start = 0
try:
stop = self.reduction_config.timeSlize[2]
stop = self.config.reduction.timeSlize[2]
except IndexError:
stop = wallTime_e[-1]
# make overwriting log lines possible by removing newline at the end
@@ -268,23 +264,23 @@ class AmorReduction:
for ti, time in enumerate(np.arange(start, stop, interval)):
slice = self.dataset.get_timeslice(time, time+interval)
self.monitor = np.sum(slice.data.pulses.monitor)
logging.info(f' {ti:<4d} {time:6.0f} {self.monitor:7.2f} {MONITOR_UNITS[self.experiment_config.monitorType]}')
logging.info(f' {ti:<4d} {time:6.0f} {self.monitor:7.2f} {MONITOR_UNITS[self.config.experiment.monitorType]}')
proj: LZProjection = self.project_on_lz(slice)
try:
scale = self.reduction_config.scale[i]
scale = self.config.reduction.scale[i]
except IndexError:
scale = self.reduction_config.scale[-1]
scale = self.config.reduction.scale[-1]
proj.scale(scale)
# projection on qz-grid
result = proj.project_on_qz()
if self.reduction_config.autoscale:
if self.config.reduction.autoscale:
# scale every slice the same
if ti==0:
if i==0:
atscale = result.autoscale(self.reduction_config.autoscale)
atscale = result.autoscale(self.config.reduction.autoscale)
else:
atscale = result.stitch(self.last_result)
else:
@@ -303,11 +299,11 @@ class AmorReduction:
orso_data = fileio.OrsoDataset(headerRqz, result.data_for_time(time))
self.datasetsRqz.append(orso_data)
if self.output_config.plot:
if self.config.output.plot:
import matplotlib.pyplot as plt
# plot all reflectivity results in same graph
plt.figure(num=99)
result.plot(label=f'{self.reduction_config.fileIdentifier[i]} @ {time:.1f}s')
result.plot(label=f'{self.config.reduction.fileIdentifier[i]} @ {time:.1f}s')
self.last_result = result
# reset normal logging behavior
@@ -315,19 +311,19 @@ class AmorReduction:
logging.info(f' done {min(time+interval, pulseTimeS[-1]):5.0f}')
def save_Rqz(self):
fname = os.path.join(self.output_config.outputPath, f'{self.output_config.outputName}.Rqz.ort')
fname = os.path.join(self.config.output.outputPath, f'{self.config.output.outputName}.Rqz.ort')
logging.warning(f' {fname}')
theSecondLine = f' {self.header.experiment.title} | {self.header.experiment.start_date} | sample {self.header.sample.name} | R(q_z)'
fileio.save_orso(self.datasetsRqz, fname, data_separator='\n', comment=theSecondLine)
def save_Rtl(self):
fname = os.path.join(self.output_config.outputPath, f'{self.output_config.outputName}.Rlt.ort')
fname = os.path.join(self.config.output.outputPath, f'{self.config.output.outputName}.Rlt.ort')
logging.warning(f' {fname}')
theSecondLine = f' {self.header.experiment.title} | {self.header.experiment.start_date} | sample {self.header.sample.name} | R(lambda, theta)'
fileio.save_orso(self.datasetsRlt, fname, data_separator='\n', comment=theSecondLine)
def loadRqz(self, name):
fname = os.path.join(self.output_config.outputPath, name)
fname = os.path.join(self.config.output.outputPath, name)
if os.path.exists(fname):
fileName = fname
elif os.path.exists(f'{fname}.Rqz.ort'):
@@ -340,7 +336,7 @@ class AmorReduction:
return q_q, Sq_q, dS_q, fileName
def create_normalisation_map(self, short_notation):
outputPath = self.output_config.outputPath
outputPath = self.config.output.outputPath
normalisation_list = self.path_resolver.expand_file_list(short_notation)
name = '_'.join(map(str, normalisation_list))
n_path = os.path.join(outputPath, f'{name}.norm')
@@ -364,7 +360,7 @@ class AmorReduction:
toadd = AmorEventData(nfi)
self.normevent_actions(toadd)
reference.append(toadd)
self.norm = LZNormalisation(reference, self.reduction_config.normalisationMethod, self.grid)
self.norm = LZNormalisation(reference, self.config.reduction.normalisationMethod, self.grid)
if reference.data.events.shape[0] > 1e6:
self.norm.safe(n_path, self.normevent_actions.action_hash())
@@ -375,33 +371,36 @@ class AmorReduction:
if dataset is None:
dataset=self.dataset
proj = LZProjection.from_dataset(dataset, self.grid,
has_offspecular=(self.experiment_config.incidentAngle!=IncidentAngle.alphaF))
has_offspecular=(self.config.experiment.incidentAngle!=IncidentAngle.alphaF))
if not self.reduction_config.is_default('thetaRangeR'):
t0 = dataset.geometry.nu - dataset.geometry.mu
t0 = dataset.geometry.nu-dataset.geometry.mu
if not self.config.reduction.is_default('thetaRangeR'):
# adjust range based on detector center
thetaRange = [ti+t0 for ti in self.reduction_config.thetaRangeR]
thetaRange = [ti+t0 for ti in self.config.reduction.thetaRangeR]
proj.apply_theta_mask(thetaRange)
elif not self.reduction_config.is_default('thetaRange'):
proj.apply_theta_mask(self.reduction_config.thetaRange)
elif not self.config.reduction.is_default('thetaRange'):
proj.apply_theta_mask(self.config.reduction.thetaRange)
else:
thetaRange = [dataset.geometry.nu - dataset.geometry.mu - dataset.geometry.div/2,
dataset.geometry.nu - dataset.geometry.mu + dataset.geometry.div/2]
proj.apply_theta_mask(thetaRange)
for thi in self.config.reduction.thetaFilters:
# apply theta filters relative to angle on detector (issues with parts of the incoming divergence)
proj.apply_theta_filter((thi[0]+t0, thi[1]+t0))
proj.apply_lamda_mask(self.experiment_config.lambdaRange)
proj.apply_lamda_mask(self.config.experiment.lambdaRange)
proj.apply_norm_mask(self.norm)
proj.project(dataset, self.monitor)
if self.reduction_config.normalisationMethod == NormalisationMethod.over_illuminated:
if self.config.reduction.normalisationMethod == NormalisationMethod.over_illuminated:
logging.debug(' assuming an overilluminated sample and correcting for the angle of incidence')
proj.normalize_over_illuminated(self.norm)
elif self.reduction_config.normalisationMethod==NormalisationMethod.under_illuminated:
elif self.config.reduction.normalisationMethod==NormalisationMethod.under_illuminated:
logging.debug(' assuming an underilluminated sample and ignoring the angle of incidence')
proj.normalize_no_footprint(self.norm)
elif self.reduction_config.normalisationMethod==NormalisationMethod.direct_beam:
elif self.config.reduction.normalisationMethod==NormalisationMethod.direct_beam:
logging.debug(' assuming direct beam for normalisation and ignoring the angle of incidence')
proj.normalize_no_footprint(self.norm)
else:

File diff suppressed because it is too large Load Diff

View File

@@ -2,5 +2,6 @@ numpy
h5py
orsopy
numba
matplotlib
backports.strenum; python_version<"3.11"
backports.zoneinfo; python_version<"3.9"

View File

@@ -34,3 +34,4 @@ Homepage = "https://github.com/jochenstahn/amor"
[options.entry_points]
console_scripts =
eos = eos.__main__:main
events2histogram = eos.e2h:main

View File

@@ -2,7 +2,7 @@ import os
import cProfile
from unittest import TestCase
from dataclasses import fields, MISSING
from eos import options, reduction, logconfig
from eos import options, reduction_reflectivity, logconfig
logconfig.setup_logging()
logconfig.update_loglevel(1)
@@ -14,7 +14,7 @@ class FullAmorTest(TestCase):
def setUpClass(cls):
# generate map for option defaults
cls._field_defaults = {}
for opt in [options.ExperimentConfig, options.ReductionConfig, options.OutputConfig]:
for opt in [options.ExperimentConfig, options.ReflectivityReductionConfig, options.ReflectivityOutputConfig]:
defaults = {}
for field in fields(opt):
if field.default not in [None, MISSING]:
@@ -58,28 +58,28 @@ class FullAmorTest(TestCase):
muOffset=0.0,
sampleModel='air | 10 H2O | D2O'
)
reduction_config = options.ReductionConfig(
normalisationMethod=self._field_defaults['ReductionConfig']['normalisationMethod'],
reduction_config = options.ReflectivityReductionConfig(
normalisationMethod=self._field_defaults['ReflectivityReductionConfig']['normalisationMethod'],
qResolution=0.01,
qzRange=self._field_defaults['ReductionConfig']['qzRange'],
qzRange=self._field_defaults['ReflectivityReductionConfig']['qzRange'],
thetaRange=(-0.75, 0.75),
fileIdentifier=["6003-6005"],
scale=[1],
normalisationFileIdentifier=[],
timeSlize=[300.0]
)
output_config = options.OutputConfig(
output_config = options.ReflectivityOutputConfig(
outputFormats=[options.OutputFomatOption.Rqz_ort],
outputName='test',
outputPath='test_results',
)
config=options.EOSConfig(self.reader_config, experiment_config, reduction_config, output_config)
config=options.ReflectivityConfig(self.reader_config, experiment_config, reduction_config, output_config)
# run three times to get similar timing to noslicing runs
reducer = reduction.AmorReduction(config)
reducer = reduction_reflectivity.ReflectivityReduction(config)
reducer.reduce()
reducer = reduction.AmorReduction(config)
reducer = reduction_reflectivity.ReflectivityReduction(config)
reducer.reduce()
reducer = reduction.AmorReduction(config)
reducer = reduction_reflectivity.ReflectivityReduction(config)
reducer.reduce()
def test_noslicing(self):
@@ -96,24 +96,24 @@ class FullAmorTest(TestCase):
nu=0,
muOffset=0.0,
)
reduction_config = options.ReductionConfig(
normalisationMethod=self._field_defaults['ReductionConfig']['normalisationMethod'],
reduction_config = options.ReflectivityReductionConfig(
normalisationMethod=self._field_defaults['ReflectivityReductionConfig']['normalisationMethod'],
qResolution=0.01,
qzRange=self._field_defaults['ReductionConfig']['qzRange'],
qzRange=self._field_defaults['ReflectivityReductionConfig']['qzRange'],
thetaRange=(-0.75, 0.75),
fileIdentifier=["6003", "6004", "6005"],
scale=[1],
normalisationFileIdentifier=["5952"],
autoscale=(0.0, 0.05),
)
output_config = options.OutputConfig(
output_config = options.ReflectivityOutputConfig(
outputFormats=[options.OutputFomatOption.Rqz_ort],
outputName='test',
outputPath='test_results',
)
config=options.EOSConfig(self.reader_config, experiment_config, reduction_config, output_config)
reducer = reduction.AmorReduction(config)
config=options.ReflectivityConfig(self.reader_config, experiment_config, reduction_config, output_config)
reducer = reduction_reflectivity.ReflectivityReduction(config)
reducer.reduce()
# run second time to reuse norm file
reducer = reduction.AmorReduction(config)
reducer = reduction_reflectivity.ReflectivityReduction(config)
reducer.reduce()