Files
Jungfraujoch/writer/HDF5NXmx.cpp

326 lines
14 KiB
C++

// Copyright (2019-2023) Paul Scherrer Institute
#include <cmath>
#include <sys/stat.h>
#include "HDF5NXmx.h"
#include "../common/GitInfo.h"
#include "../include/spdlog/fmt/fmt.h"
#include "MakeDirectory.h"
void HDF5Metadata::NXmx( const StartMessage &start, const EndMessage &end) {
    // Write the NXmx-format HDF5 master file for one data collection:
    // create the file, set default permissions, then fill in every
    // NeXus group (data link, instrument, sample, results, ...).
    //
    // Fix: the original bound a const reference to the temporary produced
    // by operator+; lifetime extension makes that legal, but it reads as if
    // the string were owned elsewhere. Hold the name by value instead.
    const std::string filename = start.file_prefix + "_master.h5";
    MakeDirectory(filename);

    HDF5File hdf5_file(filename);
    // rw for user and group, nothing for others (default permissions)
    chmod(filename.c_str(), S_IRUSR | S_IWUSR | S_IRGRP | S_IWGRP);

    hdf5_file.Attr("file_name", filename);
    hdf5_file.Attr("HDF5_Version", hdf5_version());

    HDF5Group(hdf5_file, "/entry").NXClass("NXentry").SaveScalar("definition", "NXmx");
    HDF5Group(hdf5_file, "/entry/result").NXClass("NXcollection");

    LinkToData(&hdf5_file, start, end);
    Facility(&hdf5_file, start);
    Time(&hdf5_file, start, end);
    Detector(&hdf5_file, start, end);
    Metrology(&hdf5_file, start);
    Beam(&hdf5_file, start);
    Sample(&hdf5_file, start, end);
    Calibration(&hdf5_file, start);
    AzimuthalIntegration(&hdf5_file, start, end);
    ADUHistogram(&hdf5_file, end);
    Attenuator(&hdf5_file, start);
}
void HDF5Metadata::Time(HDF5File *hdf5_file, const StartMessage &start, const EndMessage &end) {
    // Record acquisition timestamps: start_time always, end-time fields
    // only when the end message carries a date.
    hdf5_file->SaveScalar("/entry/start_time", start.arm_date);

    const auto &finish_date = end.end_date;
    if (finish_date) {
        hdf5_file->Attr("file_time", finish_date.value());
        hdf5_file->SaveScalar("/entry/end_time", finish_date.value());
        hdf5_file->SaveScalar("/entry/end_time_estimated", finish_date.value());
    }
}
void HDF5Metadata::Facility(HDF5File *hdf5_file, const StartMessage &start) {
    // Describe the facility (NXsource) and the beamline (NXinstrument),
    // each with a full name plus a "short_name" attribute.
    HDF5Group source(*hdf5_file, "/entry/source");
    source.NXClass("NXsource");
    auto source_name = SaveScalar(*hdf5_file, "/entry/source/name", start.source_name);
    source_name->Attr("short_name", start.source_name_short);

    HDF5Group instrument(*hdf5_file, "/entry/instrument");
    instrument.NXClass("NXinstrument");
    auto instrument_name = SaveScalar(*hdf5_file, "/entry/instrument/name", start.instrument_name);
    instrument_name->Attr("short_name", start.instrument_name_short);
}
void HDF5Metadata::Detector(HDF5File *hdf5_file, const StartMessage &start, const EndMessage &end) {
    // Fill /entry/instrument/detector (NXdetector) with geometry, timing
    // and correction settings, plus the "detectorSpecific" NXcollection
    // consumed by downstream processing tools.
    HDF5Group detector(*hdf5_file, "/entry/instrument/detector");
    detector.NXClass("NXdetector");

    // geometry & timing (all with explicit units)
    SaveScalar(detector, "beam_center_x", start.beam_center_x)->Units("pixel");
    SaveScalar(detector, "beam_center_y", start.beam_center_y)->Units("pixel");
    SaveScalar(detector, "distance", start.detector_distance)->Units("m");
    SaveScalar(detector, "detector_distance", start.detector_distance)->Units("m");
    SaveScalar(detector, "count_time", start.count_time)->Units("s");
    SaveScalar(detector, "frame_time", start.frame_time)->Units("s");
    SaveScalar(detector, "sensor_thickness", start.sensor_thickness)->Units("m");
    SaveScalar(detector, "x_pixel_size", start.pixel_size_x)->Units("m");
    SaveScalar(detector, "y_pixel_size", start.pixel_size_y)->Units("m");

    // sensor / readout description
    SaveScalar(detector, "sensor_material", start.sensor_material);
    SaveScalar(detector, "description", start.detector_description);
    SaveScalar(detector, "bit_depth_image", start.pixel_bit_depth);
    SaveScalar(detector, "bit_depth_readout", 16);
    SaveScalar(detector, "saturation_value", start.saturation_value);
    if (start.error_value)
        SaveScalar(detector, "error_value", start.error_value.value()); // this is not NXmx
    SaveScalar(detector, "flatfield_applied", start.flatfield_enabled);
    SaveScalar(detector, "pixel_mask_applied", start.pixel_mask_enabled);
    SaveScalar(detector, "acquisition_type", "triggered");
    SaveScalar(detector, "countrate_correction_applied", start.countrate_correction_enabled);
    SaveScalar(detector, "number_of_cycles", start.summation);

    // HDF5Group(group, "geometry").NXClass("NXgeometry");
    // DIALS likes to have this soft link
    H5Lcreate_soft("/entry/data/data", detector.GetID(), "data",
                   H5P_DEFAULT, H5P_DEFAULT);

    HDF5Group specific(detector, "detectorSpecific");
    specific.NXClass("NXcollection");
    SaveScalar(specific, "nimages", end.max_image_number);
    SaveScalar(specific, "ntrigger", 1);
    SaveScalar(specific, "x_pixels_in_detector", static_cast<uint32_t>(start.image_size_x));
    SaveScalar(specific, "y_pixels_in_detector", static_cast<uint32_t>(start.image_size_y));
    SaveScalar(specific, "software_git_commit", jfjoch_git_sha1());
    SaveScalar(specific, "software_git_date", jfjoch_git_date());

    if (start.storage_cell_number) {
        SaveScalar(specific, "storage_cell_number", static_cast<uint32_t>(start.storage_cell_number.value()));
        if (start.storage_cell_number.value() > 1)
            SaveScalar(specific, "storage_cell_delay",
                       static_cast<uint32_t>(start.storage_cell_delay_ns))->Units("ns");
    }

    // collection statistics (all optional in the end message)
    if (end.images_collected_count)
        SaveScalar(specific, "nimages_collected", end.images_collected_count.value());
    if (end.images_sent_to_write_count)
        SaveScalar(specific, "nimages_written", end.images_sent_to_write_count.value());
    if (end.efficiency)
        SaveScalar(specific, "data_collection_efficiency", end.efficiency.value());
    if (end.max_receiver_delay)
        SaveScalar(specific, "max_receiver_delay", end.max_receiver_delay.value());
}
void HDF5Metadata::Beam(HDF5File *hdf5_file, const StartMessage &start) {
    // /entry/instrument/beam (NXbeam): wavelength, plus flux when available.
    HDF5Group beam(*hdf5_file, "/entry/instrument/beam");
    beam.NXClass("NXbeam");
    SaveScalar(beam, "incident_wavelength", start.incident_wavelength)->Units("angstrom");
    if (start.total_flux)
        SaveScalar(beam, "total_flux", start.total_flux.value())->Units("Hz");
}
void HDF5Metadata::Attenuator(HDF5File *hdf5_file, const StartMessage &start) {
    // /entry/instrument/attenuator (NXattenuator) — written only when the
    // start message provided a transmission value.
    if (!start.attenuator_transmission)
        return;
    HDF5Group attenuator(*hdf5_file, "/entry/instrument/attenuator");
    attenuator.NXClass("NXattenuator");
    SaveScalar(attenuator, "attenuator_transmission", start.attenuator_transmission.value());
}
void HDF5Metadata::Sample(HDF5File *hdf5_file, const StartMessage &start, const EndMessage &end) {
    // /entry/sample (NXsample): sample name, optional crystallographic
    // metadata and — for rotation data — the omega axis per frame.
    HDF5Group sample(*hdf5_file, "/entry/sample");
    sample.NXClass("NXsample");
    sample.SaveScalar("name", start.sample_name);

    if (start.space_group_number > 0)
        sample.SaveScalar("space_group", start.space_group_number);

    if (start.unit_cell) {
        const auto &cell = *start.unit_cell;
        std::vector<float> cell_params = {cell.a, cell.b, cell.c,
                                          cell.alpha, cell.beta, cell.gamma};
        sample.SaveVector("unit_cell", cell_params);
    }

    // Still data (no rotation increment, or no images) depends on nothing.
    const bool has_rotation = (end.max_image_number > 0) && (start.omega.increment != 0.0f);
    if (!has_rotation) {
        sample.SaveScalar("depends_on", ".");
        return;
    }

    sample.SaveScalar("depends_on", "/entry/sample/transformations/omega");
    HDF5Group transformations(sample, "transformations");
    transformations.NXClass("NXtransformations");

    // One omega value per image: start + frame * increment.
    std::vector<double> omega_values(end.max_image_number);
    for (int32_t frame = 0; frame < end.max_image_number; frame++)
        omega_values[frame] = start.omega.start + frame * start.omega.increment;

    std::vector<double> rotation_axis = {start.omega.axis[0], start.omega.axis[1], start.omega.axis[2]};
    SaveVector(transformations, "omega", omega_values)->
            Transformation("deg", ".", "", "", "rotation", rotation_axis, {0,0,0}, "");
}
void HDF5Metadata::Metrology(HDF5File *hdf5_file, const StartMessage &start) {
    // Detector position metrology: one NXtransformations translation along
    // the beam-center/distance vector, plus a single NXdetector_module
    // describing the pixel layout.
    HDF5Group transformations(*hdf5_file, "/entry/instrument/detector/transformations");
    transformations.NXClass("NXtransformations");

    // Offset of the detector origin from the sample, in meters.
    std::vector<double> vector{start.beam_center_x * start.pixel_size_x,
                               start.beam_center_y * start.pixel_size_y,
                               start.detector_distance};
    // Fix: qualify sqrt — the file includes <cmath>, and an unqualified
    // ::sqrt being visible is only implementation-defined.
    // NOTE(review): assumes a non-zero vector (detector_distance != 0);
    // otherwise vector_norm divides by zero — confirm upstream validation.
    double vector_length = std::sqrt(vector[0] * vector[0] + vector[1] * vector[1] + vector[2] * vector[2]);
    std::vector<double> vector_norm{vector[0] / vector_length,
                                    vector[1] / vector_length,
                                    vector[2] / vector_length};
    SaveScalar(transformations, "translation", vector_length)->
            Transformation("m", ".", "detector", "detector_arm", "translation", vector_norm);

    // https://manual.nexusformat.org/classes/base_classes/NXdetector_module.html?highlight=nxdetector_module
    // The order of indices (i, j or i, j, k) is slow to fast.
    // though EIGER has is the other way round
    // Confusing....
    std::vector<int32_t> origin = {0, 0};
    std::vector<int32_t> size = {static_cast<int32_t>(start.image_size_y),
                                 static_cast<int32_t>(start.image_size_x)};
    DetectorModule(hdf5_file, "module", origin, size, {-1,0,0}, {0,-1,0}, "translation", start.pixel_size_x);
}
void HDF5Metadata::DetectorModule(HDF5File *hdf5_file, const std::string &name, const std::vector<int32_t> &origin,
                                  const std::vector<int32_t> &size, const std::vector<double> &fast_axis,
                                  const std::vector<double> &slow_axis,
                                  const std::string &nx_axis, double pixel_size) {
    // Write one NXdetector_module group under /entry/instrument/detector.
    //
    // name       - group name of the module (e.g. "module")
    // origin     - data_origin (slow, fast) of the module within the image
    // size       - data_size (slow, fast) of the module in pixels
    // fast_axis  - direction vector of the fast pixel axis
    // slow_axis  - direction vector of the slow pixel axis
    // nx_axis    - parent axis name under .../detector/transformations
    // pixel_size - pixel pitch, stored with units "m"
    //
    // Fix: the parameter was previously named pixel_size_mm, but every value
    // here is saved with Units("m") and the caller passes pixel_size_x,
    // which is also stored with Units("m") — renamed to avoid the misleading
    // unit suffix (definition-only rename; declarations may differ).
    HDF5Group module_group(*hdf5_file, "/entry/instrument/detector/" + name);
    module_group.NXClass("NXdetector_module");
    module_group.SaveVector("data_origin", origin);
    module_group.SaveVector("data_size", size);
    SaveScalar(module_group, "fast_pixel_direction", pixel_size)->
            Transformation("m", "/entry/instrument/detector/transformations/" + nx_axis,
                           "", "", "translation", fast_axis,
                           {0,0,0}, "");
    SaveScalar(module_group, "slow_pixel_direction", pixel_size)->
            Transformation("m", "/entry/instrument/detector/transformations/" + nx_axis,
                           "", "", "translation", slow_axis,
                           {0,0,0}, "");
    SaveScalar(module_group, "module_offset", 0)->
            Transformation("m", "/entry/instrument/detector/transformations/" + nx_axis,
                           "", "", "translation", {0,0,0});
}
void HDF5Metadata::SaveCBORImage(HDF5File *hdf5_file, const std::string &hdf5_path, const CompressedImage &image) {
    // Store one (possibly compressed) 2D image as a single-chunk dataset.
    std::vector<hsize_t> dimensions = {image.ypixel, image.xpixel};
    HDF5DataType pixel_type(image.pixel_depth_bytes, image.pixel_is_signed);

    HDF5Dcpl dcpl;
    dcpl.SetCompression(image.algorithm, H5Tget_size(pixel_type.GetID()), 0);
    dcpl.SetChunking(dimensions);

    HDF5DataSpace data_space(dimensions);
    HDF5DataSet dataset(*hdf5_file, hdf5_path, pixel_type, data_space, dcpl);
    // Uncompressed data goes through the regular write path; compressed
    // payloads bypass the filter pipeline via a direct chunk write.
    if (image.algorithm == CompressionAlgorithm::NO_COMPRESSION)
        dataset.Write(pixel_type, image.data);
    else
        dataset.WriteDirectChunk(image.data, image.size, {0,0});
}
void HDF5Metadata::Calibration(HDF5File *hdf5_file, const StartMessage &start) {
    // Save pixel mask (with a hard link into detectorSpecific), gain file
    // names and the per-channel calibration images.
    if (!start.pixel_mask.empty()) {
        SaveCBORImage(hdf5_file, "/entry/instrument/detector/pixel_mask", start.pixel_mask[0]);
        hdf5_file->HardLink("/entry/instrument/detector/pixel_mask",
                            "/entry/instrument/detector/detectorSpecific/pixel_mask");
    }

    if (!start.gain_file_names.empty())
        hdf5_file->SaveVector("/entry/instrument/detector/detectorSpecific/gain_file_names", start.gain_file_names);

    for (const auto &calibration_image : start.calibration)
        SaveCBORImage(hdf5_file, "/entry/instrument/detector/detectorSpecific/" + calibration_image.channel,
                      calibration_image);
}
void HDF5Metadata::LinkToData(HDF5File *hdf5_file, const StartMessage &start, const EndMessage &end) {
    // Build /entry/data/data as an HDF5 virtual dataset (VDS) stitching
    // together the per-file datasets produced by the writer.
    const hsize_t total_images = end.max_image_number;
    const hsize_t width = start.image_size_x;
    const hsize_t height = start.image_size_y;
    const hsize_t images_per_file = start.images_per_file;

    hsize_t file_count = 0;
    if (start.images_per_file > 0) {
        // ceiling division: a partially filled last file still counts
        file_count = total_images / images_per_file;
        if (total_images % images_per_file > 0)
            file_count++;
    }

    if (total_images == 0)
        return;

    HDF5Group(*hdf5_file, "/entry/data").NXClass("NXdata");
    HDF5DataType data_type(start.pixel_bit_depth / 8, true);
    HDF5DataSpace full_data_space({total_images, height, width});

    HDF5Dcpl dcpl;
    for (hsize_t file_id = 0; file_id < file_count; file_id++) {
        // all files hold images_per_file frames except possibly the last one
        hsize_t images_in_file = images_per_file;
        if (file_id == file_count - 1)
            images_in_file = total_images - (file_count - 1) * images_per_file;

        HDF5DataSpace src_data_space({images_in_file, height, width});
        HDF5DataSpace virtual_data_space({total_images, height, width});
        virtual_data_space.SelectHyperslab({file_id * images_per_file, 0, 0},
                                           {images_in_file, height, width});
        dcpl.SetVirtual(DataFileName(start.file_prefix, file_id),
                        "/entry/data/data", src_data_space, virtual_data_space);
    }

    // frames missing from the VDS read back as the minimum signed value
    if (start.pixel_bit_depth == 16)
        dcpl.SetFillValue16(INT16_MIN);
    else
        dcpl.SetFillValue32(INT32_MIN);

    HDF5DataSet dataset(*hdf5_file, "/entry/data/data", data_type, full_data_space, dcpl);
    dataset.Attr("image_nr_low", (int32_t) 1).Attr("image_nr_high", (int32_t) total_images);
    /*
    if (experiment.GetDetectorMode() == DetectorMode::Conversion)
        dataset.Units("photon");
    else
        dataset.Units("ADU");
    */
}
std::string HDF5Metadata::DataFileName(const std::string &prefix, int64_t file_number) {
    // Name of the file_number-th data file, e.g. "<prefix>_data_000001.h5".
    // Throws for negative numbers and for numbers that would not fit the
    // six-digit zero-padded field.
    if (file_number < 0)
        throw JFJochException(JFJochExceptionCategory::InputParameterInvalid,
                              "File number cannot be negative");
    if (file_number >= 1000000)
        throw JFJochException(JFJochExceptionCategory::InputParameterInvalid,
                              "Format doesn't allow for 1 million or more files");
    return fmt::format("{:s}_data_{:06d}.h5", prefix, file_number);
}
void HDF5Metadata::AzimuthalIntegration(HDF5File *hdf5_file, const StartMessage &start, const EndMessage &end) {
    // Save azimuthal integration results under /entry/result/azimIntegration
    // — only when the q-bin mapping is present in the start message.
    if (start.az_int_bin_to_q.empty())
        return;
    HDF5Group azim_group(*hdf5_file, "/entry/result/azimIntegration");
    azim_group.SaveVector("bin_to_q", start.az_int_bin_to_q);
    for (const auto &[dataset_name, values] : end.az_int_result)
        azim_group.SaveVector(dataset_name, values);
}
void HDF5Metadata::ADUHistogram(HDF5File *hdf5_file, const EndMessage &end) {
    // Save the ADU histograms under /entry/result/adu_histogram — only
    // when the end message contains histogram data.
    if (end.adu_histogram.empty())
        return;
    HDF5Group histogram_group(*hdf5_file, "/entry/result/adu_histogram");
    histogram_group.SaveScalar("bin_width", end.adu_histogram_bin_width);
    for (const auto &[dataset_name, counts] : end.adu_histogram)
        histogram_group.SaveVector(dataset_name, counts);
}