// Copyright (2019-2022) Paul Scherrer Institute
|
|
// SPDX-License-Identifier: GPL-3.0-or-later
|
|
|
|
#include <cmath>
|
|
#include "HDF5NXmx.h"
|
|
|
|
#include "../common/GitInfo.h"
|
|
#include "../include/spdlog/fmt/fmt.h"
|
|
#include "MakeDirectory.h"
|
|
#include <iostream>
|
|
|
|
void HDF5Metadata::NXmx( const StartMessage &start, const EndMessage &end) {
|
|
const std::string& filename = start.file_prefix + "_master.h5";
|
|
|
|
MakeDirectory(filename);
|
|
|
|
HDF5File hdf5_file(filename, true, true, false);
|
|
hdf5_file.Attr("file_name", filename);
|
|
|
|
hdf5_file.Attr("HDF5_Version", hdf5_version());
|
|
HDF5Group(hdf5_file, "/entry").NXClass("NXentry").SaveScalar("definition", "NXmx");
|
|
|
|
LinkToData(&hdf5_file, start, end);
|
|
Facility(&hdf5_file, start, end);
|
|
Time(&hdf5_file, start, end);
|
|
Detector(&hdf5_file, start, end);
|
|
Metrology(&hdf5_file, start, end);
|
|
Beam(&hdf5_file, start, end);
|
|
Sample(&hdf5_file, start, end);
|
|
Calibration(&hdf5_file, start, end);
|
|
}
|
|
|
|
void HDF5Metadata::Time(HDF5File *hdf5_file, const StartMessage &start, const EndMessage &end) {
    // File creation time stamp attribute on the master file.
    hdf5_file->Attr("file_time", end.end_date);

    // Acquisition window: armed at start, finished at end. The master
    // file is written after acquisition, so the estimated end time is
    // simply the actual one.
    const auto &finished = end.end_date;
    hdf5_file->SaveScalar("/entry/start_time", start.arm_date);
    hdf5_file->SaveScalar("/entry/end_time", finished);
    hdf5_file->SaveScalar("/entry/end_time_estimated", finished);
}
|
|
|
|
|
|
void HDF5Metadata::Facility(HDF5File *hdf5_file, const StartMessage &start, const EndMessage &end) {
    // Source (NXsource): facility name with its short-name attribute.
    HDF5Group(*hdf5_file, "/entry/source").NXClass("NXsource");
    auto source_name = SaveScalar(*hdf5_file, "/entry/source/name", start.source_name);
    source_name->Attr("short_name", start.source_name_short);

    // Instrument (NXinstrument): beamline name with its short-name attribute.
    HDF5Group(*hdf5_file, "/entry/instrument").NXClass("NXinstrument");
    auto instrument_name = SaveScalar(*hdf5_file, "/entry/instrument/name", start.instrument_name);
    instrument_name->Attr("short_name", start.instrument_name_short);
}
|
|
|
|
void HDF5Metadata::Detector(HDF5File *hdf5_file, const StartMessage &start, const EndMessage &end) {
    // /entry/instrument/detector (NXdetector): geometry, timing and
    // readout parameters for this acquisition. All values come straight
    // from the start/end messages; units are attached per NXmx convention.
    HDF5Group group(*hdf5_file, "/entry/instrument/detector");
    group.NXClass("NXdetector");
    // Beam center in pixels; distances, thickness and pixel sizes in meters.
    SaveScalar(group, "beam_center_x", start.beam_center_x)->Units("pixel");
    SaveScalar(group, "beam_center_y", start.beam_center_y)->Units("pixel");
    SaveScalar(group, "distance", start.detector_distance)->Units("m");
    // "detector_distance" duplicates "distance" for tools that expect either name.
    SaveScalar(group, "detector_distance", start.detector_distance)->Units("m");

    SaveScalar(group, "count_time", start.count_time)->Units("s");
    SaveScalar(group, "frame_time", start.frame_time)->Units("s");

    SaveScalar(group, "sensor_thickness", start.sensor_thickness)->Units("m");
    SaveScalar(group, "x_pixel_size", start.pixel_size_x)->Units("m");
    SaveScalar(group, "y_pixel_size", start.pixel_size_y)->Units("m");
    SaveScalar(group, "sensor_material", start.sensor_material);
    SaveScalar(group, "description", start.detector_description);

    SaveScalar(group, "bit_depth_image", start.pixel_bit_depth);
    // NOTE(review): readout bit depth is hard-coded to 16 while the image
    // bit depth is taken from the message — confirm 16 holds for every
    // supported detector mode.
    SaveScalar(group, "bit_depth_readout", 16);
    SaveScalar(group, "saturation_value", start.saturation_value);
    SaveScalar(group, "underload_value", start.min_value);
    // Corrections are reported as not applied at this stage.
    SaveScalar(group, "flatfield_applied", false);
    SaveScalar(group, "pixel_mask_applied", false);
    SaveScalar(group, "acquisition_type", "triggered");
    SaveScalar(group, "countrate_correction_applied", false);

    // HDF5Group(group, "geometry").NXClass("NXgeometry");

    // DIALS likes to have this soft link
    H5Lcreate_soft("/entry/data/data", group.GetID(), "data",
                   H5P_DEFAULT, H5P_DEFAULT);

    // "detectorSpecific" collection: non-NeXus, vendor-style parameters.
    HDF5Group det_specific(group, "detectorSpecific");
    det_specific.NXClass("NXcollection");

    SaveScalar(det_specific, "nimages", end.number_of_images);
    SaveScalar(det_specific, "ntrigger", 1);

    SaveScalar(det_specific, "x_pixels_in_detector", static_cast<uint32_t>(start.image_size_x));
    SaveScalar(det_specific, "y_pixels_in_detector", static_cast<uint32_t>(start.image_size_y));
    // Provenance of the writing software (git commit hash + date).
    SaveScalar(det_specific, "software_git_commit", jfjoch_git_sha1());
    SaveScalar(det_specific, "software_git_date", jfjoch_git_date());
    SaveScalar(det_specific, "storage_cell_number", static_cast<uint32_t>(start.storage_cell_number));

    // Acquisition statistics reported at the end of the run.
    SaveScalar(det_specific, "data_collection_efficiency", end.efficiency);
    SaveScalar(det_specific, "max_receiver_delay", end.max_receiver_delay);
}
|
|
|
|
void HDF5Metadata::Beam(HDF5File *hdf5_file, const StartMessage &start, const EndMessage &end) {
    // /entry/instrument/beam (NXbeam): incident beam wavelength in angstrom.
    HDF5Group beam_group(*hdf5_file, "/entry/instrument/beam");
    beam_group.NXClass("NXbeam");
    auto wavelength = SaveScalar(beam_group, "incident_wavelength", start.incident_wavelength);
    wavelength->Units("angstrom");
}
|
|
|
|
void HDF5Metadata::Sample(HDF5File *hdf5_file, const StartMessage &start, const EndMessage &end) {
    // /entry/sample (NXsample): sample name, optional crystallographic
    // parameters, and — if exactly one goniometer axis is present — the
    // per-image rotation angles it depends on.
    HDF5Group group(*hdf5_file, "/entry/sample");
    group.NXClass("NXsample");
    group.SaveScalar("name", start.sample_name);

    // Space group and unit cell are optional; zero means "not provided".
    if (start.space_group_number > 0)
        group.SaveScalar("space_group", start.space_group_number);

    if (start.unit_cell[0] > 0.0) {
        std::vector<float> v = {start.unit_cell[0], start.unit_cell[1], start.unit_cell[2],
                                start.unit_cell[3], start.unit_cell[4], start.unit_cell[5]};
        group.SaveVector("unit_cell", v);
    }

    if ((end.number_of_images > 0) && !start.goniometer.empty()) {
        // Only the single-axis case is handled: write the angle array and
        // point "depends_on" at it.
        // NOTE(review): when goniometer.size() > 1 this branch writes
        // nothing, so "depends_on" is absent from the file entirely —
        // confirm that is intended.
        if (start.goniometer.size() == 1) {
            for (auto &[key, value]: start.goniometer) {
                group.SaveScalar("depends_on", "/entry/sample/transformations/" + key);
                HDF5Group transformations(group, "transformations");
                transformations.NXClass("NXtransformations");
                // Angle of image i is start + i * increment (degrees).
                std::vector<double> angle_container(end.number_of_images);

                for (int32_t i = 0; i < end.number_of_images; i++)
                    angle_container[i] = value.start + i * value.increment;
                SaveVector(transformations, key, angle_container)->
                        Transformation("deg", ".", "", "", "rotation",
                                       {1,0,0}, // TODO: Implement axis vector
                                       {0,0,0}, "");
            }

        }
    } else
        // No images or no goniometer: the sample depends on nothing (".").
        group.SaveScalar("depends_on", ".");
}
|
|
|
|
void HDF5Metadata::Metrology(HDF5File *hdf5_file, const StartMessage &start, const EndMessage &end) {
    // /entry/instrument/detector/transformations (NXtransformations):
    // the detector position expressed as a single translation of a given
    // length along a normalized direction vector.
    HDF5Group transformations(*hdf5_file, "/entry/instrument/detector/transformations");
    transformations.NXClass("NXtransformations");

    // Vector from the sample towards the detector origin (meters):
    // the beam-center offset (pixels * pixel size) in x/y, detector
    // distance in z.
    std::vector<double> vector{-start.beam_center_x * start.pixel_size_x,
                               -start.beam_center_y * start.pixel_size_y,
                               start.detector_distance};

    // std::hypot(x, y, z) (C++17) is overflow/underflow safe, unlike the
    // manual sqrt(x*x + y*y + z*z) it replaces.
    double vector_length = std::hypot(vector[0], vector[1], vector[2]);

    // Guard the degenerate all-zero vector (beam center and distance all
    // zero) which previously produced NaNs in the master file; fall back
    // to the beam axis.
    std::vector<double> vector_norm{0.0, 0.0, 1.0};
    if (vector_length > 0.0)
        vector_norm = {vector[0] / vector_length, vector[1] / vector_length, vector[2] / vector_length};

    SaveScalar(transformations, "translation", vector_length)->
            Transformation("m", ".", "detector", "detector_arm",
                           "translation", vector_norm);

    // https://manual.nexusformat.org/classes/base_classes/NXdetector_module.html?highlight=nxdetector_module
    // The order of indices (i, j or i, j, k) is slow to fast.
    // though EIGER has is the other way round
    // Confusing....
    std::vector<int32_t> origin = {0, 0};
    std::vector<int32_t> size = {static_cast<int32_t>(start.image_size_y),
                                 static_cast<int32_t>(start.image_size_x)};

    DetectorModule(hdf5_file, "detector_module", origin, size,
                   {1,0,0}, {0,1,0}, "translation", start.pixel_size_x);
}
|
|
|
|
void HDF5Metadata::DetectorModule(HDF5File *hdf5_file, const std::string &name, const std::vector<int32_t> &origin,
                                  const std::vector<int32_t> &size, const std::vector<double> &fast_axis,
                                  const std::vector<double> &slow_axis,
                                  const std::string &nx_axis, double pixel_size_mm) {
    // Writes one NXdetector_module group describing a detector module:
    // its placement in the data array plus fast/slow pixel direction
    // vectors, all chained onto the given NXtransformations axis.
    //
    //   name                 - group name under /entry/instrument/detector
    //   origin, size         - module placement/extent in the (slow, fast) data array
    //   fast_axis, slow_axis - direction vectors of the pixel axes
    //   nx_axis              - transformations axis the directions depend on
    //   pixel_size_mm        - pixel pitch; NOTE(review): despite the "_mm"
    //                          suffix it is written with units "m", and the
    //                          caller passes the same value saved elsewhere
    //                          with Units("m") — confirm and consider renaming.
    HDF5Group module_group(*hdf5_file, "/entry/instrument/detector/" + name);

    module_group.NXClass("NXdetector_module");

    module_group.SaveVector("data_origin", origin);
    module_group.SaveVector("data_size", size);

    // Pixel pitch along the fast axis, as a translation on the axis chain.
    SaveScalar(module_group, "fast_pixel_direction", pixel_size_mm)->
            Transformation("m", "/entry/instrument/detector/transformations/" + nx_axis,
                           "", "", "translation", fast_axis,
                           {0,0,0}, "");

    // Pixel pitch along the slow axis.
    SaveScalar(module_group, "slow_pixel_direction", pixel_size_mm)->
            Transformation("m", "/entry/instrument/detector/transformations/" + nx_axis,
                           "", "", "translation", slow_axis,
                           {0,0,0}, "");

    // Zero offset: the module position is fully described by the axis chain.
    SaveScalar(module_group, "module_offset", 0)->
            Transformation("m", "/entry/instrument/detector/transformations/" + nx_axis,
                           "", "", "translation", {0,0,0});
}
|
|
|
|
void HDF5Metadata::Calibration(HDF5File *hdf5_file, const StartMessage &start, const EndMessage &end) {
    // Export the pixel mask for storage cell 0, if one was provided.
    const auto mask = start.pixel_mask.find("sc0");
    if (mask == start.pixel_mask.end())
        return;

    const std::vector<hsize_t> mask_dims = {start.image_size_y, start.image_size_x};
    SaveVector(*hdf5_file, "/entry/instrument/detector/pixel_mask", mask->second, mask_dims,
               CompressionAlgorithm::BSHUF_LZ4);
    // Second location for tools that look under detectorSpecific.
    hdf5_file->HardLink("/entry/instrument/detector/pixel_mask",
                        "/entry/instrument/detector/detectorSpecific/pixel_mask");
}
|
|
|
|
void HDF5Metadata::LinkToData(HDF5File *hdf5_file, const StartMessage &start, const EndMessage &end) {
    // Builds /entry/data/data as an HDF5 virtual dataset (VDS) mapping
    // images that are interleaved round-robin across the data files:
    // file k holds images k, k + stride, k + 2*stride, ...
    hsize_t total_images = end.number_of_images;
    hsize_t width = start.image_size_x;
    hsize_t height = start.image_size_y;
    hsize_t stride = start.data_file_count;
    // With fewer images than data files, only the first total_images
    // files actually contain data.
    hsize_t file_count = std::min<hsize_t>(stride, total_images);

    // No images collected: the data group is not written at all.
    if (total_images == 0)
        return;

    HDF5Group(*hdf5_file, "/entry/data").NXClass("NXdata");

    // Signed integer element type sized from the pixel bit depth.
    HDF5DataType data_type(start.pixel_bit_depth / 8, true);
    HDF5DataSpace full_data_space({total_images, height, width});
    HDF5Dcpl dcpl;

    for (hsize_t file_id = 0; file_id < file_count; file_id++) {
        // Each file holds total_images / stride images; the first
        // (total_images % stride) files hold one extra.
        hsize_t images_in_file = total_images / stride;
        if (total_images % stride > file_id)
            images_in_file++;

        HDF5DataSpace src_data_space({images_in_file, height, width});
        HDF5DataSpace virtual_data_space({total_images, height, width});
        // Select every stride-th image, starting at file_id, in the
        // virtual (combined) space.
        virtual_data_space.SelectHyperslabWithStride({file_id, 0, 0},{images_in_file, height, width},{stride,1,1});
        dcpl.SetVirtual(DataFileName(start.file_prefix, file_id),
                        "/entry/data/data",src_data_space, virtual_data_space);
    }

    // Unmapped regions of the VDS read back as the minimum representable
    // value for the pixel type.
    if (start.pixel_bit_depth == 16)
        dcpl.SetFillValue16(INT16_MIN);
    else
        dcpl.SetFillValue32(INT32_MIN);

    HDF5DataSet dataset(*hdf5_file, "/entry/data/data", data_type, full_data_space, dcpl);
    // Image numbering is 1-based, inclusive of total_images.
    dataset.Attr("image_nr_low", (int32_t) 1).Attr("image_nr_high", (int32_t) total_images);

    /*
    if (experiment.GetDetectorMode() == DetectorMode::Conversion)
        dataset.Units("photon");
    else
        dataset.Units("ADU");
    */
}
|
|
|
|
std::string HDF5Metadata::DataFileName(const std::string &prefix, int64_t file_number) {
|
|
if (file_number < 0)
|
|
throw JFJochException(JFJochExceptionCategory::InputParameterInvalid,
|
|
"File number cannot be negative");
|
|
else if (file_number >= 1000)
|
|
throw JFJochException(JFJochExceptionCategory::InputParameterInvalid,
|
|
"Format doesn't allow for more than 1 thousand files");
|
|
else
|
|
return fmt::format("{:s}_data_{:03d}.h5", prefix, file_number);
|
|
} |