Files
Jungfraujoch/writer/HDF5DataFile.cpp
leonarski_f f3e0a15d26
All checks were successful
Build Packages / build:rpm (rocky8_nocuda) (push) Successful in 10m51s
Build Packages / build:rpm (ubuntu2404_nocuda) (push) Successful in 8m0s
Build Packages / build:rpm (ubuntu2204_nocuda) (push) Successful in 9m6s
Build Packages / build:rpm (rocky9_nocuda) (push) Successful in 10m7s
Build Packages / build:rpm (rocky8_sls9) (push) Successful in 9m47s
Build Packages / Generate python client (push) Successful in 29s
Build Packages / Build documentation (push) Successful in 43s
Build Packages / Create release (push) Has been skipped
Build Packages / build:rpm (rocky9_sls9) (push) Successful in 10m46s
Build Packages / build:rpm (rocky8) (push) Successful in 9m33s
Build Packages / Unit tests (push) Has been skipped
Build Packages / build:rpm (ubuntu2204) (push) Successful in 8m47s
Build Packages / build:rpm (rocky9) (push) Successful in 9m55s
Build Packages / build:rpm (ubuntu2404) (push) Successful in 9m4s
v1.0.0-rc.127 (#34)
This is an UNSTABLE release. It contains significant modifications and bug fixes; if things go wrong, it is better to revert to 1.0.0-rc.124.

* jfjoch_broker: Default EIGER readout time is 20 microseconds
* jfjoch_broker: Multiple improvements regarding performance
* jfjoch_broker: Image buffer allows tracking frames in preparation and sending
* jfjoch_broker: Dedicated thread for ZeroMQ transmission to better utilize the image buffer
* jfjoch_broker: Experimental implementation of transmission with raw TCP/IP sockets
* jfjoch_writer: Fixes regarding properly closing files in long data collections
* jfjoch_process: Scale & merge has been significantly improved, but it is not yet integrated into mainstream code

Reviewed-on: #34
2026-03-02 15:57:12 +01:00

168 lines
5.2 KiB
C++

// SPDX-FileCopyrightText: 2024 Filip Leonarski, Paul Scherrer Institute <filip.leonarski@psi.ch>
// SPDX-License-Identifier: GPL-3.0-only
#include <sys/stat.h>
#include <filesystem>
#include <iostream>
#include "HDF5DataFile.h"
#include "../compression/JFJochCompressor.h"
#include "HDF5DataFilePluginAzInt.h"
#include "HDF5DataFilePluginMX.h"
#include "HDF5DataFilePluginXFEL.h"
#include "HDF5DataFilePluginDetector.h"
#include "HDF5DataFilePluginROI.h"
#include "../include/spdlog/fmt/fmt.h"
#include "HDF5NXmx.h"
#include "HDF5DataFilePluginImageStats.h"
#include "HDF5DataFilePluginReflection.h"
#include "../common/time_utc.h"
// Constructs the writer state for one data file of a collection.
// @param msg             start message carrying file naming, overwrite policy and geometry
// @param in_file_number  zero-based index of this file within the collection
//
// No HDF5 resources are opened here; the file itself is created lazily by
// CreateFile() on the first Write() call.
HDF5DataFile::HDF5DataFile(const StartMessage &msg, uint64_t in_file_number) {
    file_number = in_file_number;
    if (msg.overwrite.has_value())
        overwrite = msg.overwrite.value();
    xpixel = 0;
    ypixel = 0;
    max_image_number = 0;
    nimages = 0;

    filename = HDF5Metadata::DataFileName(msg, file_number);
    // First (zero-based) image number stored in this file.
    image_low = file_number * msg.images_per_file;

    // Suffix that makes the temporary file name unique. Prefer the arm date;
    // fall back to the current wall-clock time when the arm date is absent or
    // unparsable. Bug fix: previously tmp_suffix stayed uninitialized when
    // msg.arm_date was empty (the try body did nothing and the catch never
    // ran), so an indeterminate value was formatted into the file name (UB).
    uint64_t tmp_suffix = std::chrono::system_clock::now().time_since_epoch().count();
    try {
        if (!msg.arm_date.empty())
            tmp_suffix = parse_UTC_to_ms(msg.arm_date);
    } catch (...) {
        // Keep the wall-clock fallback when the arm date cannot be parsed.
    }
    tmp_filename = fmt::format("{}.{:08x}.tmp", filename, tmp_suffix);

    // Plugins contribute optional sections (ROI, detector metadata, azimuthal
    // integration, XFEL, MX, image statistics, reflections) to the file.
    plugins.emplace_back(std::make_unique<HDF5DataFilePluginROI>());
    plugins.emplace_back(std::make_unique<HDF5DataFilePluginDetector>());
    plugins.emplace_back(std::make_unique<HDF5DataFilePluginAzInt>(msg));
    plugins.emplace_back(std::make_unique<HDF5DataFilePluginXFEL>());
    plugins.emplace_back(std::make_unique<HDF5DataFilePluginMX>(msg));
    plugins.emplace_back(std::make_unique<HDF5DataFilePluginImageStats>());
    plugins.emplace_back(std::make_unique<HDF5DataFilePluginReflection>());
}
// Finalizes the file: writes per-image metadata vectors, lets plugins emit
// their final datasets, closes HDF5 handles and publishes the temporary file
// under its final name.
// @return statistics for the closed file, or an empty optional when no file
//         was ever created (no image was written).
std::optional<HDF5DataFileStatistics> HDF5DataFile::Close() {
    if (!data_file)
        return {};

    // Per-image bookkeeping (timestamp, exposure time, original frame number)
    // goes into an NXcollection next to the detector data.
    HDF5Group group_exp(*data_file, "/entry/detector");
    group_exp.NXClass("NXcollection");
    group_exp.SaveVector("timestamp", timestamp);
    group_exp.SaveVector("exptime", exptime);
    group_exp.SaveVector("number", number);

    for (auto &p: plugins)
        p->WriteFinal(*data_file);

    if (data_set) {
        // image_nr_low/high follow the 1-based numbering convention, so the
        // last image is image_low + 1 + max_image_number (0-based max index).
        data_set
                ->Attr("image_nr_low", (int32_t) (image_low + 1))
                .Attr("image_nr_high", (int32_t) (image_low + 1 + max_image_number));
        data_set.reset();
    }
    data_file.reset();

    // Atomically publish the temporary file; honor the overwrite policy.
    if (!std::filesystem::exists(filename.c_str()) || overwrite) {
        // Bug fix: report rename failures instead of silently ignoring the
        // return value and leaving the data stranded under the .tmp name.
        if (std::rename(tmp_filename.c_str(), filename.c_str()) != 0)
            std::cerr << "HDF5DataFile::Close: cannot rename " << tmp_filename
                      << " to " << filename << std::endl;
    }
    closed = true;

    HDF5DataFileStatistics ret;
    ret.max_image_number = max_image_number;
    ret.total_images = nimages;
    ret.filename = filename;
    ret.file_number = file_number + 1;   // next file index for the caller
    return ret;
}
// Best-effort close on destruction. A destructor must never let an exception
// escape, so any failure from Close() is logged and swallowed.
HDF5DataFile::~HDF5DataFile() {
    if (!data_file)
        return;
    try {
        Close();
    } catch (const std::exception &e) {
        std::cerr << "HDF5DataFile::~HDF5DataFile: " << e.what() << std::endl;
    } catch (...) {
        std::cerr << "HDF5DataFile::~HDF5DataFile: Unknown error " << std::endl;
    }
}
// Creates the temporary HDF5 file, the NXentry/NXdata hierarchy and the
// chunked, compressed /entry/data/data dataset (one image per chunk), then
// gives every plugin a chance to attach its own structures.
// @param msg  first data message; supplies geometry, pixel mode and compression
void HDF5DataFile::CreateFile(const DataMessage& msg) {
    HDF5Dcpl dcpl;
    // NOTE(review): removed unused local `bool pixel_signed` — it was declared
    // but never read or written.
    HDF5DataType data_type(msg.image.GetMode());

    xpixel = msg.image.GetWidth();
    ypixel = msg.image.GetHeight();

    dcpl.SetCompression(msg.image.GetCompressionAlgorithm(), JFJochBitShuffleCompressor::DefaultBlockSize);
    dcpl.SetChunking( {1, ypixel, xpixel});

    // Fill value (minimum of the signed type) marks pixels of images that were
    // never written; float/other modes keep the HDF5 default.
    switch (msg.image.GetMode()) {
        case CompressedImageMode::Int8:
            dcpl.SetFillValue8(INT8_MIN);
            break;
        case CompressedImageMode::Int16:
            dcpl.SetFillValue16(INT16_MIN);
            break;
        case CompressedImageMode::Int32:
            dcpl.SetFillValue32(INT32_MIN);
            break;
        default:
            break;
    }

    data_file = std::make_unique<HDF5File>(tmp_filename);
    HDF5Group(*data_file, "/entry").NXClass("NXentry");
    HDF5Group(*data_file, "/entry/data").NXClass("NXdata");

    // Unlimited first dimension lets Write() grow the dataset as images arrive.
    HDF5DataSpace data_space({1, ypixel, xpixel}, {H5S_UNLIMITED, ypixel, xpixel});
    data_set = std::make_unique<HDF5DataSet>(*data_file, "/entry/data/data", data_type, data_space, dcpl);

    for (auto &p: plugins)
        p->OpenFile(*data_file, msg);
}
// Writes one compressed image chunk plus its per-image metadata.
// Lazily creates the file on the first call, and grows the dataset extent and
// metadata vectors whenever a new maximum image number is observed. Calls
// after Close() are silently ignored.
// @param msg           data message carrying the compressed image and metadata
// @param image_number  zero-based slot of this image within the file
void HDF5DataFile::Write(const DataMessage &msg, uint64_t image_number) {
    if (closed)
        return;

    const bool first_image = !data_file;
    if (first_image)
        CreateFile(msg);

    if (first_image || (static_cast<int64_t>(image_number) > max_image_number)) {
        max_image_number = image_number;
        const auto image_count = max_image_number + 1;
        data_set->SetExtent({image_count, ypixel, xpixel});
        timestamp.resize(image_count);
        exptime.resize(image_count);
        number.resize(image_count);
    }
    nimages++;

    // Pre-compressed chunk goes straight to disk, bypassing the HDF5 filter
    // pipeline.
    data_set->WriteDirectChunk(msg.image.GetCompressed(), msg.image.GetCompressedSize(), {image_number, 0, 0});
    for (auto &p: plugins)
        p->Write(msg, image_number);

    timestamp[image_number] = msg.timestamp;
    exptime[image_number] = msg.exptime;
    // Prefer the original frame number when the message carries one.
    number[image_number] = msg.original_number ? msg.original_number.value() : msg.number;
}
// Returns the number of images written so far (the counter is incremented once
// per Write() call, so rewrites of the same image number are counted again).
size_t HDF5DataFile::GetNumImages() const {
return nimages;
}