From 0febe9856f809e761bff58b91645fb4a751c42e0 Mon Sep 17 00:00:00 2001 From: Filip Leonarski Date: Wed, 17 May 2023 20:07:25 +0200 Subject: [PATCH] HDF5DataFile: spot information is written as images are coming --- writer/HDF5DataFile.cpp | 36 +++++++++++++++++++++++------------ writer/HDF5DataFile.h | 6 +++--- writer/HDF5Objects.cpp | 2 +- writer/HDF5Objects.h | 42 +++++++++++++++++++++++++++++++++++++++++ 4 files changed, 70 insertions(+), 16 deletions(-) diff --git a/writer/HDF5DataFile.cpp b/writer/HDF5DataFile.cpp index 85d8255a..f562d4ff 100644 --- a/writer/HDF5DataFile.cpp +++ b/writer/HDF5DataFile.cpp @@ -30,9 +30,6 @@ HDF5DataFile::~HDF5DataFile() { if (!spot_count.empty()) { std::vector dims = {spot_count.size(), max_spots}; result_group->SaveVector("nPeaks", spot_count); - result_group->SaveVector("peakXPosRaw", spot_x, dims, CompressionAlgorithm::BSHUF_LZ4); - result_group->SaveVector("peakYPosRaw", spot_y, dims, CompressionAlgorithm::BSHUF_LZ4); - result_group->SaveVector("peakTotalIntensity", spot_intensity, dims, CompressionAlgorithm::BSHUF_LZ4); result_group->SaveVector("indexingResult", indexing_result); if (!rad_int_bin_to_q.empty()) @@ -65,10 +62,20 @@ void HDF5DataFile::CreateFile() { HDF5DataSpace data_space({1, ypixel, xpixel},{H5S_UNLIMITED, ypixel, xpixel}); data_set = std::make_unique(*data_file, "/entry/data/data", data_type, data_space, dcpl); + HDF5DataSpace data_space_spots({1, max_spots},{H5S_UNLIMITED, max_spots}); + HDF5Dcpl dcpl_spots; + dcpl_spots.SetChunking({1, max_spots}); + + data_set_spot_x = std::make_unique(*data_file, "/entry/result/peakXPosRaw", + HDF5DataType(0.0f), data_space_spots, dcpl_spots); + + data_set_spot_y = std::make_unique(*data_file, "/entry/result/peakYPosRaw", + HDF5DataType(0.0f), data_space_spots, dcpl_spots); + + data_set_spot_int = std::make_unique(*data_file, "/entry/result/TotalIntensity", + HDF5DataType(0.0f), data_space_spots, dcpl_spots); + spot_count.resize(1); - spot_x.resize(max_spots); - 
spot_y.resize(max_spots); - spot_intensity.resize(max_spots); indexing_result.resize(1); bunch_id.resize(1); jf_info.resize(1); @@ -84,10 +91,11 @@ void HDF5DataFile::Write(const DataMessage &msg, uint64_t image_number) { if (image_number > max_image_number) { max_image_number = image_number; data_set->SetExtent({max_image_number+1, ypixel, xpixel}); + data_set_spot_x->SetExtent({max_image_number+1, max_spots}); + data_set_spot_y->SetExtent({max_image_number+1, max_spots}); + data_set_spot_int->SetExtent({max_image_number+1, max_spots}); + spot_count.resize(max_image_number + 1); - spot_x.resize(max_spots * (max_image_number + 1)); - spot_y.resize(max_spots * (max_image_number + 1)); - spot_intensity.resize(max_spots * (max_image_number + 1)); indexing_result.resize(max_image_number + 1); bunch_id.resize(max_image_number + 1); jf_info.resize(max_image_number + 1); @@ -102,11 +110,15 @@ void HDF5DataFile::Write(const DataMessage &msg, uint64_t image_number) { spot_count[image_number] = cnt; + std::vector spot_x(max_spots), spot_y(max_spots), spot_intensity(max_spots); for (int i = 0; i < cnt; i++) { - spot_x[max_spots * image_number + i] = msg.spots[i].x; - spot_y[max_spots * image_number + i] = msg.spots[i].y; - spot_intensity[max_spots * image_number + i] = msg.spots[i].intensity; + spot_x[i] = msg.spots[i].x; + spot_y[i] = msg.spots[i].y; + spot_intensity[i] = msg.spots[i].intensity; } + data_set_spot_x->WriteVec(spot_x, {image_number, 0}, {1, max_spots}); + data_set_spot_y->WriteVec(spot_y, {image_number, 0}, {1, max_spots}); + data_set_spot_int->WriteVec(spot_intensity, {image_number, 0}, {1, max_spots}); indexing_result[image_number] = msg.indexing_result; bunch_id[image_number] = msg.bunch_id; diff --git a/writer/HDF5DataFile.h b/writer/HDF5DataFile.h index fc8747e4..f84fdd61 100644 --- a/writer/HDF5DataFile.h +++ b/writer/HDF5DataFile.h @@ -23,6 +23,9 @@ class HDF5DataFile { std::unique_ptr data_file = nullptr; std::unique_ptr data_set = nullptr; + 
std::unique_ptr data_set_spot_x = nullptr; + std::unique_ptr data_set_spot_y = nullptr; + std::unique_ptr data_set_spot_int = nullptr; std::unique_ptr result_group = nullptr; std::unique_ptr rad_int_group = nullptr; @@ -44,9 +47,6 @@ class HDF5DataFile { std::vector rad_int_bin_to_q; std::vector rad_int_file_avg; - std::vector spot_x; - std::vector spot_y; - std::vector spot_intensity; std::vector spot_count; const size_t max_spots; diff --git a/writer/HDF5Objects.cpp b/writer/HDF5Objects.cpp index 9f36ef1c..23899ed9 100644 --- a/writer/HDF5Objects.cpp +++ b/writer/HDF5Objects.cpp @@ -74,7 +74,7 @@ std::vector HDF5DataSpace::GetDimensions() const { void HDF5DataSpace::SelectHyperslab(const std::vector &start, const std::vector &size) { if ((start.size() != ndims) || (size.size() != ndims)) - throw JFJochException(JFJochExceptionCategory::HDF5, "Arrays are inconsistent with dataspace dimension number"); + throw JFJochException(JFJochExceptionCategory::HDF5, "Arrays are inconsistent with dataspace dimension number " + std::to_string(ndims)); if (H5Sselect_hyperslab(id, H5S_SELECT_SET, start.data(), NULL, size.data(), NULL) < 0) throw JFJochException(JFJochExceptionCategory::HDF5, "Cannot select hyperslab"); diff --git a/writer/HDF5Objects.h b/writer/HDF5Objects.h index 760e4720..8fa8c7eb 100644 --- a/writer/HDF5Objects.h +++ b/writer/HDF5Objects.h @@ -115,6 +115,8 @@ public: std::vector dim = {}, CompressionAlgorithm algorithm = CompressionAlgorithm::NO_COMPRESSION); }; +#include + class HDF5Group : public HDF5Object { public: HDF5Group(const HDF5Object& parent, const std::string& name); @@ -148,6 +150,46 @@ public: const HDF5DataSpace &data_space); ~HDF5DataSet(); HDF5DataSet& Write(const HDF5DataType &data_type, const void *val); + + template + HDF5DataSet& WriteVec(const std::vector &v, + const std::vector &start, + const std::vector &size) { + HDF5DataSpace mem_space({v.size()}); + HDF5DataSpace file_space(*this); + std::cout << 
int32_t(file_space.GetNumOfDimensions()) << std::endl; + + std::cout << int32_t(file_space.GetDimensions()[0]) << std::endl; + std::cout << int32_t(mem_space.GetDimensions()[0]) << std::endl; + + file_space.SelectHyperslab(start, size); + if (H5Dwrite(id, + HDF5DataType(v[0]).GetID(), + mem_space.GetID(), + file_space.GetID(), + H5P_DEFAULT, + v.data()) < 0) + throw JFJochException(JFJochExceptionCategory::HDF5, "Vector dataset write unsuccessful"); + return *this; + } + + template + HDF5DataSet& WriteScalar(const T &val, + const std::vector &start) { + HDF5DataSpace mem_space({1}); + HDF5DataSpace file_space(*this); + std::cout << uint32_t(file_space.GetNumOfDimensions()) << std::endl; + file_space.SelectHyperslab(start, {1}); + if (H5Dwrite(id, + HDF5DataType(val).GetID(), + mem_space.GetID(), + file_space.GetID(), + H5P_DEFAULT, + &val) < 0) + throw JFJochException(JFJochExceptionCategory::HDF5, "Scalar dataset write unsuccessful"); + return *this; + } + HDF5DataSet& WriteDirectChunk(const void *val, hsize_t data_size, const std::vector& offset); HDF5DataSet& Flush();