Compare commits

...

2 Commits

Author SHA1 Message Date
8809b8d0d5 Merge branch '2405-writer-metadata' into 'main'
Extra metadata in HDF5 writer completed file stream

See merge request jungfraujoch/nextgendcu!69
2024-06-05 18:18:12 +02:00
86b3934387 Extra metadata in HDF5 writer completed file stream 2024-06-05 18:18:12 +02:00
6 changed files with 90 additions and 33 deletions

View File

@@ -316,11 +316,8 @@ release:
- export PACKAGE_VERSION=`head -n1 VERSION`
- export PACKAGE_REGISTRY_URL="${CI_API_V4_URL}/projects/${CI_PROJECT_ID}/packages/generic/jungfraujoch/${PACKAGE_VERSION}"
- 'curl --header "JOB-TOKEN: $CI_JOB_TOKEN" --upload-file jfjoch-driver-dkms-${PACKAGE_VERSION}-1.el8.noarch.rpm "${PACKAGE_REGISTRY_URL}/jfjoch-driver-dkms-${PACKAGE_VERSION}-1.el8.noarch.rpm"'
- 'curl --header "JOB-TOKEN: $CI_JOB_TOKEN" --upload-file jfjoch-driver-dkms-${PACKAGE_VERSION}-1.el8.noarch.rpm "${PACKAGE_REGISTRY_URL}/jfjoch-driver-dkms.el8.noarch.rpm"'
- 'curl --header "JOB-TOKEN: $CI_JOB_TOKEN" --upload-file jfjoch-writer-${PACKAGE_VERSION}-1.el8.x86_64.rpm "${PACKAGE_REGISTRY_URL}/jfjoch-writer-${PACKAGE_VERSION}-1.el8.x86_64.rpm"'
- 'curl --header "JOB-TOKEN: $CI_JOB_TOKEN" --upload-file jfjoch-writer-${PACKAGE_VERSION}-1.el8.x86_64.rpm "${PACKAGE_REGISTRY_URL}/jfjoch-writer.el8.x86_64.rpm"'
- 'curl --header "JOB-TOKEN: $CI_JOB_TOKEN" --upload-file jfjoch-${PACKAGE_VERSION}-1.el8.x86_64.rpm "${PACKAGE_REGISTRY_URL}/jfjoch-${PACKAGE_VERSION}-1.el8.x86_64.rpm"'
- 'curl --header "JOB-TOKEN: $CI_JOB_TOKEN" --upload-file jfjoch-${PACKAGE_VERSION}-1.el8.x86_64.rpm "${PACKAGE_REGISTRY_URL}/jfjoch.el8.x86_64.rpm"'
- 'curl --header "JOB-TOKEN: $CI_JOB_TOKEN" --upload-file jfjoch_driver.tar.gz "${PACKAGE_REGISTRY_URL}/jfjoch_driver.tar.gz"'
- 'curl --header "JOB-TOKEN: $CI_JOB_TOKEN" --upload-file jfjoch_frontend.tar.gz "${PACKAGE_REGISTRY_URL}/jfjoch_frontend.tar.gz"'
- 'curl --header "JOB-TOKEN: $CI_JOB_TOKEN" --upload-file jfjoch_fpga_pcie_100g.mcs "${PACKAGE_REGISTRY_URL}/jfjoch_fpga_pcie_100g.mcs"'
@@ -331,9 +328,6 @@ release:
--assets-link "{\"name\":\"jfjoch_frontend.tar.gz\",\"url\":\"${PACKAGE_REGISTRY_URL}/jfjoch_frontend.tar.gz\"}"
--assets-link "{\"name\":\"jfjoch_fpga_pcie_8x10g.mcs\",\"url\":\"${PACKAGE_REGISTRY_URL}/jfjoch_fpga_pcie_8x10g.mcs\"}"
--assets-link "{\"name\":\"jfjoch_fpga_pcie_100g.mcs\",\"url\":\"${PACKAGE_REGISTRY_URL}/jfjoch_fpga_pcie_100g.mcs\"}"
--assets-link "{\"name\":\"jfjoch-${PACKAGE_VERSION}-1.el8.x86_64.rpm\",\"url\":\"${PACKAGE_REGISTRY_URL}/jfjoch-${PACKAGE_VERSION}-1.el8.x86_64.rpm\"}"
--assets-link "{\"name\":\"jfjoch-writer-${PACKAGE_VERSION}-1.el8.x86_64.rpm\",\"url\":\"${PACKAGE_REGISTRY_URL}/jfjoch-writer-${PACKAGE_VERSION}-1.el8.x86_64.rpm\"}"
--assets-link "{\"name\":\"jfjoch-driver-dkms-${PACKAGE_VERSION}-1.el8.noarch.rpm\",\"url\":\"${PACKAGE_REGISTRY_URL}/jfjoch-driver-dkms-${PACKAGE_VERSION}-1.el8.noarch.rpm\"}"
--assets-link "{\"name\":\"jfjoch.el8.x86_64.rpm\",\"url\":\"${PACKAGE_REGISTRY_URL}/jfjoch.el8.x86_64.rpm\"}"
--assets-link "{\"name\":\"jfjoch-writer.el8.x86_64.rpm\",\"url\":\"${PACKAGE_REGISTRY_URL}/jfjoch-writer.el8.x86_64.rpm\"}"
--assets-link "{\"name\":\"jfjoch-driver-dkms.el8.noarch.rpm\",\"url\":\"${PACKAGE_REGISTRY_URL}/jfjoch-driver-dkms.el8.noarch.rpm\"}"
--assets-link "{\"name\":\"jfjoch-${PACKAGE_VERSION}-1.el8.x86_64.rpm\",\"url\":\"${PACKAGE_REGISTRY_URL}/jfjoch-${PACKAGE_VERSION}-1.el8.x86_64.rpm\",\"link_type\":\"package\"}"
--assets-link "{\"name\":\"jfjoch-writer-${PACKAGE_VERSION}-1.el8.x86_64.rpm\",\"url\":\"${PACKAGE_REGISTRY_URL}/jfjoch-writer-${PACKAGE_VERSION}-1.el8.x86_64.rpm\",\"link_type\":\"package\"}"
--assets-link "{\"name\":\"jfjoch-driver-dkms-${PACKAGE_VERSION}-1.el8.noarch.rpm\",\"url\":\"${PACKAGE_REGISTRY_URL}/jfjoch-driver-dkms-${PACKAGE_VERSION}-1.el8.noarch.rpm\",\"link_type\":\"package\"}"

View File

@@ -1 +1 @@
1.0.0_rc.7
1.0.0_rc.8

View File

@@ -226,7 +226,11 @@ TEST_CASE("HDF5Writer_Socket", "[HDF5][Full]") {
std::vector<SpotToSave> spots;
x.FilePrefix("test05").ImagesPerTrigger(5).ImagesPerFile(2).Compression(CompressionAlgorithm::NO_COMPRESSION)
.HeaderAppendix("{\"z\":567}");
.HeaderAppendix("{\"z\":567}").DetectorDistance_mm(155).BeamX_pxl(1606.62).BeamY_pxl(1669.59)
.FrameTime(std::chrono::microseconds(1000), std::chrono::microseconds(100))
.PhotonEnergy_keV(12.07).SetUnitCell(UnitCell{.a = 97, .b = 97, .c = 38, .alpha= 90, .beta = 90, .gamma = 90})
.SpaceGroupNumber(96);
StartMessage start_message;
x.FillMessage(start_message);
@@ -259,11 +263,15 @@ TEST_CASE("HDF5Writer_Socket", "[HDF5][Full]") {
nlohmann::json j;
REQUIRE(s.Receive(msg, true));
j = nlohmann::json::parse(std::string((char *) msg.data(), msg.size()));
REQUIRE(j["filename"] == "test05_data_000001.h5");
REQUIRE(j["nimages"] == 2);
REQUIRE(j["photon_energy_eV"] == Catch::Approx(x.GetPhotonEnergy_keV() * 1000.0));
REQUIRE(j["space_group_number"] == 96);
REQUIRE(j.contains("user_data"));
REQUIRE(j["user_data"]["z"] == 567);
std::cout << j.dump(4) << std::endl;
REQUIRE(s.Receive(msg, true));
j = nlohmann::json::parse(std::string((char *) msg.data(), msg.size()));

View File

@@ -5,11 +5,9 @@
#include <nlohmann/json.hpp>
HDF5Writer::HDF5Writer(const StartMessage &request)
: images_per_file(request.images_per_file),
file_prefix(request.file_prefix),
max_spot_count(request.max_spot_count),
az_int_bin_to_q(request.az_int_bin_to_q),
user_data(request.user_data) {}
: start_message(request) {
}
void HDF5Writer::Write(const DataMessage& message) {
std::lock_guard<std::mutex> lock(hdf5_mutex);
@@ -21,23 +19,23 @@ void HDF5Writer::Write(const DataMessage& message) {
size_t file_number = 0;
size_t image_number = message.number;
if (images_per_file > 0) {
file_number = message.number / images_per_file;
image_number = message.number % images_per_file;
if (start_message.images_per_file > 0) {
file_number = message.number / start_message.images_per_file;
image_number = message.number % start_message.images_per_file;
}
if (files.size() <= file_number)
files.resize(file_number + 1);
if (!files[file_number])
files[file_number] = std::make_unique<HDF5DataFile>(HDF5Metadata::DataFileName(file_prefix, file_number),
az_int_bin_to_q,
file_number * images_per_file,
max_spot_count);
files[file_number] = std::make_unique<HDF5DataFile>(HDF5Metadata::DataFileName(start_message.file_prefix, file_number),
start_message.az_int_bin_to_q,
file_number * start_message.images_per_file,
start_message.max_spot_count);
// Ignore zero size images
if (message.image.size > 0)
files[file_number]->Write(message, image_number);
if (files[file_number]->GetNumImages() == images_per_file)
if (files[file_number]->GetNumImages() == start_message.images_per_file)
AddStats(files[file_number]->Close());
}
@@ -59,14 +57,35 @@ void HDF5Writer::AddStats(const std::optional<HDF5DataFileStatistics>& s) {
nlohmann::json j;
j["filename"] = s->filename;
j["nimages"] = s->total_images;
if (!user_data.empty()) {
j["detector_distance_m"] = start_message.detector_distance;
j["beam_x_pxl"] = start_message.beam_center_x;
j["beam_y_pxl"] = start_message.beam_center_y;
j["pixel_size_m"] = start_message.pixel_size_x;
j["detector_width_pxl"] = start_message.image_size_x;
j["detector_height_pxl"] = start_message.image_size_y;
j["photon_energy_eV"] = start_message.incident_energy;
j["saturation"] = start_message.saturation_value;
if (start_message.unit_cell) {
j["unit_cell"]["a"] = start_message.unit_cell->a;
j["unit_cell"]["b"] = start_message.unit_cell->b;
j["unit_cell"]["c"] = start_message.unit_cell->c;
j["unit_cell"]["alpha"] = start_message.unit_cell->alpha;
j["unit_cell"]["beta"] = start_message.unit_cell->beta;
j["unit_cell"]["gamma"] = start_message.unit_cell->gamma;
}
if (start_message.space_group_number > 0)
j["space_group_number"] = start_message.space_group_number;
if (start_message.error_value)
j["underload"] = start_message.error_value.value();
if (!start_message.user_data.empty()) {
nlohmann::json j_userdata;
// if user_data is valid json, interpret it as such, otherwise embed as string
try {
j_userdata = nlohmann::json::parse(user_data);
j_userdata = nlohmann::json::parse(start_message.user_data);
} catch (...) {
j_userdata = user_data;
j_userdata = start_message.user_data;
}
j["user_data"] = j_userdata;
}

View File

@@ -10,13 +10,10 @@
#include "../common/ZMQWrappers.h"
class HDF5Writer {
StartMessage start_message;
std::vector<std::unique_ptr<HDF5DataFile> > files;
int64_t images_per_file;
std::string file_prefix;
uint64_t max_spot_count;
std::vector<float> az_int_bin_to_q;
std::vector<HDF5DataFileStatistics> stats;
std::string user_data;
std::unique_ptr<ZMQSocket> socket;

View File

@@ -45,9 +45,24 @@ Creates PUB socket to inform about finalized data files. For each closed file, t
"filename": <string>: HDF5 data file name (relative to writer root directory),
"nimages": <int> number of images in the file,
"user_data": <string> or <json> user_data
"beam_x_pxl": <float> beam center (X) in pixels,
"beam_y_pxl": <float> beam center (Y) in pixels,
"detector_distance_m": <float> detector distance in m,
"detector_height_pxl": <int> detector size (Y) in pixels,
"detector_width_pxl": <int> detector size (X) in pixels,
"photon_energy_eV": <float> photon energy of the X-ray beam,
"pixel_size_m": <float> pixel size in meter (assuming pixel X == Y),
"saturation": <int> this count and higher mean saturation,
"space_group_number": <int> space group number (optional),
"underload": <int> pixels with this count should be excluded,
"unit_cell": <optional> unit cell dimensions in Angstrom/degree {
"a": <float>, "b": <float>, "c": <float>,
"alpha": <float>, "beta": <float>, "gamma": <float>
},
}
```
`user_data` is defined as `header_appendix` in the `/start` operation in the `jfjoch_broker`.
Other metadata are also carried over from `/start` operation.
If the `header_appendix` is a string containing valid JSON, it will be embedded as JSON; otherwise it will be escaped as a string.
For example, with a `header_appendix` of `{"param1": "test1", "param2": ["test1", "test2"]}`, an example message will look as follows:
@@ -55,6 +70,26 @@ For example `header_appendix` of `{"param1": "test1", "param2": ["test1", "test2
{
"filename": "dataset_name_data_000001.h5",
"nimages": 1000,
"beam_x_pxl": 1200,
"beam_y_pxl": 1500,
"detector_distance_m": 0.155,
"detector_height_pxl": 2164,
"detector_width_pxl": 2068,
"image_time_s": 0.001,
"photon_energy_eV": 12400.0,
"pixel_size_m": 7.5e-05,
"saturation": 32766,
"space_group_number": 96,
"underload": -32768,
"unit_cell": {
"a": 78.0,
"alpha": 90.0,
"b": 78.0,
"beta": 90.0,
"c": 39.0,
"gamma": 90.0
},
"user_data": {
"param1": "test1",
"param2": ["test1", "test2"]
@@ -68,6 +103,10 @@ Notifications for finalized files are optional, if notification port number is o
Jungfraujoch aims to generate files compliant with NXmx format, as well as make them as close as possible to files
written by DECTRIS Filewriter. This ensures the file compatibility of Neggia and Durin XDS plugins, as well as Albula viewer.
If spot finding is enabled, spots are written in the [CXI format](https://raw.githubusercontent.com/cxidb/CXI/master/cxi_file_format.pdf) and is recoginzed by CrystFEL.
If spot finding is enabled, spots are written in the [CXI format](https://raw.githubusercontent.com/cxidb/CXI/master/cxi_file_format.pdf) and are recognized by CrystFEL. The following has to be added to the geometry file:
```
peak_list = /opt/MX
peak_list_type = cxi
```
There are custom extensions to the NXmx format. These will be documented in the future.