Compare commits

..

12 Commits

Author SHA1 Message Date
aa72d1e59b Fix XDS test
Some checks failed
Build Packages / build:rpm (ubuntu2404_nocuda) (push) Successful in 15m46s
Build Packages / build:rpm (rocky8_nocuda) (push) Successful in 16m4s
Build Packages / build:rpm (ubuntu2204_nocuda) (push) Successful in 16m4s
Build Packages / build:rpm (rocky8) (push) Successful in 16m29s
Build Packages / build:rpm (rocky8_sls9) (push) Successful in 16m56s
Build Packages / build:rpm (rocky9_nocuda) (push) Successful in 17m6s
Build Packages / build:rpm (rocky9_sls9) (push) Successful in 17m23s
Build Packages / Generate python client (push) Successful in 1m11s
Build Packages / Create release (push) Has been skipped
Build Packages / Build documentation (push) Successful in 1m41s
Build Packages / build:rpm (ubuntu2404) (push) Successful in 11m25s
Build Packages / build:rpm (ubuntu2204) (push) Successful in 12m47s
Build Packages / build:rpm (rocky9) (push) Successful in 13m6s
Build Packages / Data processing test (push) Failing after 13m47s
Build Packages / Unit tests (push) Successful in 1h1m48s
2026-03-30 18:35:39 +02:00
72613ca3a2 CI: Add one more XDS test
Some checks failed
Build Packages / build:rpm (rocky8_nocuda) (push) Successful in 13m55s
Build Packages / build:rpm (ubuntu2404_nocuda) (push) Successful in 15m30s
Build Packages / build:rpm (rocky9_nocuda) (push) Successful in 16m44s
Build Packages / build:rpm (ubuntu2204_nocuda) (push) Successful in 17m12s
Build Packages / build:rpm (rocky8) (push) Successful in 17m25s
Build Packages / build:rpm (rocky8_sls9) (push) Successful in 17m40s
Build Packages / Generate python client (push) Successful in 1m13s
Build Packages / Create release (push) Has been skipped
Build Packages / Build documentation (push) Failing after 58s
Build Packages / build:rpm (rocky9) (push) Successful in 19m6s
Build Packages / build:rpm (rocky9_sls9) (push) Successful in 19m15s
Build Packages / build:rpm (ubuntu2204) (push) Successful in 10m54s
Build Packages / build:rpm (ubuntu2404) (push) Successful in 9m4s
Build Packages / Data processing test (push) Failing after 11m16s
Build Packages / Unit tests (push) Successful in 58m18s
2026-03-30 14:54:00 +02:00
69c090bfa4 HDF5NXmx: Always use _master.h5 for consistency 2026-03-30 14:53:11 +02:00
e57a5e4f79 CI: Add XDS test
Some checks failed
Build Packages / build:rpm (ubuntu2204) (push) Has been cancelled
Build Packages / build:rpm (ubuntu2404) (push) Has been cancelled
Build Packages / Data processing test (push) Has been cancelled
Build Packages / Generate python client (push) Has been cancelled
Build Packages / build:rpm (rocky8_sls9) (push) Has been cancelled
Build Packages / Build documentation (push) Has been cancelled
Build Packages / Unit tests (push) Has been cancelled
Build Packages / Create release (push) Has been cancelled
Build Packages / build:rpm (rocky9) (push) Has been cancelled
Build Packages / build:rpm (rocky9_sls9) (push) Has been cancelled
Build Packages / build:rpm (rocky8) (push) Has been cancelled
Build Packages / build:rpm (rocky8_nocuda) (push) Has been cancelled
Build Packages / build:rpm (ubuntu2204_nocuda) (push) Has been cancelled
Build Packages / build:rpm (ubuntu2404_nocuda) (push) Has been cancelled
Build Packages / build:rpm (rocky9_nocuda) (push) Has been cancelled
2026-03-30 14:44:07 +02:00
dfbadc4cb3 CI: Separate directories for each test
Some checks failed
Build Packages / build:rpm (rocky8_nocuda) (push) Successful in 10m11s
Build Packages / build:rpm (ubuntu2204_nocuda) (push) Has been cancelled
Build Packages / build:rpm (ubuntu2404_nocuda) (push) Has started running
Build Packages / build:rpm (rocky8_sls9) (push) Has started running
Build Packages / build:rpm (rocky8) (push) Has started running
Build Packages / build:rpm (ubuntu2404) (push) Has been cancelled
Build Packages / DIALS processing test (push) Has been cancelled
Build Packages / Generate python client (push) Has been cancelled
Build Packages / Build documentation (push) Has been cancelled
Build Packages / Unit tests (push) Has been cancelled
Build Packages / Create release (push) Has been cancelled
Build Packages / build:rpm (rocky9_sls9) (push) Has been cancelled
Build Packages / build:rpm (ubuntu2204) (push) Has been cancelled
Build Packages / build:rpm (rocky9_nocuda) (push) Has been cancelled
Build Packages / build:rpm (rocky9) (push) Has been cancelled
2026-03-30 14:31:36 +02:00
baf36c9c0a CI: Fix empty section
Some checks failed
Build Packages / build:rpm (rocky8_nocuda) (push) Successful in 11m16s
Build Packages / build:rpm (ubuntu2404_nocuda) (push) Successful in 12m22s
Build Packages / build:rpm (ubuntu2204_nocuda) (push) Successful in 16m5s
Build Packages / build:rpm (rocky9_nocuda) (push) Successful in 16m19s
Build Packages / Generate python client (push) Successful in 56s
Build Packages / build:rpm (rocky8) (push) Successful in 17m21s
Build Packages / build:rpm (rocky8_sls9) (push) Successful in 17m43s
Build Packages / Create release (push) Has been skipped
Build Packages / Build documentation (push) Successful in 50s
Build Packages / build:rpm (rocky9) (push) Successful in 18m27s
Build Packages / build:rpm (rocky9_sls9) (push) Successful in 18m43s
Build Packages / build:rpm (ubuntu2404) (push) Successful in 8m48s
Build Packages / build:rpm (ubuntu2204) (push) Successful in 10m54s
Build Packages / DIALS processing test (push) Successful in 11m26s
Build Packages / Unit tests (push) Has been cancelled
2026-03-30 13:52:44 +02:00
b91fbb03fb Frontend: Fix typo 2026-03-30 13:52:34 +02:00
7570794a60 Write single file option enabled. Minor issues:
Some checks failed
Build Packages / build:rpm (rocky8_nocuda) (push) Failing after 6m2s
Build Packages / build:rpm (rocky9_nocuda) (push) Failing after 7m30s
Build Packages / build:rpm (ubuntu2404_nocuda) (push) Failing after 8m18s
Build Packages / DIALS processing test (push) Failing after 0s
Build Packages / build:rpm (rocky8_sls9) (push) Failing after 8m45s
Build Packages / build:rpm (ubuntu2204_nocuda) (push) Failing after 8m52s
Build Packages / Generate python client (push) Successful in 2m0s
Build Packages / Create release (push) Has been skipped
Build Packages / build:rpm (rocky9_sls9) (push) Failing after 10m28s
Build Packages / Build documentation (push) Successful in 2m27s
Build Packages / build:rpm (rocky9) (push) Failing after 11m35s
Build Packages / build:rpm (rocky8) (push) Failing after 11m47s
Build Packages / build:rpm (ubuntu2204) (push) Failing after 7m45s
Build Packages / build:rpm (ubuntu2404) (push) Failing after 6m16s
Build Packages / Unit tests (push) Successful in 56m23s
1. What if write_master_file = false
2. Finalized file statistics use wrong name
2026-03-30 12:29:36 +02:00
6639b584ec Minor improvements to integrated NXmx workflow
Some checks failed
Build Packages / build:rpm (rocky8_nocuda) (push) Failing after 6m27s
Build Packages / build:rpm (rocky9_nocuda) (push) Failing after 6m48s
Build Packages / build:rpm (ubuntu2204_nocuda) (push) Failing after 8m21s
Build Packages / build:rpm (ubuntu2404_nocuda) (push) Failing after 9m18s
Build Packages / build:rpm (rocky8_sls9) (push) Failing after 9m20s
Build Packages / Generate python client (push) Successful in 49s
Build Packages / build:rpm (rocky9) (push) Failing after 10m21s
Build Packages / Create release (push) Has been skipped
Build Packages / build:rpm (rocky8) (push) Failing after 11m36s
Build Packages / Build documentation (push) Successful in 2m20s
Build Packages / build:rpm (rocky9_sls9) (push) Failing after 12m45s
Build Packages / build:rpm (ubuntu2404) (push) Failing after 7m55s
Build Packages / build:rpm (ubuntu2204) (push) Failing after 8m21s
Build Packages / DIALS processing test (push) Successful in 10m14s
Build Packages / Unit tests (push) Failing after 56m6s
2026-03-29 13:22:36 +02:00
cd0fa49f73 HDF5: Groups and attributes creation can be reused 2026-03-29 13:22:08 +02:00
91dd670043 OpenAPI: Add integrated NXmx file writer format 2026-03-28 20:07:25 +01:00
81bd9a06a1 CI pipeline upgrade (#42)
All checks were successful
Build Packages / build:rpm (rocky8_nocuda) (push) Successful in 11m16s
Build Packages / build:rpm (ubuntu2404_nocuda) (push) Successful in 14m23s
Build Packages / build:rpm (ubuntu2204_nocuda) (push) Successful in 15m5s
Build Packages / build:rpm (rocky9_nocuda) (push) Successful in 15m56s
Build Packages / Generate python client (push) Successful in 1m18s
Build Packages / build:rpm (rocky8_sls9) (push) Successful in 17m34s
Build Packages / build:rpm (rocky8) (push) Successful in 17m48s
Build Packages / Create release (push) Has been skipped
Build Packages / Build documentation (push) Successful in 54s
Build Packages / build:rpm (rocky9) (push) Successful in 18m40s
Build Packages / build:rpm (rocky9_sls9) (push) Successful in 18m55s
Build Packages / build:rpm (ubuntu2204) (push) Successful in 10m4s
Build Packages / build:rpm (ubuntu2404) (push) Successful in 8m53s
Build Packages / DIALS processing test (push) Successful in 8m45s
Build Packages / Unit tests (push) Successful in 56m4s
Updates to CI pipeline

* New docker image for Ubuntu 22.04 with CMake 3.26
* New docker image for Rocky 9 with DIALS 3.27
* New automated test to check for DIALS processing with xia2.ssx

Reviewed-on: #42
2026-03-28 20:06:23 +01:00
31 changed files with 844 additions and 429 deletions

View File

@@ -76,27 +76,16 @@ jobs:
upload_url: https://gitea.psi.ch/api/packages/mx/debian/pool/noble/nocuda/upload
steps:
- uses: actions/checkout@v4
- name: Install CMake 3.26 on Ubuntu 22.04
if: matrix.distro == 'ubuntu2204' || matrix.distro == 'ubuntu2204_nocuda'
shell: bash
run: |
set -euo pipefail
apt-get update
apt-get install -y wget gpg ca-certificates
wget -qO- https://apt.kitware.com/keys/kitware-archive-latest.asc \
| gpg --dearmor \
| tee /usr/share/keyrings/kitware-archive-keyring.gpg > /dev/null
echo "deb [signed-by=/usr/share/keyrings/kitware-archive-keyring.gpg] https://apt.kitware.com/ubuntu/ jammy main" \
| tee /etc/apt/sources.list.d/kitware.list > /dev/null
apt-get update
apt-get install -y cmake=3.26.* cmake-data=3.26.* kitware-archive-keyring
cmake --version
- name: Build packages
- name: Setup build (cmake)
shell: bash
run: |
mkdir -p build
cd build
cmake -G Ninja -DJFJOCH_INSTALL_DRIVER_SOURCE=ON -DJFJOCH_VIEWER_BUILD=ON -DCMAKE_BUILD_TYPE=Release ${{ matrix.cmake_flags }} ..
- name: Build packages
shell: bash
run: |
cd build
ninja frontend
ninja -j16 package
- name: Upload packages
@@ -133,6 +122,57 @@ jobs:
python3 gitea_upload_file.py "$file"
done
fi
processing-test:
name: Data processing test
runs-on: jfjoch_rocky9
steps:
- uses: actions/checkout@v4
- name: Build processing test
shell: bash
run: |
mkdir -p build
cd build
cmake -G Ninja -DCMAKE_BUILD_TYPE=Release ..
ninja -j16 jfjoch_hdf5_test
- name: Run DIALS processing on legacy format
shell: bash
run: |
source /opt/dials-v3-27-0/dials_env.sh
mkdir -p test01
cd test01
../build/tools/jfjoch_hdf5_test ../tests/test_data/compression_benchmark.h5 -n100 -o legacy
xia2.ssx image=legacy_master.h5 space_group=P43212 unit_cell=78.551,78.551,36.914,90.000,90.000,90.000
- name: Run DIALS processing on VDS (master + 4 linked image files)
shell: bash
run: |
source /opt/dials-v3-27-0/dials_env.sh
mkdir -p test02
cd test02
../build/tools/jfjoch_hdf5_test ../tests/test_data/compression_benchmark.h5 -n100 -f25 -V -o vds
xia2.ssx image=vds_master.h5 space_group=P43212 unit_cell=78.551,78.551,36.914,90.000,90.000,90.000
- name: Run DIALS processing on single file format
shell: bash
run: |
source /opt/dials-v3-27-0/dials_env.sh
mkdir -p test03
cd test03
../build/tools/jfjoch_hdf5_test ../tests/test_data/compression_benchmark.h5 -n100 -S -o single
xia2.ssx image=single.h5 space_group=P43212 unit_cell=78.551,78.551,36.914,90.000,90.000,90.000
- name: Run XDS with Durin and VDS format
shell: bash
run: |
cd tests/xds_durin
../../build/tools/jfjoch_hdf5_test ../../tests/test_data/compression_benchmark.h5 -n25 -f10 -V
/opt/xds/xds_par
test -f IDXREF.LP
- name: Run XDS with Durin and single HDF5 format
shell: bash
run: |
cd tests/xds_durin
rm *.h5
../../build/tools/jfjoch_hdf5_test ../../tests/test_data/compression_benchmark.h5 -n25 -S
/opt/xds/xds_par
test -f IDXREF.LP
python-client:
name: Generate python client
runs-on: jfjoch_rocky8

View File

@@ -175,7 +175,7 @@ ENDIF()
IF (NOT JFJOCH_WRITER_ONLY)
ADD_CUSTOM_COMMAND(OUTPUT frontend/dist/index.html
COMMAND npm install
COMMAND npm ci
COMMAND npm run build
COMMAND npm run redocly
WORKING_DIRECTORY ${CMAKE_SOURCE_DIR}/frontend)

View File

@@ -795,7 +795,7 @@ org::openapitools::server::model::File_writer_format Convert(FileWriterFormat in
org::openapitools::server::model::File_writer_format ret;
switch (input) {
case FileWriterFormat::DataOnly:
ret.setValue(org::openapitools::server::model::File_writer_format::eFile_writer_format::NONE);
ret.setValue(org::openapitools::server::model::File_writer_format::eFile_writer_format::NXMXONLYDATA);
break;
case FileWriterFormat::NXmxLegacy:
ret.setValue(org::openapitools::server::model::File_writer_format::eFile_writer_format::NXMXLEGACY);
@@ -803,6 +803,9 @@ org::openapitools::server::model::File_writer_format Convert(FileWriterFormat in
case FileWriterFormat::NXmxVDS:
ret.setValue(org::openapitools::server::model::File_writer_format::eFile_writer_format::NXMXVDS);
break;
case FileWriterFormat::NXmxIntegrated:
ret.setValue(org::openapitools::server::model::File_writer_format::eFile_writer_format::NXMXINTEGRATED);
break;
case FileWriterFormat::CBF:
ret.setValue(org::openapitools::server::model::File_writer_format::eFile_writer_format::CBF);
break;
@@ -820,12 +823,14 @@ org::openapitools::server::model::File_writer_format Convert(FileWriterFormat in
FileWriterFormat Convert(const org::openapitools::server::model::File_writer_format& input) {
switch (input.getValue()) {
case org::openapitools::server::model::File_writer_format::eFile_writer_format::NONE:
case org::openapitools::server::model::File_writer_format::eFile_writer_format::NXMXONLYDATA:
return FileWriterFormat::DataOnly;
case org::openapitools::server::model::File_writer_format::eFile_writer_format::NXMXLEGACY:
return FileWriterFormat::NXmxLegacy;
case org::openapitools::server::model::File_writer_format::eFile_writer_format::NXMXVDS:
return FileWriterFormat::NXmxVDS;
case org::openapitools::server::model::File_writer_format::eFile_writer_format::NXMXINTEGRATED:
return FileWriterFormat::NXmxIntegrated;
case org::openapitools::server::model::File_writer_format::eFile_writer_format::CBF:
return FileWriterFormat::CBF;
case org::openapitools::server::model::File_writer_format::eFile_writer_format::TIFF:

View File

@@ -75,8 +75,8 @@ void to_json(nlohmann::json& j, const File_writer_format& o)
case File_writer_format::eFile_writer_format::INVALID_VALUE_OPENAPI_GENERATED:
j = "INVALID_VALUE_OPENAPI_GENERATED";
break;
case File_writer_format::eFile_writer_format::NONE:
j = "None";
case File_writer_format::eFile_writer_format::NXMXONLYDATA:
j = "NXmxOnlyData";
break;
case File_writer_format::eFile_writer_format::NXMXLEGACY:
j = "NXmxLegacy";
@@ -84,6 +84,9 @@ void to_json(nlohmann::json& j, const File_writer_format& o)
case File_writer_format::eFile_writer_format::NXMXVDS:
j = "NXmxVDS";
break;
case File_writer_format::eFile_writer_format::NXMXINTEGRATED:
j = "NXmxIntegrated";
break;
case File_writer_format::eFile_writer_format::CBF:
j = "CBF";
break;
@@ -100,8 +103,8 @@ void from_json(const nlohmann::json& j, File_writer_format& o)
{
auto s = j.get<std::string>();
if (s == "None") {
o.setValue(File_writer_format::eFile_writer_format::NONE);
if (s == "NXmxOnlyData") {
o.setValue(File_writer_format::eFile_writer_format::NXMXONLYDATA);
}
else if (s == "NXmxLegacy") {
o.setValue(File_writer_format::eFile_writer_format::NXMXLEGACY);
@@ -109,6 +112,9 @@ void from_json(const nlohmann::json& j, File_writer_format& o)
else if (s == "NXmxVDS") {
o.setValue(File_writer_format::eFile_writer_format::NXMXVDS);
}
else if (s == "NXmxIntegrated") {
o.setValue(File_writer_format::eFile_writer_format::NXMXINTEGRATED);
}
else if (s == "CBF") {
o.setValue(File_writer_format::eFile_writer_format::CBF);
}

View File

@@ -12,7 +12,7 @@
/*
* File_writer_format.h
*
* None - no master file written NXmxLegacy - legacy format with soft links to data files in the master file; necessary for DECTRIS Albula 4.0 and DECTRIS Neggia NXmxVDS - newer format with virtual dataset linking data files in the master file, also includes better metadata handling
* NoFileWritten - no files are written at all NXmxOnlyData - only data files are written, no master file NXmxLegacy - legacy format with soft links to data files in the master file; necessary for DECTRIS Albula 4.0 and DECTRIS Neggia NXmxVDS - newer format with virtual dataset linking data files in the master file, also includes better metadata handling NXmxIntegrated - single HDF5 per dataset CBF - CBF format (limited metadata) TIFF - TIFF format (no metadata)
*/
#ifndef File_writer_format_H_
@@ -25,7 +25,7 @@ namespace org::openapitools::server::model
{
/// <summary>
/// None - no master file written NXmxLegacy - legacy format with soft links to data files in the master file; necessary for DECTRIS Albula 4.0 and DECTRIS Neggia NXmxVDS - newer format with virtual dataset linking data files in the master file, also includes better metadata handling
/// NoFileWritten - no files are written at all NXmxOnlyData - only data files are written, no master file NXmxLegacy - legacy format with soft links to data files in the master file; necessary for DECTRIS Albula 4.0 and DECTRIS Neggia NXmxVDS - newer format with virtual dataset linking data files in the master file, also includes better metadata handling NXmxIntegrated - single HDF5 per dataset CBF - CBF format (limited metadata) TIFF - TIFF format (no metadata)
/// </summary>
class File_writer_format
{
@@ -38,9 +38,10 @@ public:
// Avoiding name clashes with user defined
// enum values
INVALID_VALUE_OPENAPI_GENERATED = 0,
NONE,
NXMXONLYDATA,
NXMXLEGACY,
NXMXVDS,
NXMXINTEGRATED,
CBF,
TIFF,
NOFILEWRITTEN

View File

@@ -642,17 +642,22 @@ components:
file_writer_format:
type: string
enum:
- "None"
- "NXmxOnlyData"
- "NXmxLegacy"
- "NXmxVDS"
- "NXmxIntegrated"
- "CBF"
- "TIFF"
- "NoFileWritten"
default: "NXmxLegacy"
description: |
None - no master file written
NoFileWritten - no files are written at all
NXmxOnlyData - only data files are written, no master file
NXmxLegacy - legacy format with soft links to data files in the master file; necessary for DECTRIS Albula 4.0 and DECTRIS Neggia
NXmxVDS - newer format with virtual dataset linking data files in the master file, also includes better metadata handling
NXmxIntegrated - single HDF5 per dataset
CBF - CBF format (limited metadata)
TIFF - TIFF format (no metadata)
file_writer_settings:
type: object
properties:

File diff suppressed because one or more lines are too long

View File

@@ -1083,7 +1083,8 @@ DiffractionExperiment &DiffractionExperiment::ImagesPerFile(int64_t input) {
int64_t DiffractionExperiment::GetImagesPerFile() const {
auto tmp = dataset.GetImagesPerFile();
if (tmp == 0)
if (tmp == 0
|| file_writer.GetHDF5MasterFormatVersion() == FileWriterFormat::NXmxIntegrated)
return GetImageNum();
else
return tmp;

View File

@@ -14,6 +14,7 @@ FileWriterSettings &FileWriterSettings::HDF5MasterFormatVersion(FileWriterFormat
case FileWriterFormat::DataOnly:
case FileWriterFormat::NXmxLegacy:
case FileWriterFormat::NXmxVDS:
case FileWriterFormat::NXmxIntegrated:
case FileWriterFormat::TIFF:
case FileWriterFormat::CBF:
case FileWriterFormat::NoFile:

View File

@@ -33,7 +33,7 @@ enum class FileWriterFormat : int {
DataOnly = 0,
NXmxLegacy = 1,
NXmxVDS = 2,
// TODO: NXmxTR = 3
NXmxIntegrated = 3,
CBF = 4,
TIFF = 5,
NoFile = 6

View File

@@ -8,6 +8,9 @@ ARG EIGEN_TAG=3.4.0
ARG LIBJPEG_TURBO_VERSION=3.1.2
ARG LIBTIFF_VERSION=v4.7.1
ARG HDF5_TAG="hdf5_1.14.6"
ARG DIALS_VERSION=3.27.0
ARG DIALS_TARBALL_URL=https://github.com/dials/dials/releases/download/v3.27.0/dials-v3-27-0-linux-x86_64.tar.xz
ARG DIALS_PREFIX=/opt
# Update base, enable CRB (RHEL/Rocky 9), and install toolchain + Qt deps
RUN dnf -y update && \
@@ -65,6 +68,8 @@ RUN dnf -y update && \
libdrm-devel \
libglvnd-core-devel \
libglvnd-devel \
glibc-langpack-en \
glibc-locale-source \
freetype-devel && \
dnf clean all && rm -rf /var/cache/dnf
@@ -185,9 +190,35 @@ RUN set -eux; \
cmake --install .; \
cd /; rm -rf /tmp/qt-everywhere-src-${QT_VERSION} /tmp/qt-everywhere-src-${QT_VERSION}.tar.xz
# Install DIALS using the official binary tarball workflow
RUN set -eux; \
cd /tmp; \
curl -fL -o dials.tar.xz "${DIALS_TARBALL_URL}"; \
tar -xJf dials.tar.xz; \
cd dials-installer; \
./install --prefix="${DIALS_PREFIX}"; \
cd /; \
rm -rf /tmp/dials.tar.xz /tmp/dials-*-linux-x86_64
RUN set -eux; \
mkdir -p /opt/xds; \
cd /tmp; \
curl -fL -o XDS-gfortran_Linux_x86_64.tar.gz https://xds.mr.mpg.de/XDS-gfortran_Linux_x86_64.tar.gz; \
tar -xzf XDS-gfortran_Linux_x86_64.tar.gz; \
cp -a /tmp/XDS-gfortran_Linux_x86_64/. /opt/xds/; \
curl -fL -o durin-plugin-rhel7-x86.zip https://github.com/DiamondLightSource/durin/releases/download/v2023-10/durin-plugin-rhel7-x86.zip; \
unzip durin-plugin-rhel7-x86.zip; \
mv durin-plugin.so /opt/xds/durin-plugin.so; \
chmod 755 /opt/xds/durin-plugin.so; \
rm -rf /tmp/XDS-gfortran_Linux_x86_64 /tmp/XDS-gfortran_Linux_x86_64.tar.gz /tmp/durin-plugin-rhel7-x86.zip
# Make Qt and Eigen discoverable by CMake
ENV CMAKE_PREFIX_PATH=${CMAKE_PREFIX_PATH}:/opt/hdf5-${HDF5_TAG}-static:/opt/qt-${QT_VERSION}-static
ENV LANG=en_US.UTF-8
ENV LANGUAGE=en_US:en
ENV LC_ALL=en_US.UTF-8
# Set workdir for your project
WORKDIR /workspace

View File

@@ -22,11 +22,11 @@ RUN set -eux; \
ca-certificates \
curl \
wget \
gpg \
git \
tar \
xz-utils \
build-essential \
cmake \
ninja-build \
python3 \
python3-requests \
@@ -78,10 +78,25 @@ RUN set -eux; \
libassimp-dev \
libglvnd-dev \
libfreetype6-dev; \
wget -qO- https://apt.kitware.com/keys/kitware-archive-latest.asc \
| gpg --dearmor \
| tee /usr/share/keyrings/kitware-archive-keyring.gpg > /dev/null; \
echo "deb [signed-by=/usr/share/keyrings/kitware-archive-keyring.gpg] https://apt.kitware.com/ubuntu/ jammy main" \
| tee /etc/apt/sources.list.d/kitware.list > /dev/null; \
apt-get update; \
apt-get install -y --no-install-recommends \
cmake=3.26.* \
cmake-data=3.26.* \
kitware-archive-keyring; \
apt-get -y install gcc-12 g++-12; \
apt-get clean; \
rm -rf /var/lib/apt/lists/*
# Use GCC/G++ 12 for builds
ENV CC=/usr/bin/gcc-12
ENV CXX=/usr/bin/g++-12
ENV PATH=/usr/bin:${PATH}
# Build a static OpenSSL
RUN set -eux; \
cd /tmp; \
@@ -166,11 +181,6 @@ RUN set -eux; \
ENV CMAKE_PREFIX_PATH=/opt/libtiff-${LIBTIFF_VERSION}-static:/opt/libjpeg-turbo-${LIBJPEG_TURBO_VERSION}-static
ENV PKG_CONFIG_PATH=/opt/hdf5-${HDF5_TAG}-static/lib/pkgconfig:/opt/libjpeg-turbo-${LIBJPEG_TURBO_VERSION}-static/lib/pkgconfig:/opt/libtiff-${LIBTIFF_VERSION}-static/lib/pkgconfig:${OPENSSL_ROOT_DIR}/lib/pkgconfig:${OPENSSL_ROOT_DIR}/lib64/pkgconfig
# Use GCC/G++ 12 for builds
ENV CC=/usr/bin/gcc-12
ENV CXX=/usr/bin/g++-12
ENV PATH=/usr/bin:${PATH}
# Build and install static Qt 6.9 with Core, Gui, Widgets, Charts, DBus
ARG QT_PREFIX=/opt/qt-${QT_VERSION}-static
RUN set -eux; \

View File

@@ -1,15 +1,17 @@
# FileWriterFormat
None - no master file written NXmxLegacy - legacy format with soft links to data files in the master file; necessary for DECTRIS Albula 4.0 and DECTRIS Neggia NXmxVDS - newer format with virtual dataset linking data files in the master file, also includes better metadata handling
NoFileWritten - no files are written at all NXmxOnlyData - only data files are written, no master file NXmxLegacy - legacy format with soft links to data files in the master file; necessary for DECTRIS Albula 4.0 and DECTRIS Neggia NXmxVDS - newer format with virtual dataset linking data files in the master file, also includes better metadata handling NXmxIntegrated - single HDF5 per dataset CBF - CBF format (limited metadata) TIFF - TIFF format (no metadata)
## Enum
* `NONE` (value: `'None'`)
* `NXMXONLYDATA` (value: `'NXmxOnlyData'`)
* `NXMXLEGACY` (value: `'NXmxLegacy'`)
* `NXMXVDS` (value: `'NXmxVDS'`)
* `NXMXINTEGRATED` (value: `'NXmxIntegrated'`)
* `CBF` (value: `'CBF'`)
* `TIFF` (value: `'TIFF'`)

View File

@@ -963,6 +963,7 @@ namespace {
switch (tmp) {
case FileWriterFormat::DataOnly:
case FileWriterFormat::NXmxLegacy:
case FileWriterFormat::NXmxIntegrated:
case FileWriterFormat::NXmxVDS:
case FileWriterFormat::CBF:
case FileWriterFormat::TIFF:
@@ -1022,7 +1023,7 @@ namespace {
if (j.contains("overwrite"))
message.overwrite = j["overwrite"];
if (j.contains("xfel_pulse_id"))
message.overwrite = j["xfel_pulse_id"];
message.xfel_pulse_id = j["xfel_pulse_id"];
if (j.contains("file_format"))
message.file_format = ProcessHDF5Format(j["file_format"]);
if (j.contains("poni_rot1"))

View File

@@ -30,8 +30,8 @@ function stringToEnum(value: string): file_writer_format {
(v) => v === value
) as file_writer_format;
// If no match is found, default to file_writer_format.NONE
return enumValue || file_writer_format.NONE;
// If no match is found, default to file_writer_format.NXMX_ONLY_DATA
return enumValue || file_writer_format.NXMX_ONLY_DATA;
}
@@ -109,7 +109,8 @@ class FileWriterSettings extends React.Component<MyProps, MyState> {
>
<MenuItem value={file_writer_format.NXMX_LEGACY}>NXmx HDF5 master file with soft links (DECTRIS file writer compatibility)</MenuItem>
<MenuItem value={file_writer_format.NXMX_VDS}>NXmx HDF5 master file with virtual datasets</MenuItem>
<MenuItem value={file_writer_format.NONE}>No NXmx HDF5 master file (only data files)</MenuItem>
<MenuItem value={file_writer_format.NXMX_INTEGRATED}>Single HDF5 file with data and metadata</MenuItem>
<MenuItem value={file_writer_format.NXMX_ONLY_DATA}>No NXmx HDF5 master file (only data files)</MenuItem>
<MenuItem value={file_writer_format.CBF}>miniCBF (only data files; limited metadata)</MenuItem>
<MenuItem value={file_writer_format.TIFF}>TIFF (only data files; no metadata)</MenuItem>
<MenuItem value={file_writer_format.NO_FILE_WRITTEN}>No files saved</MenuItem>

View File

@@ -4,15 +4,20 @@
/* eslint-disable */
/**
* None - no master file written
* NoFileWritten - no files are written at all
* NXmxOnlyData - only data files are written, no master file
* NXmxLegacy - legacy format with soft links to data files in the master file; necessary for DECTRIS Albula 4.0 and DECTRIS Neggia
* NXmxVDS - newer format with virtual dataset linking data files in the master file, also includes better metadata handling
* NXmxIntegrated - single HDF5 per dataset
* CBF - CBF format (limited metadata)
* TIFF - TIFF format (no metadata)
*
*/
export enum file_writer_format {
NONE = 'None',
NXMX_ONLY_DATA = 'NXmxOnlyData',
NXMX_LEGACY = 'NXmxLegacy',
NXMX_VDS = 'NXmxVDS',
NXMX_INTEGRATED = 'NXmxIntegrated',
CBF = 'CBF',
TIFF = 'TIFF',
NO_FILE_WRITTEN = 'NoFileWritten',

View File

@@ -7,7 +7,6 @@
#include <thread>
#include <iostream>
#include <iomanip>
#include <algorithm>
#include <cmath>
#include <limits>
@@ -18,7 +17,6 @@
#include <vector>
#include "../../common/ResolutionShells.h"
#include "../../common/Logger.h"
namespace {
struct HKLKey {
@@ -237,7 +235,6 @@ namespace {
int img_id = 0;
int hkl_slot = -1;
double sigma = 0.0;
mutable bool selected = true;
};
struct CorrectedObs {
@@ -246,103 +243,6 @@ namespace {
double sigma_corr;
};
void select_reflections_by_quasi_random(const std::vector<ObsRef> &obs,
const ScaleMergeOptions &opt,
std::vector<bool> &hkl_selected,
Logger &logger) {
float stat_d_min = std::numeric_limits<float>::max();
float stat_d_max = 0.0f;
const gemmi::SpaceGroup &sg = *opt.space_group;
const gemmi::GroupOps gops = sg.operations();
int n_operator = gops.order();
struct HKLStats {
int n = 0;
float d = std::numeric_limits<float>::max();
int shell_id = 0;
};
const int nhkl = static_cast<int>(hkl_selected.size());
std::vector<HKLStats> per_hkl(nhkl);
int reflection_above_cutoff = 0;
for (const auto &o: obs) {
if (o.r->I / o.r->sigma < opt.filter_sigma_cutoff) {
o.selected = false;
continue;
}
const auto d = o.r->d;
reflection_above_cutoff += 1;
if (std::isfinite(d) && d > 0.0f) {
if (opt.d_min_limit_A > 0.0 && d < static_cast<float>(opt.d_min_limit_A))
continue;
stat_d_min = std::min(stat_d_min, d);
stat_d_max = std::max(stat_d_max, d);
auto &hs = per_hkl[o.hkl_slot];
hs.n += 1;
hs.d = d;
}
}
logger.Info("Reflections of I/sigma > {} : {}", opt.filter_sigma_cutoff, reflection_above_cutoff);
if (reflection_above_cutoff < opt.filter_min_per_bin * opt.filter_n_resolution_bins) {
logger.Info("No additional selection applied before scaling");
return;
}
if (stat_d_min < stat_d_max && stat_d_min > 0.0f) {
const float d_min_pad = stat_d_min * 0.999f;
const float d_max_pad = stat_d_max * 1.001f;
ResolutionShells scaling_shells(d_min_pad, d_max_pad, opt.filter_n_resolution_bins);
for (int h = 0; h < nhkl; ++h) {
const auto d = per_hkl[h].d;
if (std::isfinite(d) && d > 0.0f) {
if (opt.d_min_limit_A > 0.0 && d < static_cast<float>(opt.d_min_limit_A))
continue;
auto s = scaling_shells.GetShell(d);
if (s.has_value())
per_hkl[h].shell_id = s.value();
}
}
// Accumulators per shell
struct ShellAccum {
int obs_unique = 0;
int obs_total = 0;
bool selected = true;
};
std::vector<ShellAccum> shell_acc(opt.filter_n_resolution_bins);
for (int h = 0; h < nhkl; ++h) {
auto &sa = shell_acc[per_hkl[h].shell_id];
if (sa.obs_unique * n_operator > opt.filter_min_per_bin * 1.2 || sa.obs_total > opt.filter_max_per_bin)
hkl_selected[h] = false;
else
sa.obs_unique += 1;
sa.obs_total += per_hkl[h].n;
}
const auto shell_min_res = scaling_shells.GetShellMinRes();
logger.Info("| d-mean | n_refl_uni | n_refl_tot | selection |");
for (int n=0; n < opt.filter_n_resolution_bins; ++n) {
if (shell_acc[n].obs_unique * n_operator < opt.filter_min_per_bin)
shell_acc[n].selected = false;
if (shell_min_res[n] <= 0.0f) continue;
logger.Info("| {:6.3f} | {:10d} | {:10d} | {:9d} |",
shell_min_res[n], shell_acc[n].obs_unique, shell_acc[n].obs_total, shell_acc[n].selected);
}
for (int h = 0; h < nhkl; ++h) {
auto &sa = shell_acc[per_hkl[h].shell_id];
if (!sa.selected)
hkl_selected[h] = false;
}
}
}
void scale(const ScaleMergeOptions &opt,
std::vector<double> &g,
std::vector<double> &mosaicity,
@@ -350,13 +250,10 @@ namespace {
const std::vector<uint8_t> &image_slot_used,
bool rotation_crystallography,
size_t nhkl,
const std::vector<ObsRef> &obs,
bool selection,
Logger &logger) {
const std::vector<ObsRef> &obs) {
ceres::Problem problem;
std::vector<double> Itrue(nhkl, 0.0);
std::vector<bool> hkl_selected(nhkl, true);
// Initialize Itrue from per-HKL median of observed intensities
{
@@ -380,12 +277,8 @@ namespace {
double wedge = opt.wedge_deg.value_or(0.0);
if (selection) select_reflections_by_quasi_random(obs, opt, hkl_selected, logger);
std::vector<bool> is_valid_hkl_slot(nhkl, false);
for (const auto &o: obs) {
if (!o.selected) continue;
if (!hkl_selected[o.hkl_slot]) continue;
switch (opt.partiality_model) {
case ScaleMergeOptions::PartialityModel::Rotation: {
auto *cost = new ceres::AutoDiffCostFunction<IntensityRotResidual, 1, 1, 1, 1, 1>(
@@ -505,9 +398,8 @@ namespace {
options.function_tolerance = 1e-4;
ceres::Solver::Summary summary;
logger.Info("Now start the ceres-solver with residual blocks: {}", problem.NumResidualBlocks());
ceres::Solve(options, &problem, &summary);
logger.Info(summary.FullReport());
std::cout << summary.FullReport() << std::endl;
}
void merge(size_t nhkl, ScaleMergeResult &out, const std::vector<CorrectedObs> &corr_obs) {
@@ -802,7 +694,6 @@ namespace {
o.img_id = img_id;
o.hkl_slot = hkl_slot;
o.sigma = sigma;
o.selected = true;
obs.push_back(o);
}
}
@@ -812,14 +703,10 @@ namespace {
ScaleMergeResult ScaleAndMergeReflectionsCeres(const std::vector<std::vector<Reflection> > &observations,
const ScaleMergeOptions &opt) {
Logger logger("ScaleAndMergeReflections");
if (opt.image_cluster <= 0)
throw std::invalid_argument("image_cluster must be positive");
const bool rotation_crystallography = opt.wedge_deg.has_value();
const bool selection = opt.selection;
size_t nrefl = 0;
for (const auto &i: observations)
@@ -857,8 +744,7 @@ ScaleMergeResult ScaleAndMergeReflectionsCeres(const std::vector<std::vector<Ref
}
}
logger.Info("Now scale the reflections: {}", nrefl);
scale(opt, g, mosaicity, R_sq, image_slot_used, rotation_crystallography, nhkl, obs, selection, logger);
scale(opt, g, mosaicity, R_sq, image_slot_used, rotation_crystallography, nhkl, obs);
ScaleMergeResult out;

View File

@@ -50,12 +50,6 @@ struct ScaleMergeOptions {
bool refine_wedge = false;
bool selection = true;
double filter_sigma_cutoff = 1.0;
int filter_min_per_bin = 10000; // needs optimization
int filter_max_per_bin = 80000;
int filter_n_resolution_bins = 20;
enum class PartialityModel {Fixed, Rotation, Unity, Still} partiality_model = PartialityModel::Fixed;
};

View File

@@ -1,6 +1,8 @@
#!/bin/bash
python3.12 -m venv tmp_venv/
set -euo pipefail
python3.11 -m venv tmp_venv/
source tmp_venv/bin/activate
pip install -r docs/requirements.txt

View File

@@ -13,6 +13,64 @@
using namespace std::literals::chrono_literals;
// Unit test: HDF5Group construction must create a missing group, succeed
// again on an already-existing group (open-or-create semantics), and throw
// when the parent path does not exist.
TEST_CASE("HDF5Group_create_reopen_and_fail", "[HDF5][Unit]") {
// Inner scope ensures the file and all group handles are closed (RAII)
// before the leak check below.
{
HDF5File file("scratch_group_reopen.h5");
// First construction creates the group.
REQUIRE_NOTHROW(HDF5Group(file, "/group1"));
REQUIRE(file.Exists("/group1"));
// Second construction with the same path must not throw — the group
// already exists and should simply be reopened.
REQUIRE_NOTHROW(HDF5Group(file, "/group1"));
REQUIRE(file.Exists("/group1"));
// Creating a group under a non-existent parent is expected to fail.
REQUIRE_THROWS(HDF5Group(file, "/missing_parent/group2"));
}
// Remove the scratch file and verify no HDF5 object handles leaked:
// the library-wide open-object count must be back to zero.
remove("scratch_group_reopen.h5");
REQUIRE(H5Fget_obj_count(H5F_OBJ_ALL, H5F_OBJ_ALL) == 0);
}
// Unit test: writing the same string attribute twice must update it in place,
// even when the new value is longer than the old one (which typically forces
// the attribute to be recreated with a larger datatype).
TEST_CASE("HDF5Attr_string_update", "[HDF5][Unit]") {
const std::string first_value = "abc";
// Deliberately longer than first_value to exercise the resize/recreate path.
const std::string second_value = "a much longer attribute value";
{
HDF5File file("scratch_attr_string_update.h5");
// Write, then overwrite the same attribute — neither call may throw.
REQUIRE_NOTHROW(file.Attr("str_attr", first_value));
REQUIRE_NOTHROW(file.Attr("str_attr", second_value));
}
{
// Reopen read-only: the persisted value must be the second (updated) one.
HDF5ReadOnlyFile file("scratch_attr_string_update.h5");
REQUIRE(file.ReadAttrStr("str_attr") == second_value);
}
// Clean up and verify no HDF5 handles were leaked.
remove("scratch_attr_string_update.h5");
REQUIRE(H5Fget_obj_count(H5F_OBJ_ALL, H5F_OBJ_ALL) == 0);
}
// Unit test: an int64 attribute can be overwritten, and both the in-session
// read-back and the value persisted to disk reflect the update.
TEST_CASE("HDF5Attr_int64_update", "[HDF5][Unit]") {
// Values chosen to exceed 32-bit range so a narrowing bug would be caught.
const int64_t first_value = -1234567890123LL;
const int64_t second_value = 9876543210123LL;
{
HDF5File file("scratch_attr_int64_update.h5");
// Write and immediately read back the first value ...
REQUIRE_NOTHROW(file.Attr("int_attr", first_value));
REQUIRE(file.ReadAttrInt("int_attr") == first_value);
// ... then overwrite and confirm the update is visible within the
// same (writable) file session.
REQUIRE_NOTHROW(file.Attr("int_attr", second_value));
REQUIRE(file.ReadAttrInt("int_attr") == second_value);
}
{
// Reopen read-only: the updated value must have been persisted.
HDF5ReadOnlyFile file("scratch_attr_int64_update.h5");
REQUIRE(file.ReadAttrInt("int_attr") == second_value);
}
// Clean up and verify no HDF5 handles were leaked.
remove("scratch_attr_int64_update.h5");
REQUIRE(H5Fget_obj_count(H5F_OBJ_ALL, H5F_OBJ_ALL) == 0);
}
TEST_CASE("HDF5DataSet_scalar", "[HDF5][Unit]") {
uint16_t tmp_scalar = 16788;
{
@@ -759,6 +817,274 @@ TEST_CASE("HDF5Writer_Link_VDS", "[HDF5][Full]") {
REQUIRE (H5Fget_obj_count(H5F_OBJ_ALL, H5F_OBJ_ALL) == 0);
}
// Full test of the NXmxIntegrated writer format: all images and all NXmx
// metadata are written into a single "<prefix>_master.h5" file, overriding
// any user-requested images-per-file split.
TEST_CASE("HDF5Writer_NXmxIntegrated_Basic", "[HDF5][Full]") {
DiffractionExperiment x(DetJF(1));
// ImagesPerFile(2) is set on purpose: the integrated format must override it.
x.ImagesPerTrigger(5).ImagesPerFile(2).Compression(CompressionAlgorithm::NO_COMPRESSION)
.FilePrefix("integrated_basic");
x.SetFileWriterFormat(FileWriterFormat::NXmxIntegrated).OverwriteExistingFiles(true);
// NXmxIntegrated forces all images into one file
REQUIRE(x.GetImagesPerFile() == x.GetImageNum());
{
RegisterHDF5Filter();
StartMessage start_message;
x.FillMessage(start_message);
REQUIRE(start_message.file_format == FileWriterFormat::NXmxIntegrated);
// images_per_file should equal total images for integrated
REQUIRE(start_message.images_per_file == x.GetImageNum());
EndMessage end_message;
end_message.max_image_number = x.GetImageNum();
FileWriter writer(start_message);
// Write every image with a constant pixel value (42) in sequential order.
std::vector<int16_t> image(x.GetPixelsNum(), 42);
for (int i = 0; i < x.GetImageNum(); i++) {
DataMessage message{};
message.image = CompressedImage(image, x.GetXPixelsNum(), x.GetYPixelsNum());
message.number = i;
REQUIRE_NOTHROW(writer.Write(message));
}
writer.WriteHDF5(end_message);
auto stats = writer.Finalize();
// All images in one file — only one stats entry
REQUIRE(stats.size() == 1);
REQUIRE(stats[0].total_images == x.GetImageNum());
}
// Only the single "<prefix>_master.h5" file may exist: no bare-prefix file
// and no separate numbered data files (unlike the legacy/VDS formats).
REQUIRE(!std::filesystem::exists("integrated_basic.h5"));
REQUIRE(std::filesystem::exists("integrated_basic_master.h5"));
REQUIRE(!std::filesystem::exists("integrated_basic_data_000001.h5"));
{
HDF5ReadOnlyFile file("integrated_basic_master.h5");
// Data should be directly in the file
std::unique_ptr<HDF5DataSet> dataset;
REQUIRE_NOTHROW(dataset = std::make_unique<HDF5DataSet>(file, "/entry/data/data"));
// Dataset layout must be (images, height, width).
HDF5DataSpace file_space(*dataset);
REQUIRE(file_space.GetNumOfDimensions() == 3);
REQUIRE(file_space.GetDimensions()[0] == x.GetImageNum());
REQUIRE(file_space.GetDimensions()[1] == x.GetYPixelsNum());
REQUIRE(file_space.GetDimensions()[2] == x.GetXPixelsNum());
// Master metadata should also be present
REQUIRE_NOTHROW(dataset = std::make_unique<HDF5DataSet>(file, "/entry/instrument/detector/beam_center_x"));
REQUIRE(dataset->ReadScalar<float>() == Catch::Approx(x.GetBeamX_pxl()));
// No external links (unlike NXmxLegacy)
REQUIRE_THROWS(std::make_unique<HDF5DataSet>(file, "/entry/data/data_000001"));
}
// No leftover HDF5 objects
REQUIRE(H5Fget_obj_count(H5F_OBJ_ALL, H5F_OBJ_ALL) == 0);
remove("integrated_basic_master.h5");
}
// Full test: NXmxIntegrated output when per-image spot lists are attached to
// the data messages — detector plugin data and image data must end up in the
// same single master file.
TEST_CASE("HDF5Writer_NXmxIntegrated_WithSpots", "[HDF5][Full]") {
DiffractionExperiment x(DetJF(1));
x.ImagesPerTrigger(3).Compression(CompressionAlgorithm::NO_COMPRESSION)
.FilePrefix("integrated_spots");
x.SetFileWriterFormat(FileWriterFormat::NXmxIntegrated).OverwriteExistingFiles(true);
{
RegisterHDF5Filter();
StartMessage start_message;
x.FillMessage(start_message);
EndMessage end_message;
end_message.max_image_number = x.GetImageNum();
FileWriter writer(start_message);
std::vector<int16_t> image(x.GetPixelsNum(), 10);
// Two fixed spots ({x, y, intensity} — TODO confirm field order against
// SpotToSave) attached identically to every image.
std::vector<SpotToSave> spots;
spots.push_back({10.0f, 20.0f, 100.0f});
spots.push_back({30.0f, 40.0f, 200.0f});
for (int i = 0; i < x.GetImageNum(); i++) {
DataMessage message{};
message.image = CompressedImage(image, x.GetXPixelsNum(), x.GetYPixelsNum());
message.spots = spots;
message.number = i;
message.image_collection_efficiency = 1.0f;
REQUIRE_NOTHROW(writer.Write(message));
}
writer.WriteHDF5(end_message);
auto stats = writer.Finalize();
// Integrated format: exactly one output file, hence one stats entry.
REQUIRE(stats.size() == 1);
}
REQUIRE(std::filesystem::exists("integrated_spots_master.h5"));
{
HDF5ReadOnlyFile file("integrated_spots_master.h5");
// Detector plugin data should exist in the same file
REQUIRE(file.Exists("/entry/detector"));
// Image data should exist
std::unique_ptr<HDF5DataSet> dataset;
REQUIRE_NOTHROW(dataset = std::make_unique<HDF5DataSet>(file, "/entry/data/data"));
}
// No leaked HDF5 handles; then remove the scratch output.
REQUIRE(H5Fget_obj_count(H5F_OBJ_ALL, H5F_OBJ_ALL) == 0);
remove("integrated_spots_master.h5");
}
// Full test: edge case of an NXmxIntegrated acquisition that ends without a
// single image. The master file with metadata must still be produced, but no
// image dataset and no per-file statistics.
TEST_CASE("HDF5Writer_NXmxIntegrated_ZeroImages", "[HDF5][Full]") {
DiffractionExperiment x(DetJF(1));
x.ImagesPerTrigger(5).Compression(CompressionAlgorithm::NO_COMPRESSION)
.FilePrefix("integrated_zero");
x.SetFileWriterFormat(FileWriterFormat::NXmxIntegrated).OverwriteExistingFiles(true);
{
RegisterHDF5Filter();
StartMessage start_message;
x.FillMessage(start_message);
EndMessage end_message;
// Signal to the writer that zero images were collected.
end_message.max_image_number = 0;
FileWriter writer(start_message);
// Write no images — just finalize
writer.WriteHDF5(end_message);
auto stats = writer.Finalize();
// No data files created
REQUIRE(stats.empty());
}
// Master file should still exist with metadata
REQUIRE(std::filesystem::exists("integrated_zero_master.h5"));
{
HDF5ReadOnlyFile file("integrated_zero_master.h5");
REQUIRE(file.Exists("/entry"));
// No data dataset since no images written
REQUIRE_THROWS(std::make_unique<HDF5DataSet>(file, "/entry/data/data"));
}
// No leaked HDF5 handles; then remove the scratch output.
REQUIRE(H5Fget_obj_count(H5F_OBJ_ALL, H5F_OBJ_ALL) == 0);
remove("integrated_zero_master.h5");
}
// Full test: NXmxIntegrated output with azimuthal-integration profiles.
// Both the q-bin mapping (from the start message) and the per-image radial
// profiles must be written into /entry/azint of the single master file.
TEST_CASE("HDF5Writer_NXmxIntegrated_AzInt", "[HDF5][Full]") {
DiffractionExperiment x(DetJF(1));
// Geometry and q-range needed for the azimuthal integration mapping.
x.DetectorDistance_mm(50).BeamX_pxl(500).BeamY_pxl(500);
x.QSpacingForAzimInt_recipA(0.1).QRangeForAzimInt_recipA(0.1, 4.0);
x.ImagesPerTrigger(3).Compression(CompressionAlgorithm::NO_COMPRESSION)
.FilePrefix("integrated_azint");
x.SetFileWriterFormat(FileWriterFormat::NXmxIntegrated).OverwriteExistingFiles(true);
PixelMask pixel_mask(x);
AzimuthalIntegration mapping(x, pixel_mask);
{
RegisterHDF5Filter();
StartMessage start_message;
x.FillMessage(start_message);
// Attach the bin layout so the writer can store the q-axis metadata.
start_message.az_int_bin_to_q = mapping.GetBinToQ();
start_message.az_int_phi_bin_count = mapping.GetAzimuthalBinCount();
start_message.az_int_q_bin_count = mapping.GetQBinCount();
EndMessage end_message;
end_message.max_image_number = x.GetImageNum();
FileWriter writer(start_message);
std::vector<int16_t> image(x.GetPixelsNum(), 5);
for (int i = 0; i < x.GetImageNum(); i++) {
DataMessage message{};
message.image = CompressedImage(image, x.GetXPixelsNum(), x.GetYPixelsNum());
// Per-image profile filled with the image index, so each image's
// azint data is distinguishable in the output.
message.az_int_profile = std::vector<float>(mapping.GetBinNumber(), static_cast<float>(i));
message.number = i;
REQUIRE_NOTHROW(writer.Write(message));
}
writer.WriteHDF5(end_message);
auto stats = writer.Finalize();
// Integrated format: exactly one output file, hence one stats entry.
REQUIRE(stats.size() == 1);
}
REQUIRE(std::filesystem::exists("integrated_azint_master.h5"));
{
HDF5ReadOnlyFile file("integrated_azint_master.h5");
// Azimuthal integration bin mapping should exist (written by plugin)
std::unique_ptr<HDF5DataSet> dataset;
REQUIRE_NOTHROW(dataset = std::make_unique<HDF5DataSet>(file, "/entry/azint/bin_to_q"));
// Per-image azint data should exist
REQUIRE_NOTHROW(dataset = std::make_unique<HDF5DataSet>(file, "/entry/azint/image"));
// First dimension is the image index; 3D layout presumably
// (image, phi_bin, q_bin) — TODO confirm against the writer.
HDF5DataSpace space(*dataset);
REQUIRE(space.GetNumOfDimensions() == 3);
REQUIRE(space.GetDimensions()[0] == x.GetImageNum());
}
// No leaked HDF5 handles; then remove the scratch output.
REQUIRE(H5Fget_obj_count(H5F_OBJ_ALL, H5F_OBJ_ALL) == 0);
remove("integrated_azint_master.h5");
}
// Test that out-of-order image delivery works with NXmxIntegrated: images
// arriving in a shuffled order must still all land in the single master
// file, which keeps its full (5, H, W) extent.
TEST_CASE("HDF5Writer_NXmxIntegrated_OutOfOrder", "[HDF5][Full]") {
DiffractionExperiment x(DetJF(1));
x.ImagesPerTrigger(5).Compression(CompressionAlgorithm::NO_COMPRESSION)
.FilePrefix("integrated_ooo");
x.SetFileWriterFormat(FileWriterFormat::NXmxIntegrated).OverwriteExistingFiles(true);
{
RegisterHDF5Filter();
StartMessage start_message;
x.FillMessage(start_message);
EndMessage end_message;
end_message.max_image_number = x.GetImageNum();
FileWriter writer(start_message);
std::vector<int16_t> image(x.GetPixelsNum(), 7);
// Write images out of order
std::vector<int> order = {3, 1, 4, 0, 2};
for (int idx : order) {
DataMessage message{};
message.image = CompressedImage(image, x.GetXPixelsNum(), x.GetYPixelsNum());
// message.number carries the true image index, not arrival order.
message.number = idx;
REQUIRE_NOTHROW(writer.Write(message));
}
writer.WriteHDF5(end_message);
auto stats = writer.Finalize();
// One output file; every one of the 5 images must be accounted for.
REQUIRE(stats.size() == 1);
REQUIRE(stats[0].total_images == 5);
}
REQUIRE(std::filesystem::exists("integrated_ooo_master.h5"));
{
HDF5ReadOnlyFile file("integrated_ooo_master.h5");
std::unique_ptr<HDF5DataSet> dataset;
REQUIRE_NOTHROW(dataset = std::make_unique<HDF5DataSet>(file, "/entry/data/data"));
// Dataset extent along the image axis must cover all 5 frames.
HDF5DataSpace file_space(*dataset);
REQUIRE(file_space.GetDimensions()[0] == 5);
}
// No leaked HDF5 handles; then remove the scratch output.
REQUIRE(H5Fget_obj_count(H5F_OBJ_ALL, H5F_OBJ_ALL) == 0);
remove("integrated_ooo_master.h5");
}
TEST_CASE("HDF5Writer_NoMasterFile", "[HDF5][Full]") {
DiffractionExperiment x(DetJF(1));

View File

@@ -1749,4 +1749,87 @@ TEST_CASE("JFJochIntegrationTest_TCP_calibration", "[JFJochReceiver]") {
REQUIRE(!service.GetProgress().has_value());
REQUIRE_NOTHROW(writer_future.get());
}
}
// End-to-end receiver integration test: a simulated JF4M detector replays a
// lysozyme image through the internal packet generator; the receiver performs
// spot finding + indexing and streams results over TCP to a writer using the
// single-file NXmxIntegrated format. Verifies 100% efficiency, 100% indexing
// rate, and that the writer acknowledged every image.
TEST_CASE("JFJochIntegrationTest_TCP_lysozyme_spot_and_index_single_file", "[JFJochReceiver]") {
Logger logger(Catch::getResultCapture().getCurrentTestName());
RegisterHDF5Filter();
const uint16_t nthreads = 4;
// Experiment geometry and lysozyme unit cell; NXmxIntegrated means all
// images go into one output file despite ImagesPerFile(2).
DiffractionExperiment experiment(DetJF4M());
experiment.ImagesPerTrigger(5).NumTriggers(1).UseInternalPacketGenerator(true).ImagesPerFile(2)
.FilePrefix("lyso_test_tcp_single_file").JungfrauConvPhotonCnt(false).SetFileWriterFormat(FileWriterFormat::NXmxIntegrated).OverwriteExistingFiles(true)
.DetectorDistance_mm(75).BeamY_pxl(1136).BeamX_pxl(1090).IncidentEnergy_keV(12.4)
.SetUnitCell(UnitCell{.a = 36.9, .b = 78.95, .c = 78.95, .alpha =90, .beta = 90, .gamma = 90});
experiment.SampleTemperature_K(123.0).RingCurrent_mA(115);
PixelMask pixel_mask(experiment);
// Load example image
HDF5ReadOnlyFile data("../../tests/test_data/compression_benchmark.h5");
HDF5DataSet dataset(data, "/entry/data/data");
HDF5DataSpace file_space(dataset);
// Sanity-check that the test data matches the JF4M geometry.
REQUIRE(file_space.GetDimensions()[2] == experiment.GetXPixelsNum());
REQUIRE(file_space.GetDimensions()[1] == experiment.GetYPixelsNum());
std::vector<int16_t> image_conv (file_space.GetDimensions()[1] * file_space.GetDimensions()[2]);
// Read a single frame (index 4) from the benchmark dataset.
std::vector<hsize_t> start = {4,0,0};
std::vector<hsize_t> file_size = {1, file_space.GetDimensions()[1], file_space.GetDimensions()[2]};
dataset.ReadVector(image_conv, start, file_size);
// Convert from assembled (converted) geometry back to raw module layout,
// as required by the internal packet generator.
std::vector<int16_t> image_raw_geom(experiment.GetModulesNum() * RAW_MODULE_SIZE);
ConvertedToRawGeometry(experiment, image_raw_geom.data(), image_conv.data());
logger.Info("Loaded image");
// Setup acquisition device
AcquisitionDeviceGroup aq_devices;
std::unique_ptr<HLSSimulatedDevice> test = std::make_unique<HLSSimulatedDevice>(0, 64);
// Feed every module with its slice of the raw-geometry frame.
for (int m = 0; m < experiment.GetModulesNum(); m++)
test->SetInternalGeneratorFrame((uint16_t *) image_raw_geom.data() + m * RAW_MODULE_SIZE, m);
aq_devices.Add(std::move(test));
// TCP loopback: pusher (receiver side) -> puller -> StreamWriter running
// asynchronously in its own thread.
TCPStreamPusher pusher("tcp://127.0.0.1:9121", 1);
TCPImagePuller puller("tcp://127.0.0.1:9121");
StreamWriter writer(logger, puller);
auto writer_future = std::async(std::launch::async, &StreamWriter::Run, &writer);
JFJochReceiverService service(aq_devices, logger, pusher);
service.NumThreads(nthreads);
service.Indexing(experiment.GetIndexingSettings());
// No progress value at the start of measurement
REQUIRE(!service.GetProgress().has_value());
// Spot finding tuned so the lysozyme test frame indexes reliably.
SpotFindingSettings settings = DiffractionExperiment::DefaultDataProcessingSettings();
settings.signal_to_noise_threshold = 2.5;
settings.photon_count_threshold = 5;
settings.min_pix_per_spot = 1;
settings.max_pix_per_spot = 200;
settings.high_resolution_limit = 2.0;
settings.low_resolution_limit = 50.0;
service.SetSpotFindingSettings(settings);
// Run the full acquisition and collect the receiver's summary.
service.Start(experiment, pixel_mask, nullptr);
auto receiver_out = service.Stop();
// All images received and sent; every image indexed; no writer errors.
CHECK(receiver_out.efficiency == 1.0);
REQUIRE(receiver_out.status.indexing_rate);
CHECK(receiver_out.status.indexing_rate.value() == 1.0);
CHECK(receiver_out.status.images_sent == experiment.GetImageNum());
CHECK(receiver_out.writer_err.empty());
CHECK(!receiver_out.status.cancelled);
// No progress value at the end of measurement
REQUIRE(!service.GetProgress().has_value());
REQUIRE_NOTHROW(writer_future.get());
// The writer must have acknowledged writing every image over TCP.
auto ack = pusher.GetImagesWritten();
REQUIRE(ack.has_value());
CHECK(ack == experiment.GetImageNum());
}

View File

@@ -12,7 +12,7 @@ ORGY= 1130
DETECTOR_DISTANCE= 75
OSCILLATION_RANGE= 0.088
X-RAY_WAVELENGTH= 1.0
NAME_TEMPLATE_OF_DATA_FRAMES=writing_test_??????.h5
NAME_TEMPLATE_OF_DATA_FRAMES=single.h5
DATA_RANGE= 1 25
!DATA_RANGE_FIXED_SCALE_FACTOR= 1900 6000 1.0
SPOT_RANGE= 1 25

View File

@@ -18,26 +18,51 @@ int main(int argc, char **argv) {
RegisterHDF5Filter();
if ((argc < 2) || (argc > 4)) {
std::cout << "Usage: ./jfjoch_hdf5_test <JF4M hdf5 file> {{<#images>} <rate in Hz>}" << std::endl;
std::cout << std::endl;
std::cout << "Env. variables:" << std::endl;
std::cout << "HDF5DATASET_WRITE_TEST_PREFIX" << std::endl;
std::cout << "HDF5MASTER_NEW_FORMAT" << std::endl;
int64_t nimages_out = 100;
std::string prefix = "writing_test";
FileWriterFormat format = FileWriterFormat::NXmxLegacy;
std::optional<int64_t> images_per_file;
std::optional<float> rotation;
int opt;
while ((opt = getopt(argc, argv, "o:n:Vf:R:S")) != -1) {
switch (opt) {
case 'o':
prefix = optarg;
break;
case 'n':
nimages_out = atoll(optarg);
break;
case 'V':
format = FileWriterFormat::NXmxVDS;
break;
case 'S':
format = FileWriterFormat::NXmxIntegrated;
break;
case 'R':
rotation = atof(optarg);
break;
case 'f':
images_per_file = atoll(optarg);
if (images_per_file.value() <= 0) {
std::cerr << "Invalid number of images per file: " << optarg << std::endl;
exit(EXIT_FAILURE);
}
images_per_file = atoll(optarg);
break;
default:
std::cout << "Usage: ./jfjoch_hdf5_test <JF4M hdf5 file> [-o <prefix>] [-n <num images>] [-V] [-f <num images per file>] [-R<rotation angle>]" << std::endl;
exit(EXIT_FAILURE);
}
}
if (optind >= argc) {
std::cout << "Usage: ./jfjoch_hdf5_test <JF4M hdf5 file> [-o <prefix>] [-n <num images>] [-V] [-f <num images per file>] [-R<rotation angle>]" << std::endl;
exit(EXIT_FAILURE);
}
int64_t nimages_out = 100;
double rate = 2200;
if (argc >= 3)
nimages_out = atoi(argv[2]);
if (argc >= 4)
rate = atof(argv[3]);
std::chrono::microseconds period_us((rate == 0) ? 0 : (int64_t) (1.0e6 / rate));
HDF5ReadOnlyFile data(argv[1]);
HDF5ReadOnlyFile data(argv[optind]);
HDF5DataSet dataset(data, "/entry/data/data");
HDF5DataSpace file_space(dataset);
@@ -53,6 +78,8 @@ int main(int argc, char **argv) {
x.BeamX_pxl(1090).BeamY_pxl(1136).DetectorDistance_mm(75).IncidentEnergy_keV(WVL_1A_IN_KEV);
x.MaskModuleEdges(true);
x.MaskChipEdges(true);
if (rotation && rotation.value() != 0.0)
x.Goniometer(GoniometerAxis("omega", 0, rotation.value(), Coord(-1,0,0), std::nullopt));
if ((file_space.GetDimensions()[1] == 2164) && (file_space.GetDimensions()[2] == 2068)) {
std::cout << "JF4M with gaps detected (2068 x 2164)" << std::endl;
@@ -64,16 +91,12 @@ int main(int argc, char **argv) {
logger.Info("Number of images in the original dataset: " + std::to_string(nimages));
// Set file name
if (std::getenv("HDF5DATASET_WRITE_TEST_PREFIX") == nullptr)
x.FilePrefix("writing_test");
else
x.FilePrefix(std::getenv("HDF5DATASET_WRITE_TEST_PREFIX"));
x.FilePrefix(prefix);
x.SetFileWriterFormat(format);
x.OverwriteExistingFiles(true);
if (std::getenv("HDF5MASTER_NEW_FORMAT") != nullptr) {
std::cout << "Using new format for HDF5 master file" << std::endl;
x.SetFileWriterFormat(FileWriterFormat::NXmxVDS);
} else
x.SetFileWriterFormat(FileWriterFormat::NXmxLegacy);
if (images_per_file.has_value())
x.ImagesPerFile(images_per_file.value());
x.ImagesPerTrigger(nimages);
@@ -127,8 +150,6 @@ int main(int argc, char **argv) {
size_t total_image_size = 0;
for (int i = 0; i < nimages_out; i++) {
std::this_thread::sleep_until(start_time + i * period_us);
DataMessage message{};
message.image = CompressedImage(output[i % nimages], x.GetXPixelsNum(), x.GetYPixelsNum(),
x.GetImageMode(), x.GetCompressionAlgorithm());

View File

@@ -2,7 +2,6 @@
// SPDX-License-Identifier: GPL-3.0-only
#include <iostream>
#include <iomanip>
#include <vector>
#include <string>
#include <unistd.h>
@@ -45,15 +44,12 @@ void print_usage(Logger &logger) {
logger.Info(" -d<num> High resolution limit for spot finding (default: 1.5)");
logger.Info(" -D<num> High resolution limit for scaling/merging (default: 0.0; no limit)");
logger.Info(" -S<num> Space group number");
logger.Info(" -M[num] Scale and merge (refine mosaicity) and write scaled.hkl + image.dat, unless indexing rate is below threshold (default: 0.5)");
logger.Info(" -M Scale and merge (refine mosaicity) and write scaled.hkl + image.dat");
logger.Info(" -P<txt> Partiality refinement fixed|rot|unity (default: fixed)");
logger.Info(" -A Anomalous mode (don't merge Friedel pairs)");
logger.Info(" -C<cell> Fix reference unit cell: -C\"a,b,c,alpha,beta,gamma\" (comma-separated, no spaces; quotes optional)");
logger.Info(" -c<num> Max spot count (default: 250)");
logger.Info(" -W<txt> HDF5 file with analysis results is written. 'l' or 'light' deactivates image-output");
logger.Info(" -T<num> Noise sigma level for spot finding (default: 3.0)");
logger.Info(" -m<num> Min unique reflections per bin used for scaling. Use all when no value is specified (default: 10000)");
logger.Info(" -w Refine wedge in scaling (default: false)");
logger.Info(" -W HDF5 file with analysis results is written");
}
void trim_in_place(std::string& t) {
@@ -64,36 +60,6 @@ void trim_in_place(std::string& t) {
t = t.substr(b, e - b);
};
void print_statistics(Logger &logger, const MergeStatistics &stats) {
logger.Info("");
logger.Info(" {:>8s} {:>8s} {:>8s} {:>8s} {:>8s} {:>10s}",
"d_min", "N_obs", "N_uniq", "Rmeas", "<I/sig>", "Complete");
logger.Info(" {:->8s} {:->8s} {:->8s} {:->8s} {:->8s} {:->10s}",
"", "", "", "", "", "");
for (const auto &sh: stats.shells) {
if (sh.unique_reflections == 0)
continue;
std::string compl_str = (sh.completeness > 0.0)
? fmt::format("{:8.1f}%", sh.completeness * 100.0)
: " N/A";
logger.Info(" {:8.2f} {:8d} {:8d} {:8.3f}% {:8.1f} {:>10s}",
sh.d_min, sh.total_observations, sh.unique_reflections,
sh.rmeas * 100, sh.mean_i_over_sigma, compl_str);
}
{
const auto &ov = stats.overall;
logger.Info(" {:->8s} {:->8s} {:->8s} {:->8s} {:->8s} {:->10s}",
"", "", "", "", "", "");
std::string compl_str = (ov.completeness > 0.0)
? fmt::format("{:8.1f}%", ov.completeness * 100.0)
: " N/A";
logger.Info(" {:>8s} {:8d} {:8d} {:8.3f}% {:8.1f} {:>10s}",
"Overall", ov.total_observations, ov.unique_reflections,
ov.rmeas * 100, ov.mean_i_over_sigma, compl_str);
}
logger.Info("");
}
std::optional<UnitCell> parse_unit_cell_arg(const char* arg) {
if (!arg)
return std::nullopt;
@@ -166,14 +132,8 @@ int main(int argc, char **argv) {
std::optional<int> space_group_number;
std::optional<UnitCell> fixed_reference_unit_cell;
bool write_output = false;
bool write_output_noimage = false;
bool filtering = true;
std::optional<int16_t> filter_min_per_bin;
std::optional<int64_t> max_spot_count_override;
float sigma_spot_finding = 3.0;
std::optional<float> merging_threshold;
IndexingAlgorithmEnum indexing_algorithm = IndexingAlgorithmEnum::Auto;
bool refine_wedge = false;
ScaleMergeOptions::PartialityModel partiality_model = ScaleMergeOptions::PartialityModel::Fixed;
@@ -186,7 +146,7 @@ int main(int argc, char **argv) {
}
int opt;
while ((opt = getopt(argc, argv, "o:N:s:e:vc:R::FX:xd:S:M::P:AD:C:T:W:m::w")) != -1) {
while ((opt = getopt(argc, argv, "o:N:s:e:vc:R::FX:xd:S:MP:AD:C:W")) != -1) {
switch (opt) {
case 'o':
output_prefix = optarg;
@@ -202,10 +162,6 @@ int main(int argc, char **argv) {
break;
case 'W':
write_output = true;
if (strcmp(optarg, "light") == 0 || strcmp(optarg, "l") == 0) {
write_output_noimage = true;
logger.Warning("Image data will not be saved.");
}
break;
case 'v':
verbose = true;
@@ -248,14 +204,6 @@ int main(int argc, char **argv) {
case 'x':
refine_beam_center = false;
break;
case 'm':
filtering = false;
if (optarg)
filter_min_per_bin = atoi(optarg);
break;
case 'w':
refine_wedge = true;
break;
case 'D':
d_min_scale_merge = atof(optarg);
logger.Info("High resolution limit for scaling/merging set to {:.2f} A", d_min_spot_finding);
@@ -265,15 +213,10 @@ int main(int argc, char **argv) {
break;
case 'M':
run_scaling = true;
if (optarg) merging_threshold = atof(optarg);
break;
case 'A':
anomalous_mode = true;
break;
case 'T':
sigma_spot_finding = atof(optarg);
logger.Info("Noise threshold level for spot finding set to {:.2f} sigma", sigma_spot_finding);
break;
case 'C': {
auto uc = parse_unit_cell_arg(optarg);
if (!uc.has_value()) {
@@ -353,11 +296,8 @@ int main(int argc, char **argv) {
experiment.OverwriteExistingFiles(true);
experiment.PolarizationFactor(0.99);
if (fixed_reference_unit_cell.has_value()) {
if (fixed_reference_unit_cell.has_value())
experiment.SetUnitCell(*fixed_reference_unit_cell);
} else {
experiment.SetUnitCell({});
}
if (max_spot_count_override.has_value()) {
experiment.MaxSpotCount(max_spot_count_override.value());
@@ -368,8 +308,6 @@ int main(int argc, char **argv) {
IndexingSettings indexing_settings;
indexing_settings.Algorithm(indexing_algorithm);
indexing_settings.RotationIndexing(rotation_indexing);
if (rotation_indexing)
logger.Info("Rotation indexing is activated.");
if (rotation_indexing_range.has_value())
indexing_settings.RotationIndexingMinAngularRange_deg(rotation_indexing_range.value());
@@ -379,27 +317,10 @@ int main(int argc, char **argv) {
indexing_settings.GeomRefinementAlgorithm(GeomRefinementAlgorithmEnum::None);
experiment.ImportIndexingSettings(indexing_settings);
switch (experiment.GetIndexingAlgorithm()) {
case IndexingAlgorithmEnum::FFBIDX:
logger.Info("Indexer used: FFBIDX");
break;
case IndexingAlgorithmEnum::FFTW:
logger.Info("Indexer used: FFTW");
break;
case IndexingAlgorithmEnum::FFT:
logger.Info("Indexer used: FFT (CUDA)");
break;
case IndexingAlgorithmEnum::None:
logger.Warning("Indexer not defined!");
return 0;
default: ;
}
SpotFindingSettings spot_settings;
spot_settings.enable = true;
spot_settings.indexing = true;
spot_settings.high_resolution_limit = d_min_spot_finding;
spot_settings.signal_to_noise_threshold = sigma_spot_finding;
if (d_min_scale_merge > 0)
spot_settings.high_resolution_limit = d_min_spot_finding;
@@ -475,10 +396,6 @@ int main(int argc, char **argv) {
compressed_buffer.resize(MaxCompressedSize(experiment.GetCompressionAlgorithm(),
experiment.GetPixelsNum(),
experiment.GetByteDepthImage()));
auto size = compressor.Compress(compressed_buffer.data(),
compressed_buffer.data(),
experiment.GetPixelsNum(),
sizeof(uint8_t));
// Thread-local analysis resources
MXAnalysisWithoutFPGA analysis(experiment, mapping, pixel_mask, indexer);
@@ -529,11 +446,10 @@ int main(int argc, char **argv) {
auto image_end_time = std::chrono::high_resolution_clock::now();
std::chrono::duration<float> image_duration = image_end_time - image_start_time;
if (!write_output_noimage)
size = compressor.Compress(compressed_buffer.data(),
img->Image().data(),
experiment.GetPixelsNum(),
sizeof(int32_t));
auto size = compressor.Compress(compressed_buffer.data(),
img->Image().data(),
experiment.GetPixelsNum(),
sizeof(int32_t));
msg.image = CompressedImage(compressed_buffer.data(),
size, experiment.GetXPixelsNum(),
@@ -566,7 +482,7 @@ int main(int argc, char **argv) {
}
// Progress log
if ((current_idx_offset > 0 && (current_idx_offset+1) % 100 == 0) || image_idx == end_image - 1) {
if (current_idx_offset > 0 && current_idx_offset % 100 == 0) {
std::optional<float> indexing_rate;
{
std::lock_guard<std::mutex> lock(plots_mutex);
@@ -575,21 +491,11 @@ int main(int argc, char **argv) {
if (indexing_rate.has_value()) {
logger.Info("Processed {} / {} images (indexing rate {:.1f}%)",
current_idx_offset+1, images_to_process,
current_idx_offset, images_to_process,
indexing_rate.value() * 100.0f);
} else {
logger.Info("Processed {} / {} images (indexing rate N/A)",
current_idx_offset+1, images_to_process);
}
if (image_idx == end_image - 1) {
if (!merging_threshold.has_value()) merging_threshold = 0.5f;
if (!indexing_rate.has_value()) {
run_scaling = false;
} else if (indexing_rate.value() < merging_threshold) {
run_scaling = false;
logger.Warning("Not proceed to scale and merge with lower indexing rate: {:.1f}%",
indexing_rate.value()*100.0f);
}
current_idx_offset, images_to_process);
}
}
}
@@ -641,6 +547,7 @@ int main(int argc, char **argv) {
logger.Info("Rotation Indexing found lattice");
}
// --- Optional: run scaling (mosaicity refinement) on accumulated reflections ---
// --- Optional: run scaling (mosaicity refinement) on accumulated reflections ---
if (run_scaling) {
logger.Info("Running scaling (mosaicity refinement) ...");
@@ -651,15 +558,6 @@ int main(int argc, char **argv) {
scale_opts.max_solver_time_s = 240.0; // generous cutoff for now
scale_opts.merge_friedel = !anomalous_mode;
scale_opts.d_min_limit_A = d_min_scale_merge.value_or(0.0);
scale_opts.refine_wedge = refine_wedge;
if (filter_min_per_bin.has_value()) {
scale_opts.selection = true;
scale_opts.filter_min_per_bin = filter_min_per_bin.value();
} else {
scale_opts.selection = filtering;
}
if (rotation_indexing)
scale_opts.filter_min_per_bin = scale_opts.filter_min_per_bin * 0.25;
const bool fixed_space_group = space_group || experiment.GetGemmiSpaceGroup().has_value();
@@ -667,17 +565,12 @@ int main(int argc, char **argv) {
scale_opts.space_group = *space_group;
else
scale_opts.space_group = experiment.GetGemmiSpaceGroup();
if (scale_opts.space_group->number == 0) scale_opts.space_group = *gemmi::find_spacegroup_by_number(1);
logger.Info("Starting SG-no.: {}", scale_opts.space_group->number);
auto scale_start = std::chrono::steady_clock::now();
auto scale_result = indexer.ScaleRotationData(scale_opts);
auto scale_end = std::chrono::steady_clock::now();
double scale_time = std::chrono::duration<double>(scale_end - scale_start).count();
if (scale_result) print_statistics(logger, scale_result->statistics);
// if (scale_opts.wedge_deg.has_value()) logger.Info("Refined wedge: {:.3f}", scale_opts.wedge_deg.value());
if (scale_result && !fixed_space_group) {
logger.Info("Searching for space group from P1-merged reflections ...");
@@ -686,16 +579,13 @@ int main(int argc, char **argv) {
sg_opts.centering = '\0';
sg_opts.merge_friedel = !anomalous_mode;
sg_opts.d_min_limit_A = d_min_scale_merge.value_or(0.0);
sg_opts.min_i_over_sigma = 4.0; // 0.0; follows the DIALS's default
sg_opts.min_i_over_sigma = 0.0;
sg_opts.min_operator_cc = 0.80;
sg_opts.min_pairs_per_operator = 20;
sg_opts.min_total_compared = 100;
sg_opts.test_systematic_absences = true;
auto sg_search_start = std::chrono::steady_clock::now();
const auto sg_search = SearchSpaceGroup(scale_result->merged, sg_opts);
auto sg_search_end = std::chrono::steady_clock::now();
double sg_search_time = std::chrono::duration<double>(sg_search_end - sg_search_start).count();
logger.Info("");
{
@@ -709,24 +599,17 @@ int main(int argc, char **argv) {
logger.Info("");
if (sg_search.best_space_group.has_value()) {
logger.Info("SG-search wall-clock time: {:.2f} s", sg_search_time);
if (sg_search.best_space_group->number != 0) {
if (sg_search.best_space_group->number != scale_opts.space_group->number) {
logger.Info("Re-running scaling in detected space group {}", sg_search.best_space_group->short_name());
logger.Info("Re-running scaling in detected space group {}", sg_search.best_space_group->short_name());
scale_opts.space_group = *sg_search.best_space_group;
scale_opts.space_group = *sg_search.best_space_group;
auto rescale_start = std::chrono::steady_clock::now();
auto refined_scale_result = indexer.ScaleRotationData(scale_opts);
auto rescale_end = std::chrono::steady_clock::now();
auto rescale_start = std::chrono::steady_clock::now();
auto refined_scale_result = indexer.ScaleRotationData(scale_opts);
auto rescale_end = std::chrono::steady_clock::now();
if (refined_scale_result) {
scale_result = std::move(refined_scale_result);
scale_time += std::chrono::duration<double>(rescale_end - rescale_start).count();
}
} else {
logger.Info("No space group update indicated.");
}
if (refined_scale_result) {
scale_result = std::move(refined_scale_result);
scale_time += std::chrono::duration<double>(rescale_end - rescale_start).count();
}
} else {
logger.Warning("No space group accepted; keeping P1-merged result");
@@ -740,7 +623,36 @@ int main(int argc, char **argv) {
scale_time, scale_result->merged.size());
// Print resolution-shell statistics table
print_statistics(logger, scale_result->statistics);
{
const auto &stats = scale_result->statistics;
logger.Info("");
logger.Info(" {:>8s} {:>8s} {:>8s} {:>8s} {:>8s} {:>10s}",
"d_min", "N_obs", "N_uniq", "Rmeas", "<I/sig>", "Complete");
logger.Info(" {:->8s} {:->8s} {:->8s} {:->8s} {:->8s} {:->10s}",
"", "", "", "", "", "");
for (const auto &sh: stats.shells) {
if (sh.unique_reflections == 0)
continue;
std::string compl_str = (sh.completeness > 0.0)
? fmt::format("{:8.1f}%", sh.completeness * 100.0)
: " N/A";
logger.Info(" {:8.2f} {:8d} {:8d} {:8.3f}% {:8.1f} {:>10s}",
sh.d_min, sh.total_observations, sh.unique_reflections,
sh.rmeas * 100, sh.mean_i_over_sigma, compl_str);
}
{
const auto &ov = stats.overall;
logger.Info(" {:->8s} {:->8s} {:->8s} {:->8s} {:->8s} {:->10s}",
"", "", "", "", "", "");
std::string compl_str = (ov.completeness > 0.0)
? fmt::format("{:8.1f}%", ov.completeness * 100.0)
: " N/A";
logger.Info(" {:>8s} {:8d} {:8d} {:8.3f}% {:8.1f} {:>10s}",
"Overall", ov.total_observations, ov.unique_reflections,
ov.rmeas * 100, ov.mean_i_over_sigma, compl_str);
}
logger.Info("");
}
{
const std::string img_path = output_prefix + "_image.dat";
@@ -786,8 +698,6 @@ int main(int argc, char **argv) {
cif_meta.unit_cell = rotation_indexer_ret->lattice.GetUnitCell();
} else if (experiment.GetUnitCell().has_value()) {
cif_meta.unit_cell = experiment.GetUnitCell().value();
} else {
logger.Warning("No UnitCell output");
}
if (scale_opts.space_group.has_value()) {
@@ -835,11 +745,12 @@ int main(int argc, char **argv) {
double frame_rate = static_cast<double>(images_to_process) / processing_time;
logger.Info("");
logger.Info("Processing time (excl. scaling): {:8.2f} s", processing_time);
logger.Info("Frame rate: {:8.2f} Hz", frame_rate);
logger.Info("Total throughput: {:8.2f} MB/s", throughput_MBs);
logger.Info("Processing time: {:.2f} s", processing_time);
logger.Info("Frame rate: {:.2f} Hz", frame_rate);
logger.Info("Total throughput:{:.2f} MB/s", throughput_MBs);
// Print extended stats similar to Receiver
if (end_msg.indexing_rate.has_value()) {
if (!end_msg.indexing_rate.has_value()) {
logger.Info("Indexing rate: {:.2f}%", end_msg.indexing_rate.value() * 100.0);
}

View File

@@ -25,6 +25,7 @@ FileWriter::FileWriter(const StartMessage &request)
switch (format) {
case FileWriterFormat::NXmxLegacy:
case FileWriterFormat::NXmxVDS:
case FileWriterFormat::NXmxIntegrated:
CreateHDF5MasterFile(request);
break;
case FileWriterFormat::CBF:
@@ -42,6 +43,7 @@ void FileWriter::Write(const DataMessage &msg) {
case FileWriterFormat::DataOnly:
case FileWriterFormat::NXmxLegacy:
case FileWriterFormat::NXmxVDS:
case FileWriterFormat::NXmxIntegrated:
WriteHDF5(msg);
break;
case FileWriterFormat::CBF:
@@ -70,8 +72,8 @@ void FileWriter::WriteHDF5(const DataMessage& msg) {
if (msg.number < 0)
throw JFJochException(JFJochExceptionCategory::ArrayOutOfBounds, "No support for negative images");
const uint64_t file_number = msg.number / start_message.images_per_file;
const uint64_t image_number = msg.number % start_message.images_per_file;
const uint64_t file_number = (start_message.images_per_file == 0) ? 0 : msg.number / start_message.images_per_file;
const uint64_t image_number = (start_message.images_per_file == 0) ? msg.number : msg.number % start_message.images_per_file;
if (closed_files.contains(file_number))
return;
@@ -79,9 +81,11 @@ void FileWriter::WriteHDF5(const DataMessage& msg) {
if (files.size() <= file_number)
files.resize(file_number + 1);
if (!files[file_number])
if (!files[file_number]) {
files[file_number] = std::make_unique<HDF5DataFile>(start_message, file_number);
if (format == FileWriterFormat::NXmxIntegrated && master_file)
files[file_number]->CreateFile(msg, master_file->GetFile());
}
files[file_number]->Write(msg, image_number);
if (files[file_number]->GetNumImages() == start_message.images_per_file) {
@@ -121,13 +125,13 @@ void FileWriter::CloseOldFiles(uint64_t current_image_number) {
std::vector<HDF5DataFileStatistics> FileWriter::Finalize() {
std::lock_guard<std::mutex> lock(hdf5_mutex);
if (master_file)
master_file.reset();
for (uint64_t f = 0; f < files.size(); ++f) {
if (files[f] && !closed_files.contains(f))
CloseFile(f);
}
if (master_file)
master_file.reset();
return stats;
}

View File

@@ -77,7 +77,7 @@ std::optional<HDF5DataFileStatistics> HDF5DataFile::Close() {
}
data_file.reset();
if (!std::filesystem::exists(filename.c_str()) || overwrite)
if (manage_file && (!std::filesystem::exists(filename.c_str()) || overwrite))
std::rename(tmp_filename.c_str(), filename.c_str());
closed = true;
@@ -102,7 +102,7 @@ HDF5DataFile::~HDF5DataFile() {
}
}
void HDF5DataFile::CreateFile(const DataMessage& msg) {
void HDF5DataFile::CreateFile(const DataMessage& msg, std::shared_ptr<HDF5File> in_data_file) {
HDF5Dcpl dcpl;
HDF5DataType data_type(msg.image.GetMode());
@@ -130,7 +130,7 @@ void HDF5DataFile::CreateFile(const DataMessage& msg) {
break;
}
data_file = std::make_unique<HDF5File>(tmp_filename);
data_file = in_data_file;
HDF5Group(*data_file, "/entry").NXClass("NXentry");
HDF5Group(*data_file, "/entry/data").NXClass("NXdata");
@@ -149,11 +149,10 @@ void HDF5DataFile::Write(const DataMessage &msg, uint64_t image_number) {
if (image_number >= images_per_file)
throw JFJochException(JFJochExceptionCategory::FileWriteError,
"Image number out of bounds");
bool new_file = false;
if (!data_file) {
CreateFile(msg);
new_file = true;
manage_file = true;
CreateFile(msg, std::make_shared<HDF5File>(tmp_filename));
}
if (new_file || (static_cast<int64_t>(image_number) > max_image_number)) {
@@ -161,6 +160,7 @@ void HDF5DataFile::Write(const DataMessage &msg, uint64_t image_number) {
timestamp.resize(max_image_number + 1);
exptime.resize(max_image_number + 1);
number.resize(max_image_number + 1);
new_file = false;
}
nimages++;

View File

@@ -25,7 +25,7 @@ class HDF5DataFile {
std::string filename;
std::string tmp_filename;
std::unique_ptr<HDF5File> data_file = nullptr;
std::shared_ptr<HDF5File> data_file = nullptr;
std::unique_ptr<HDF5DataSet> data_set = nullptr;
std::unique_ptr<HDF5DataSet> data_set_image_number = nullptr;
std::vector<std::unique_ptr<HDF5DataFilePlugin>> plugins;
@@ -47,13 +47,16 @@ class HDF5DataFile {
bool overwrite = false;
int64_t file_number;
void CreateFile(const DataMessage& msg);
bool new_file = true;
bool manage_file = false;
public:
HDF5DataFile(const StartMessage &msg, uint64_t file_number);
~HDF5DataFile();
std::optional<HDF5DataFileStatistics> Close();
void Write(const DataMessage& msg, uint64_t image_number);
size_t GetNumImages() const;
void CreateFile(const DataMessage& msg, std::shared_ptr<HDF5File> data_file);
};
#endif //HDF5DATAFILE_H

View File

@@ -25,14 +25,13 @@ void HDF5DataFilePluginAzInt::OpenFile(HDF5File &data_file, const DataMessage &m
data_file.SaveVector("/entry/azint/bin_to_phi", az_int_bin_to_phi, dim);
az_int_image.reserve(images_per_file * azimuthal_bins * q_bins);
az_int_image.resize(msg.number * azimuthal_bins * q_bins);
}
void HDF5DataFilePluginAzInt::Write(const DataMessage &msg, uint64_t image_number) {
if (az_int_bin_to_q.empty() || q_bins <= 0 || azimuthal_bins <= 0)
return;
if (static_cast<int64_t>(image_number) >= max_image_number) {
if (image_number >= max_image_number || (max_image_number == 0)) {
max_image_number = image_number;
az_int_image.resize((max_image_number + 1) * azimuthal_bins * q_bins);
}

View File

@@ -11,9 +11,15 @@
#include "../common/time_utc.h"
#include "gemmi/symmetry.hpp"
namespace {
std::string GenFilename(const StartMessage &start) {
return fmt::format("{:s}_master.h5", start.file_prefix);
}
}
NXmx::NXmx(const StartMessage &start)
: start_message(start),
filename(start.file_prefix + "_master.h5") {
filename(GenFilename(start)) {
uint64_t tmp_suffix;
try {
if (!start.arm_date.empty())
@@ -31,7 +37,7 @@ NXmx::NXmx(const StartMessage &start)
bool v1_10 = (start.file_format == FileWriterFormat::NXmxVDS);
hdf5_file = std::make_unique<HDF5File>(tmp_filename, v1_10);
hdf5_file = std::make_shared<HDF5File>(tmp_filename, v1_10);
hdf5_file->Attr("file_name", filename);
hdf5_file->Attr("HDF5_Version", hdf5_version());
HDF5Group(*hdf5_file, "/entry").NXClass("NXentry").SaveScalar("definition", "NXmx");
@@ -52,6 +58,8 @@ NXmx::~NXmx() {
std::rename(tmp_filename.c_str(), filename.c_str());
}
std::string HDF5Metadata::DataFileName(const StartMessage &msg, int64_t file_number) {
if (file_number < 0)
throw JFJochException(JFJochExceptionCategory::InputParameterInvalid,
@@ -184,7 +192,7 @@ void NXmx::LinkToData_VDS(const StartMessage &start, const EndMessage &end) {
if (!start.az_int_bin_to_q.empty()) {
size_t azimuthal_bins = start.az_int_phi_bin_count.value_or(1);
size_t q_bins = start.az_int_q_bin_count.value_or(1);
if (q_bins > 0 & azimuthal_bins > 0) {
if (q_bins > 0 && azimuthal_bins > 0) {
VDS(start, "/entry/azint/image",
{total_images, azimuthal_bins, q_bins},
HDF5DataType(0.0f));
@@ -657,13 +665,17 @@ void NXmx::AzimuthalIntegration(const StartMessage &start, const EndMessage &end
HDF5Group az_int_group(*hdf5_file, "/entry/azint");
az_int_group.NXClass("NXcollection");
az_int_group.SaveVector("bin_to_q", start.az_int_bin_to_q, dim)->Units("reciprocal Angstrom");
if (!start.az_int_bin_to_two_theta.empty())
az_int_group.SaveVector("bin_to_two_theta", start.az_int_bin_to_two_theta, dim)->Units("degrees");
if (!start.az_int_bin_to_phi.empty())
az_int_group.SaveVector("bin_to_phi", start.az_int_bin_to_phi, dim)->Units("degrees");
for (const auto &[x,y]: end.az_int_result)
az_int_group.SaveVector(x, y, dim);
if (start.file_format != FileWriterFormat::NXmxIntegrated) {
az_int_group.SaveVector("bin_to_q", start.az_int_bin_to_q, dim)->Units("reciprocal Angstrom");
if (!start.az_int_bin_to_two_theta.empty())
az_int_group.SaveVector("bin_to_two_theta", start.az_int_bin_to_two_theta, dim)->Units("degrees");
if (!start.az_int_bin_to_phi.empty())
az_int_group.SaveVector("bin_to_phi", start.az_int_bin_to_phi, dim)->Units("degrees");
}
for (const auto &[x,y]: end.az_int_result) {
if (x != "image")
az_int_group.SaveVector(x, y, dim);
}
}
}
@@ -693,10 +705,17 @@ void NXmx::Finalize(const EndMessage &end) {
AzimuthalIntegration(start_message, end);
ADUHistogram(end);
if (start_message.file_format == FileWriterFormat::NXmxVDS)
LinkToData_VDS(start_message, end);
else
LinkToData(start_message, end);
switch (start_message.file_format.value_or(FileWriterFormat::NXmxLegacy)) {
case FileWriterFormat::NXmxLegacy:
LinkToData(start_message, end);
break;
case FileWriterFormat::NXmxVDS:
LinkToData_VDS(start_message, end);
break;
case FileWriterFormat::NXmxIntegrated:
default:
break;
}
if (end.rotation_lattice)
SaveVector(*hdf5_file, "/entry/MX/rotationLatticeIndexed", end.rotation_lattice->GetVector())
@@ -731,3 +750,7 @@ void NXmx::UserData(const StartMessage &start) {
}
}
}
std::shared_ptr<HDF5File> NXmx::GetFile() {
return hdf5_file;
}

View File

@@ -13,7 +13,7 @@ namespace HDF5Metadata {
}
class NXmx {
std::unique_ptr<HDF5File> hdf5_file;
std::shared_ptr<HDF5File> hdf5_file;
const StartMessage start_message;
const std::string filename;
std::string tmp_filename;
@@ -58,6 +58,8 @@ public:
NXmx& operator=(const NXmx &other) = delete;
void Finalize(const EndMessage &end);
void WriteCalibration(const CompressedImage &image);
std::shared_ptr<HDF5File> GetFile();
};
#endif //JUNGFRAUJOCH_HDF5NXMX_H

View File

@@ -315,76 +315,119 @@ void HDF5Fapl::SetVersionTo1p10orNewer() {
H5Pset_libver_bounds(id, H5F_LIBVER_V110, H5F_LIBVER_LATEST);
}
template <typename T>
static HDF5Object& WriteOrCreateScalarAttr(HDF5Object& object, const std::string& name, const T& val) {
HDF5DataSpace dataspace;
HDF5DataType datatype(val);
hid_t attr_id = -1;
if (H5Aexists(object.GetID(), name.c_str()) > 0) {
attr_id = H5Aopen(object.GetID(), name.c_str(), H5P_DEFAULT);
if (attr_id < 0)
throw JFJochException(JFJochExceptionCategory::HDF5, "Cannot open attribute " + name);
hid_t existing_type = H5Aget_type(attr_id);
if (existing_type < 0) {
H5Aclose(attr_id);
throw JFJochException(JFJochExceptionCategory::HDF5, "Cannot get attribute type " + name);
}
const bool recreate =
(H5Tget_class(existing_type) != H5Tget_class(datatype.GetID())) ||
(H5Tget_size(existing_type) != H5Tget_size(datatype.GetID()));
H5Tclose(existing_type);
H5Aclose(attr_id);
if (recreate) {
if (H5Adelete(object.GetID(), name.c_str()) < 0)
throw JFJochException(JFJochExceptionCategory::HDF5, "Cannot delete attribute " + name);
attr_id = H5Acreate2(object.GetID(), name.c_str(), datatype.GetID(), dataspace.GetID(), H5P_DEFAULT, H5P_DEFAULT);
} else {
attr_id = H5Aopen(object.GetID(), name.c_str(), H5P_DEFAULT);
}
} else {
attr_id = H5Acreate2(object.GetID(), name.c_str(), datatype.GetID(), dataspace.GetID(), H5P_DEFAULT, H5P_DEFAULT);
}
if (attr_id < 0)
throw JFJochException(JFJochExceptionCategory::HDF5, "Cannot create/open attribute " + name);
herr_t ret = H5Awrite(attr_id, datatype.GetID(), &val);
H5Aclose(attr_id);
if (ret < 0)
throw JFJochException(JFJochExceptionCategory::HDF5, "Attribute write unsuccessful");
return object;
}
HDF5Object & HDF5Object::Attr(const std::string &name, const std::string &val) {
HDF5DataSpace dataspace;
HDF5DataType datatype(val);
hid_t attr_id = H5Acreate2(id, name.c_str(), datatype.GetID(), dataspace.GetID(), H5P_DEFAULT, H5P_DEFAULT);
hid_t attr_id = -1;
if (H5Aexists(id, name.c_str()) > 0) {
attr_id = H5Aopen(id, name.c_str(), H5P_DEFAULT);
if (attr_id < 0)
throw JFJochException(JFJochExceptionCategory::HDF5, "Cannot open attribute " + name);
hid_t existing_type = H5Aget_type(attr_id);
if (existing_type < 0) {
H5Aclose(attr_id);
throw JFJochException(JFJochExceptionCategory::HDF5, "Cannot get attribute type " + name);
}
const bool recreate =
(H5Tget_class(existing_type) != H5T_STRING) ||
(H5Tget_size(existing_type) < val.length() + 1);
H5Tclose(existing_type);
H5Aclose(attr_id);
if (recreate) {
if (H5Adelete(id, name.c_str()) < 0)
throw JFJochException(JFJochExceptionCategory::HDF5, "Cannot delete attribute " + name);
attr_id = H5Acreate2(id, name.c_str(), datatype.GetID(), dataspace.GetID(), H5P_DEFAULT, H5P_DEFAULT);
} else {
attr_id = H5Aopen(id, name.c_str(), H5P_DEFAULT);
}
} else {
attr_id = H5Acreate2(id, name.c_str(), datatype.GetID(), dataspace.GetID(), H5P_DEFAULT, H5P_DEFAULT);
}
if (attr_id < 0)
throw JFJochException(JFJochExceptionCategory::HDF5, "Cannot create/open attribute " + name);
herr_t ret = H5Awrite(attr_id, datatype.GetID(), val.c_str());
H5Aclose(attr_id);
if (ret < 0) throw JFJochException(JFJochExceptionCategory::HDF5, "Attribute write unsuccessful");
if (ret < 0)
throw JFJochException(JFJochExceptionCategory::HDF5, "Attribute write unsuccessful");
return *this;
}
HDF5Object & HDF5Object::Attr(const std::string &name, int32_t val) {
HDF5DataSpace dataspace;
HDF5DataType datatype(val);
hid_t attr_id = H5Acreate2(id, name.c_str(), datatype.GetID(), dataspace.GetID(), H5P_DEFAULT, H5P_DEFAULT);
herr_t ret = H5Awrite(attr_id, datatype.GetID(), &val);
H5Aclose(attr_id);
if (ret < 0) throw JFJochException(JFJochExceptionCategory::HDF5, "Atrribute write unsucessful");
return *this;
return WriteOrCreateScalarAttr(*this, name, val);
}
HDF5Object & HDF5Object::Attr(const std::string &name, uint32_t val) {
HDF5DataSpace dataspace;
HDF5DataType datatype(val);
hid_t attr_id = H5Acreate2(id, name.c_str(), datatype.GetID(), dataspace.GetID(), H5P_DEFAULT, H5P_DEFAULT);
herr_t ret = H5Awrite(attr_id, datatype.GetID(), &val);
H5Aclose(attr_id);
if (ret < 0) throw JFJochException(JFJochExceptionCategory::HDF5, "Atrribute write unsucessful");
return *this;
return WriteOrCreateScalarAttr(*this, name, val);
}
HDF5Object & HDF5Object::Attr(const std::string &name, int64_t val) {
HDF5DataSpace dataspace;
HDF5DataType datatype(val);
hid_t attr_id = H5Acreate2(id, name.c_str(), datatype.GetID(), dataspace.GetID(), H5P_DEFAULT, H5P_DEFAULT);
herr_t ret = H5Awrite(attr_id, datatype.GetID(), &val);
H5Aclose(attr_id);
if (ret < 0) throw JFJochException(JFJochExceptionCategory::HDF5, "Atrribute write unsucessful");
return *this;
return WriteOrCreateScalarAttr(*this, name, val);
}
HDF5Object & HDF5Object::Attr(const std::string &name, uint64_t val) {
HDF5DataSpace dataspace;
HDF5DataType datatype(val);
hid_t attr_id = H5Acreate2(id, name.c_str(), datatype.GetID(), dataspace.GetID(), H5P_DEFAULT, H5P_DEFAULT);
herr_t ret = H5Awrite(attr_id, datatype.GetID(), &val);
H5Aclose(attr_id);
if (ret < 0) throw JFJochException(JFJochExceptionCategory::HDF5, "Atrribute write unsucessful");
return *this;
return WriteOrCreateScalarAttr(*this, name, val);
}
HDF5Object & HDF5Object::Attr(const std::string &name, double val) {
HDF5DataSpace dataspace;
HDF5DataType datatype(val);
hid_t attr_id = H5Acreate2(id, name.c_str(), datatype.GetID(), dataspace.GetID(), H5P_DEFAULT, H5P_DEFAULT);
herr_t ret = H5Awrite(attr_id, datatype.GetID(), &val);
H5Aclose(attr_id);
if (ret < 0) throw JFJochException(JFJochExceptionCategory::HDF5, "Atrribute write unsucessful");
return *this;
return WriteOrCreateScalarAttr(*this, name, val);
}
HDF5Object & HDF5Object::Attr(const std::string &name, const std::vector<double> &val) {
@@ -632,14 +675,19 @@ HDF5Group::HDF5Group(const HDF5Object& parent, const std::string &name) : HDF5Gr
}
HDF5Group::HDF5Group(const HDF5Object& parent, const char *name) : HDF5Object() {
id = H5Gcreate(parent.GetID(), name, H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT);
if (H5Lexists(parent.GetID(), name, H5P_DEFAULT) > 0)
id = H5Gopen(parent.GetID(), name, H5P_DEFAULT);
else
id = H5Gcreate(parent.GetID(), name, H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT);
if (id < 0)
throw JFJochException(JFJochExceptionCategory::HDF5, "Cannot open/create HDF5 group " + std::string(name));
}
HDF5Group::~HDF5Group() {
H5Gclose(id);
}
HDF5File::HDF5File(const std::string& filename, bool v1_10) : HDF5Object() {
HDF5Fapl fapl;