Compare commits

...

24 Commits

Author SHA1 Message Date
1b611468b3 Update XDS tests to delete .LP files from old measurements
All checks were successful
Build Packages / build:rpm (rocky8_nocuda) (push) Successful in 10m48s
Build Packages / build:rpm (ubuntu2404_nocuda) (push) Successful in 14m50s
Build Packages / build:rpm (ubuntu2204_nocuda) (push) Successful in 16m17s
Build Packages / build:rpm (rocky9_nocuda) (push) Successful in 16m41s
Build Packages / build:rpm (rocky8) (push) Successful in 17m58s
Build Packages / build:rpm (rocky8_sls9) (push) Successful in 18m22s
Build Packages / build:rpm (rocky9) (push) Successful in 18m32s
Build Packages / build:rpm (rocky9_sls9) (push) Successful in 19m16s
Build Packages / Generate python client (push) Successful in 1m58s
Build Packages / build:rpm (ubuntu2204) (push) Successful in 10m0s
Build Packages / Create release (push) Has been skipped
Build Packages / Build documentation (push) Successful in 2m6s
Build Packages / build:rpm (ubuntu2404) (push) Successful in 10m38s
Build Packages / XDS test (durin plugin) (push) Successful in 10m3s
Build Packages / XDS test (JFJoch plugin) (push) Successful in 9m18s
Build Packages / XDS test (neggia plugin) (push) Successful in 9m12s
Build Packages / DIALS test (push) Successful in 13m51s
Build Packages / Unit tests (push) Successful in 1h0m45s
2026-04-05 09:29:23 +02:00
c4aaefbf2f XDS plugin: fix version provided to XDS to 0 0 0
All checks were successful
Build Packages / build:rpm (rocky9) (push) Successful in 19m5s
Build Packages / Generate python client (push) Successful in 1m3s
Build Packages / build:rpm (ubuntu2404) (push) Successful in 9m26s
Build Packages / XDS test (JFJoch plugin) (push) Successful in 9m39s
Build Packages / DIALS test (push) Successful in 12m46s
Build Packages / build:rpm (rocky9_nocuda) (push) Successful in 17m20s
Build Packages / XDS test (neggia plugin) (push) Successful in 9m8s
Build Packages / build:rpm (rocky8_nocuda) (push) Successful in 12m31s
Build Packages / build:rpm (rocky8_sls9) (push) Successful in 17m21s
Build Packages / build:rpm (rocky8) (push) Successful in 17m57s
Build Packages / Build documentation (push) Successful in 2m12s
Build Packages / Create release (push) Has been skipped
Build Packages / XDS test (durin plugin) (push) Successful in 8m33s
Build Packages / build:rpm (ubuntu2204) (push) Successful in 9m18s
Build Packages / build:rpm (ubuntu2404_nocuda) (push) Successful in 14m55s
Build Packages / build:rpm (rocky9_sls9) (push) Successful in 19m4s
Build Packages / build:rpm (ubuntu2204_nocuda) (push) Successful in 16m24s
Build Packages / Unit tests (push) Successful in 1h1m3s
2026-04-04 22:43:44 +02:00
f5b1ad4b2d Documentation updates
Some checks failed
Build Packages / Create release (push) Has been skipped
Build Packages / build:rpm (ubuntu2404) (push) Successful in 11m50s
Build Packages / Unit tests (push) Has been cancelled
Build Packages / XDS test (neggia plugin) (push) Successful in 8m45s
Build Packages / build:rpm (rocky8_nocuda) (push) Successful in 11m3s
Build Packages / build:rpm (rocky8) (push) Successful in 17m47s
Build Packages / Build documentation (push) Successful in 1m50s
Build Packages / build:rpm (rocky8_sls9) (push) Successful in 17m56s
Build Packages / DIALS test (push) Successful in 13m27s
Build Packages / build:rpm (rocky9_nocuda) (push) Successful in 14m34s
Build Packages / build:rpm (rocky9_sls9) (push) Successful in 18m46s
Build Packages / build:rpm (ubuntu2404_nocuda) (push) Successful in 14m37s
Build Packages / build:rpm (rocky9) (push) Successful in 18m48s
Build Packages / Generate python client (push) Successful in 1m32s
Build Packages / XDS test (durin plugin) (push) Successful in 8m34s
Build Packages / build:rpm (ubuntu2204_nocuda) (push) Successful in 16m25s
Build Packages / build:rpm (ubuntu2204) (push) Successful in 9m41s
Build Packages / XDS test (JFJoch plugin) (push) Failing after 8m11s
2026-04-04 22:06:53 +02:00
f8ef814ee3 Minor enhancements to make the plugin nice (print more information, add library version, upload RHEL8 compiled version to the repository) 2026-04-04 21:47:20 +02:00
c5605f5b0b XDS plugin: limit symbols visibility, so the library cannot export HDF5/Zstd or other things
All checks were successful
Build Packages / build:rpm (rocky8_nocuda) (push) Successful in 10m36s
Build Packages / build:rpm (ubuntu2204_nocuda) (push) Successful in 15m9s
Build Packages / build:rpm (rocky9_nocuda) (push) Successful in 15m40s
Build Packages / build:rpm (ubuntu2404_nocuda) (push) Successful in 15m55s
Build Packages / build:rpm (rocky9) (push) Successful in 17m42s
Build Packages / build:rpm (rocky8_sls9) (push) Successful in 17m53s
Build Packages / build:rpm (rocky8) (push) Successful in 17m53s
Build Packages / build:rpm (rocky9_sls9) (push) Successful in 18m57s
Build Packages / Generate python client (push) Successful in 1m13s
Build Packages / build:rpm (ubuntu2204) (push) Successful in 10m9s
Build Packages / Create release (push) Has been skipped
Build Packages / Build documentation (push) Successful in 2m6s
Build Packages / XDS test (durin plugin) (push) Successful in 7m26s
Build Packages / build:rpm (ubuntu2404) (push) Successful in 10m13s
Build Packages / XDS test (neggia plugin) (push) Successful in 9m15s
Build Packages / DIALS test (push) Successful in 11m48s
Build Packages / XDS test (JFJoch plugin) (push) Successful in 9m53s
Build Packages / Unit tests (push) Successful in 1h1m49s
2026-04-04 09:19:20 +02:00
f1ec8d9630 Use Kal's bitshuffle routine in decompression 2026-04-04 09:13:43 +02:00
811437b232 Integrated format writes to /entry/data/data + add XDS plugin
Some checks failed
Build Packages / build:rpm (rocky8_nocuda) (push) Successful in 11m45s
Build Packages / build:rpm (ubuntu2204_nocuda) (push) Successful in 14m28s
Build Packages / build:rpm (ubuntu2404_nocuda) (push) Successful in 15m19s
Build Packages / build:rpm (rocky9_nocuda) (push) Successful in 16m0s
Build Packages / build:rpm (rocky8) (push) Successful in 17m30s
Build Packages / build:rpm (rocky8_sls9) (push) Successful in 17m42s
Build Packages / build:rpm (rocky9) (push) Successful in 18m34s
Build Packages / build:rpm (rocky9_sls9) (push) Successful in 18m43s
Build Packages / Generate python client (push) Successful in 1m24s
Build Packages / Build documentation (push) Successful in 2m8s
Build Packages / Create release (push) Has been skipped
Build Packages / build:rpm (ubuntu2204) (push) Successful in 9m56s
Build Packages / XDS test (durin plugin) (push) Successful in 8m21s
Build Packages / XDS test (JFJoch plugin) (push) Failing after 7m55s
Build Packages / build:rpm (ubuntu2404) (push) Successful in 11m31s
Build Packages / XDS test (neggia plugin) (push) Successful in 8m41s
Build Packages / DIALS test (push) Successful in 12m57s
Build Packages / Unit tests (push) Successful in 57m30s
2026-04-03 21:30:22 +02:00
ec2c73c9df CI: Fix Durin test
Some checks failed
Build Packages / build:rpm (rocky8_nocuda) (push) Successful in 10m15s
Build Packages / build:rpm (ubuntu2404_nocuda) (push) Successful in 14m6s
Build Packages / build:rpm (rocky9_nocuda) (push) Successful in 14m18s
Build Packages / build:rpm (ubuntu2204_nocuda) (push) Successful in 15m7s
Build Packages / build:rpm (rocky8) (push) Successful in 17m32s
Build Packages / build:rpm (rocky8_sls9) (push) Successful in 17m41s
Build Packages / build:rpm (rocky9) (push) Successful in 18m3s
Build Packages / Generate python client (push) Successful in 38s
Build Packages / build:rpm (rocky9_sls9) (push) Successful in 18m24s
Build Packages / Create release (push) Has been skipped
Build Packages / Build documentation (push) Successful in 1m33s
Build Packages / build:rpm (ubuntu2204) (push) Successful in 9m58s
Build Packages / XDS test (durin plugin) (push) Failing after 7m59s
Build Packages / build:rpm (ubuntu2404) (push) Successful in 9m57s
Build Packages / XDS test (neggia plugin) (push) Successful in 7m38s
Build Packages / DIALS test (push) Successful in 13m3s
Build Packages / Unit tests (push) Successful in 1h1m23s
2026-04-02 13:57:36 +02:00
a8bf9e741d Remove Durin/VDS test
Some checks failed
Build Packages / build:rpm (rocky8_nocuda) (push) Successful in 11m51s
Build Packages / build:rpm (ubuntu2404_nocuda) (push) Successful in 12m7s
Build Packages / build:rpm (rocky9_nocuda) (push) Successful in 14m49s
Build Packages / build:rpm (ubuntu2204_nocuda) (push) Successful in 14m51s
Build Packages / build:rpm (rocky8) (push) Successful in 15m32s
Build Packages / build:rpm (rocky8_sls9) (push) Successful in 15m44s
Build Packages / build:rpm (rocky9_sls9) (push) Successful in 16m37s
Build Packages / Generate python client (push) Successful in 2m3s
Build Packages / Build documentation (push) Successful in 1m8s
Build Packages / XDS test (durin plugin) (push) Failing after 7m43s
Build Packages / Create release (push) Has been skipped
Build Packages / build:rpm (ubuntu2204) (push) Successful in 12m8s
Build Packages / build:rpm (rocky9) (push) Successful in 13m43s
Build Packages / XDS test (neggia plugin) (push) Successful in 10m21s
Build Packages / build:rpm (ubuntu2404) (push) Successful in 11m54s
Build Packages / DIALS test (push) Successful in 14m35s
Build Packages / Unit tests (push) Has been cancelled
2026-04-02 12:51:20 +02:00
5a0d912ad7 Make integrated format more robust 2026-04-02 12:50:32 +02:00
fa4a391317 Another try with XDS - adding neggia and bitshuffle to docker container
Some checks failed
Build Packages / build:rpm (rocky8_nocuda) (push) Successful in 13m36s
Build Packages / build:rpm (rocky9_nocuda) (push) Successful in 15m4s
Build Packages / build:rpm (rocky8_sls9) (push) Successful in 16m24s
Build Packages / build:rpm (ubuntu2404_nocuda) (push) Successful in 16m34s
Build Packages / build:rpm (ubuntu2204_nocuda) (push) Successful in 16m51s
Build Packages / build:rpm (rocky8) (push) Successful in 17m54s
Build Packages / build:rpm (rocky9) (push) Successful in 18m20s
Build Packages / build:rpm (rocky9_sls9) (push) Successful in 18m57s
Build Packages / Generate python client (push) Successful in 1m32s
Build Packages / Create release (push) Has been skipped
Build Packages / Build documentation (push) Successful in 1m50s
Build Packages / build:rpm (ubuntu2204) (push) Successful in 10m50s
Build Packages / XDS test (durin plugin) (push) Failing after 9m4s
Build Packages / XDS test (neggia plugin) (push) Successful in 9m38s
Build Packages / build:rpm (ubuntu2404) (push) Successful in 11m43s
Build Packages / DIALS test (push) Successful in 13m0s
Build Packages / Unit tests (push) Successful in 59m9s
2026-03-31 09:20:45 +02:00
e19712d797 CI: Separate DIALS and durin test
Some checks failed
Build Packages / build:rpm (rocky8_nocuda) (push) Successful in 12m44s
Build Packages / build:rpm (ubuntu2204_nocuda) (push) Successful in 13m35s
Build Packages / build:rpm (ubuntu2404_nocuda) (push) Successful in 13m51s
Build Packages / build:rpm (rocky8_sls9) (push) Successful in 17m40s
Build Packages / build:rpm (rocky8) (push) Successful in 17m35s
Build Packages / build:rpm (rocky9_nocuda) (push) Successful in 18m6s
Build Packages / Generate python client (push) Successful in 44s
Build Packages / build:rpm (rocky9) (push) Successful in 18m43s
Build Packages / Create release (push) Has been skipped
Build Packages / build:rpm (rocky9_sls9) (push) Successful in 19m0s
Build Packages / Build documentation (push) Successful in 1m18s
Build Packages / build:rpm (ubuntu2204) (push) Successful in 9m33s
Build Packages / build:rpm (ubuntu2404) (push) Successful in 8m53s
Build Packages / XDS test (durin plugin) (push) Failing after 6m16s
Build Packages / DIALS test (push) Successful in 12m29s
Build Packages / Unit tests (push) Has been cancelled
2026-03-31 08:06:30 +02:00
2e5a650a01 Fix XDS
Some checks failed
Build Packages / build:rpm (ubuntu2204) (push) Has been cancelled
Build Packages / build:rpm (ubuntu2404) (push) Has been cancelled
Build Packages / Data processing test (push) Has been cancelled
Build Packages / Generate python client (push) Has been cancelled
Build Packages / Build documentation (push) Has been cancelled
Build Packages / Unit tests (push) Has been cancelled
Build Packages / Create release (push) Has been cancelled
Build Packages / build:rpm (rocky9) (push) Has been cancelled
Build Packages / build:rpm (rocky9_sls9) (push) Has been cancelled
Build Packages / build:rpm (ubuntu2404_nocuda) (push) Has been cancelled
Build Packages / build:rpm (rocky8) (push) Has been cancelled
Build Packages / build:rpm (rocky9_nocuda) (push) Has been cancelled
Build Packages / build:rpm (rocky8_sls9) (push) Has been cancelled
Build Packages / build:rpm (ubuntu2204_nocuda) (push) Has been cancelled
Build Packages / build:rpm (rocky8_nocuda) (push) Has been cancelled
2026-03-31 08:04:48 +02:00
aa72d1e59b Fix XDS test
Some checks failed
Build Packages / build:rpm (ubuntu2404_nocuda) (push) Successful in 15m46s
Build Packages / build:rpm (rocky8_nocuda) (push) Successful in 16m4s
Build Packages / build:rpm (ubuntu2204_nocuda) (push) Successful in 16m4s
Build Packages / build:rpm (rocky8) (push) Successful in 16m29s
Build Packages / build:rpm (rocky8_sls9) (push) Successful in 16m56s
Build Packages / build:rpm (rocky9_nocuda) (push) Successful in 17m6s
Build Packages / build:rpm (rocky9_sls9) (push) Successful in 17m23s
Build Packages / Generate python client (push) Successful in 1m11s
Build Packages / Create release (push) Has been skipped
Build Packages / Build documentation (push) Successful in 1m41s
Build Packages / build:rpm (ubuntu2404) (push) Successful in 11m25s
Build Packages / build:rpm (ubuntu2204) (push) Successful in 12m47s
Build Packages / build:rpm (rocky9) (push) Successful in 13m6s
Build Packages / Data processing test (push) Failing after 13m47s
Build Packages / Unit tests (push) Successful in 1h1m48s
2026-03-30 18:35:39 +02:00
72613ca3a2 CI: Add one more XDS test
Some checks failed
Build Packages / build:rpm (rocky8_nocuda) (push) Successful in 13m55s
Build Packages / build:rpm (ubuntu2404_nocuda) (push) Successful in 15m30s
Build Packages / build:rpm (rocky9_nocuda) (push) Successful in 16m44s
Build Packages / build:rpm (ubuntu2204_nocuda) (push) Successful in 17m12s
Build Packages / build:rpm (rocky8) (push) Successful in 17m25s
Build Packages / build:rpm (rocky8_sls9) (push) Successful in 17m40s
Build Packages / Generate python client (push) Successful in 1m13s
Build Packages / Create release (push) Has been skipped
Build Packages / Build documentation (push) Failing after 58s
Build Packages / build:rpm (rocky9) (push) Successful in 19m6s
Build Packages / build:rpm (rocky9_sls9) (push) Successful in 19m15s
Build Packages / build:rpm (ubuntu2204) (push) Successful in 10m54s
Build Packages / build:rpm (ubuntu2404) (push) Successful in 9m4s
Build Packages / Data processing test (push) Failing after 11m16s
Build Packages / Unit tests (push) Successful in 58m18s
2026-03-30 14:54:00 +02:00
69c090bfa4 HDF5NXmx: Always use _master.h5 for consistency 2026-03-30 14:53:11 +02:00
e57a5e4f79 CI: Add XDS test
Some checks failed
Build Packages / build:rpm (ubuntu2204) (push) Has been cancelled
Build Packages / build:rpm (ubuntu2404) (push) Has been cancelled
Build Packages / Data processing test (push) Has been cancelled
Build Packages / Generate python client (push) Has been cancelled
Build Packages / build:rpm (rocky8_sls9) (push) Has been cancelled
Build Packages / Build documentation (push) Has been cancelled
Build Packages / Unit tests (push) Has been cancelled
Build Packages / Create release (push) Has been cancelled
Build Packages / build:rpm (rocky9) (push) Has been cancelled
Build Packages / build:rpm (rocky9_sls9) (push) Has been cancelled
Build Packages / build:rpm (rocky8) (push) Has been cancelled
Build Packages / build:rpm (rocky8_nocuda) (push) Has been cancelled
Build Packages / build:rpm (ubuntu2204_nocuda) (push) Has been cancelled
Build Packages / build:rpm (ubuntu2404_nocuda) (push) Has been cancelled
Build Packages / build:rpm (rocky9_nocuda) (push) Has been cancelled
2026-03-30 14:44:07 +02:00
dfbadc4cb3 CI: Separate directories for each test
Some checks failed
Build Packages / build:rpm (rocky8_nocuda) (push) Successful in 10m11s
Build Packages / build:rpm (ubuntu2204_nocuda) (push) Has been cancelled
Build Packages / build:rpm (ubuntu2404_nocuda) (push) Has started running
Build Packages / build:rpm (rocky8_sls9) (push) Has started running
Build Packages / build:rpm (rocky8) (push) Has started running
Build Packages / build:rpm (ubuntu2404) (push) Has been cancelled
Build Packages / DIALS processing test (push) Has been cancelled
Build Packages / Generate python client (push) Has been cancelled
Build Packages / Build documentation (push) Has been cancelled
Build Packages / Unit tests (push) Has been cancelled
Build Packages / Create release (push) Has been cancelled
Build Packages / build:rpm (rocky9_sls9) (push) Has been cancelled
Build Packages / build:rpm (ubuntu2204) (push) Has been cancelled
Build Packages / build:rpm (rocky9_nocuda) (push) Has been cancelled
Build Packages / build:rpm (rocky9) (push) Has been cancelled
2026-03-30 14:31:36 +02:00
baf36c9c0a CI: Fix empty section
Some checks failed
Build Packages / build:rpm (rocky8_nocuda) (push) Successful in 11m16s
Build Packages / build:rpm (ubuntu2404_nocuda) (push) Successful in 12m22s
Build Packages / build:rpm (ubuntu2204_nocuda) (push) Successful in 16m5s
Build Packages / build:rpm (rocky9_nocuda) (push) Successful in 16m19s
Build Packages / Generate python client (push) Successful in 56s
Build Packages / build:rpm (rocky8) (push) Successful in 17m21s
Build Packages / build:rpm (rocky8_sls9) (push) Successful in 17m43s
Build Packages / Create release (push) Has been skipped
Build Packages / Build documentation (push) Successful in 50s
Build Packages / build:rpm (rocky9) (push) Successful in 18m27s
Build Packages / build:rpm (rocky9_sls9) (push) Successful in 18m43s
Build Packages / build:rpm (ubuntu2404) (push) Successful in 8m48s
Build Packages / build:rpm (ubuntu2204) (push) Successful in 10m54s
Build Packages / DIALS processing test (push) Successful in 11m26s
Build Packages / Unit tests (push) Has been cancelled
2026-03-30 13:52:44 +02:00
b91fbb03fb Frontend: Fix typo 2026-03-30 13:52:34 +02:00
7570794a60 Write single file option enabled. Minor issues:
Some checks failed
Build Packages / build:rpm (rocky8_nocuda) (push) Failing after 6m2s
Build Packages / build:rpm (rocky9_nocuda) (push) Failing after 7m30s
Build Packages / build:rpm (ubuntu2404_nocuda) (push) Failing after 8m18s
Build Packages / DIALS processing test (push) Failing after 0s
Build Packages / build:rpm (rocky8_sls9) (push) Failing after 8m45s
Build Packages / build:rpm (ubuntu2204_nocuda) (push) Failing after 8m52s
Build Packages / Generate python client (push) Successful in 2m0s
Build Packages / Create release (push) Has been skipped
Build Packages / build:rpm (rocky9_sls9) (push) Failing after 10m28s
Build Packages / Build documentation (push) Successful in 2m27s
Build Packages / build:rpm (rocky9) (push) Failing after 11m35s
Build Packages / build:rpm (rocky8) (push) Failing after 11m47s
Build Packages / build:rpm (ubuntu2204) (push) Failing after 7m45s
Build Packages / build:rpm (ubuntu2404) (push) Failing after 6m16s
Build Packages / Unit tests (push) Successful in 56m23s
1. What if write_master_file = false
2. Finalized file statistics use wrong name
2026-03-30 12:29:36 +02:00
6639b584ec Minor improvements to integrated NXmx workflow
Some checks failed
Build Packages / build:rpm (rocky8_nocuda) (push) Failing after 6m27s
Build Packages / build:rpm (rocky9_nocuda) (push) Failing after 6m48s
Build Packages / build:rpm (ubuntu2204_nocuda) (push) Failing after 8m21s
Build Packages / build:rpm (ubuntu2404_nocuda) (push) Failing after 9m18s
Build Packages / build:rpm (rocky8_sls9) (push) Failing after 9m20s
Build Packages / Generate python client (push) Successful in 49s
Build Packages / build:rpm (rocky9) (push) Failing after 10m21s
Build Packages / Create release (push) Has been skipped
Build Packages / build:rpm (rocky8) (push) Failing after 11m36s
Build Packages / Build documentation (push) Successful in 2m20s
Build Packages / build:rpm (rocky9_sls9) (push) Failing after 12m45s
Build Packages / build:rpm (ubuntu2404) (push) Failing after 7m55s
Build Packages / build:rpm (ubuntu2204) (push) Failing after 8m21s
Build Packages / DIALS processing test (push) Successful in 10m14s
Build Packages / Unit tests (push) Failing after 56m6s
2026-03-29 13:22:36 +02:00
cd0fa49f73 HDF5: Groups and attributes creation can be reused 2026-03-29 13:22:08 +02:00
91dd670043 OpenAPI: Add integrated NXmx file writer format 2026-03-28 20:07:25 +01:00
47 changed files with 1788 additions and 168 deletions

View File

@@ -110,7 +110,7 @@ jobs:
cd ..
if [ "${{ matrix.distro }}" = "rocky8_nocuda" ]; then
for file in build/jfjoch-viewer*.rpm build/jfjoch-writer*rpm; do
for file in build/jfjoch-viewer*.rpm build/jfjoch-writer*rpm build/xds-plugin/libjfjoch_xds_plugin.so.*; do
python3 gitea_upload_file.py "$file"
done
elif [ "${{ matrix.distro }}" = "rocky9_nocuda" ]; then
@@ -123,7 +123,7 @@ jobs:
done
fi
dials-test:
name: DIALS processing test
name: DIALS test
runs-on: jfjoch_rocky9
steps:
- uses: actions/checkout@v4
@@ -134,20 +134,112 @@ jobs:
cd build
cmake -G Ninja -DCMAKE_BUILD_TYPE=Release ..
ninja -j16 jfjoch_hdf5_test
- name: Generate test data (with virtual data set and 4 linked image files)
shell: bash
run: |
set -euo pipefail
mkdir -p dials_test
cd dials_test
../build/tools/jfjoch_hdf5_test ../tests/test_data/compression_benchmark.h5 -n100 -f25 -V
- name: Run DIALS processing
- name: Run DIALS processing on legacy format
shell: bash
run: |
source /opt/dials-v3-27-0/dials_env.sh
set -euo pipefail
cd dials_test
xia2.ssx image=writing_test_master.h5 space_group=P43212 unit_cell=78.551,78.551,36.914,90.000,90.000,90.000
mkdir -p test01
cd test01
../build/tools/jfjoch_hdf5_test ../tests/test_data/compression_benchmark.h5 -n100 -o legacy
xia2.ssx image=legacy_master.h5 space_group=P43212 unit_cell=78.551,78.551,36.914,90.000,90.000,90.000
- name: Run DIALS processing on VDS (master + 4 linked image files)
shell: bash
run: |
source /opt/dials-v3-27-0/dials_env.sh
mkdir -p test02
cd test02
../build/tools/jfjoch_hdf5_test ../tests/test_data/compression_benchmark.h5 -n100 -f25 -V -o vds
xia2.ssx image=vds_master.h5 space_group=P43212 unit_cell=78.551,78.551,36.914,90.000,90.000,90.000
- name: Run DIALS processing on single file format
shell: bash
run: |
source /opt/dials-v3-27-0/dials_env.sh
mkdir -p test03
cd test03
../build/tools/jfjoch_hdf5_test ../tests/test_data/compression_benchmark.h5 -n100 -S -o single
xia2.ssx image=single.h5 space_group=P43212 unit_cell=78.551,78.551,36.914,90.000,90.000,90.000
xds-durin-test:
name: XDS test (durin plugin)
runs-on: jfjoch_rocky9
steps:
- uses: actions/checkout@v4
- name: Build processing test
shell: bash
run: |
mkdir -p build
cd build
cmake -G Ninja -DCMAKE_BUILD_TYPE=Release ..
ninja -j16 jfjoch_hdf5_test
- name: Run XDS with Durin and legacy HDF5 format
shell: bash
run: |
cd tests/xds_durin
../../build/tools/jfjoch_hdf5_test ../../tests/test_data/compression_benchmark.h5 -n25 -f10
/opt/xds/xds_par
test -f IDXREF.LP
xds-test:
name: XDS test (JFJoch plugin)
runs-on: jfjoch_rocky9
steps:
- uses: actions/checkout@v4
- name: Build processing test
shell: bash
run: |
mkdir -p build
cd build
cmake -G Ninja -DCMAKE_BUILD_TYPE=Release ..
ninja -j16 jfjoch_hdf5_test
ninja -j16 jfjoch_xds_plugin
- name: Run XDS with legacy HDF5 format
shell: bash
run: |
cd tests/xds
../../build/tools/jfjoch_hdf5_test ../../tests/test_data/compression_benchmark.h5 -n25 -f10
/opt/xds/xds_par
test -f IDXREF.LP
- name: Run XDS with VDS HDF5 format
shell: bash
run: |
cd tests/xds
rm -f *.h5 *.LP *.HKL
../../build/tools/jfjoch_hdf5_test ../../tests/test_data/compression_benchmark.h5 -n25 -f10 -V
/opt/xds/xds_par
test -f IDXREF.LP
- name: Run XDS with single HDF5 format
shell: bash
run: |
cd tests/xds
rm -f *.h5 *.LP *.HKL
../../build/tools/jfjoch_hdf5_test ../../tests/test_data/compression_benchmark.h5 -n25 -S
/opt/xds/xds_par
test -f IDXREF.LP
xds-neggia-test:
name: XDS test (neggia plugin)
runs-on: jfjoch_rocky9
steps:
- uses: actions/checkout@v4
- name: Build processing test
shell: bash
run: |
mkdir -p build
cd build
cmake -G Ninja -DCMAKE_BUILD_TYPE=Release ..
ninja -j16 jfjoch_hdf5_test
- name: Run XDS with Neggia and legacy HDF5 format
shell: bash
run: |
cd tests/xds_neggia
../../build/tools/jfjoch_hdf5_test ../../tests/test_data/compression_benchmark.h5 -n25 -f10
/opt/xds/xds_par
test -f IDXREF.LP
- name: Run XDS with Neggia and single HDF5 format
shell: bash
run: |
cd tests/xds_neggia
rm -f *.h5 *.LP *.HKL
../../build/tools/jfjoch_hdf5_test ../../tests/test_data/compression_benchmark.h5 -n25 -S
/opt/xds/xds_par
test -f IDXREF.LP
python-client:
name: Generate python client
runs-on: jfjoch_rocky8

View File

@@ -155,6 +155,7 @@ ADD_SUBDIRECTORY(detector_control)
ADD_SUBDIRECTORY(image_puller)
ADD_SUBDIRECTORY(preview)
ADD_SUBDIRECTORY(symmetry)
ADD_SUBDIRECTORY(xds-plugin)
IF (JFJOCH_WRITER_ONLY)
MESSAGE(STATUS "Compiling HDF5 writer only")

View File

@@ -795,7 +795,7 @@ org::openapitools::server::model::File_writer_format Convert(FileWriterFormat in
org::openapitools::server::model::File_writer_format ret;
switch (input) {
case FileWriterFormat::DataOnly:
ret.setValue(org::openapitools::server::model::File_writer_format::eFile_writer_format::NONE);
ret.setValue(org::openapitools::server::model::File_writer_format::eFile_writer_format::NXMXONLYDATA);
break;
case FileWriterFormat::NXmxLegacy:
ret.setValue(org::openapitools::server::model::File_writer_format::eFile_writer_format::NXMXLEGACY);
@@ -803,6 +803,9 @@ org::openapitools::server::model::File_writer_format Convert(FileWriterFormat in
case FileWriterFormat::NXmxVDS:
ret.setValue(org::openapitools::server::model::File_writer_format::eFile_writer_format::NXMXVDS);
break;
case FileWriterFormat::NXmxIntegrated:
ret.setValue(org::openapitools::server::model::File_writer_format::eFile_writer_format::NXMXINTEGRATED);
break;
case FileWriterFormat::CBF:
ret.setValue(org::openapitools::server::model::File_writer_format::eFile_writer_format::CBF);
break;
@@ -820,12 +823,14 @@ org::openapitools::server::model::File_writer_format Convert(FileWriterFormat in
FileWriterFormat Convert(const org::openapitools::server::model::File_writer_format& input) {
switch (input.getValue()) {
case org::openapitools::server::model::File_writer_format::eFile_writer_format::NONE:
case org::openapitools::server::model::File_writer_format::eFile_writer_format::NXMXONLYDATA:
return FileWriterFormat::DataOnly;
case org::openapitools::server::model::File_writer_format::eFile_writer_format::NXMXLEGACY:
return FileWriterFormat::NXmxLegacy;
case org::openapitools::server::model::File_writer_format::eFile_writer_format::NXMXVDS:
return FileWriterFormat::NXmxVDS;
case org::openapitools::server::model::File_writer_format::eFile_writer_format::NXMXINTEGRATED:
return FileWriterFormat::NXmxIntegrated;
case org::openapitools::server::model::File_writer_format::eFile_writer_format::CBF:
return FileWriterFormat::CBF;
case org::openapitools::server::model::File_writer_format::eFile_writer_format::TIFF:

View File

@@ -75,8 +75,8 @@ void to_json(nlohmann::json& j, const File_writer_format& o)
case File_writer_format::eFile_writer_format::INVALID_VALUE_OPENAPI_GENERATED:
j = "INVALID_VALUE_OPENAPI_GENERATED";
break;
case File_writer_format::eFile_writer_format::NONE:
j = "None";
case File_writer_format::eFile_writer_format::NXMXONLYDATA:
j = "NXmxOnlyData";
break;
case File_writer_format::eFile_writer_format::NXMXLEGACY:
j = "NXmxLegacy";
@@ -84,6 +84,9 @@ void to_json(nlohmann::json& j, const File_writer_format& o)
case File_writer_format::eFile_writer_format::NXMXVDS:
j = "NXmxVDS";
break;
case File_writer_format::eFile_writer_format::NXMXINTEGRATED:
j = "NXmxIntegrated";
break;
case File_writer_format::eFile_writer_format::CBF:
j = "CBF";
break;
@@ -100,8 +103,8 @@ void from_json(const nlohmann::json& j, File_writer_format& o)
{
auto s = j.get<std::string>();
if (s == "None") {
o.setValue(File_writer_format::eFile_writer_format::NONE);
if (s == "NXmxOnlyData") {
o.setValue(File_writer_format::eFile_writer_format::NXMXONLYDATA);
}
else if (s == "NXmxLegacy") {
o.setValue(File_writer_format::eFile_writer_format::NXMXLEGACY);
@@ -109,6 +112,9 @@ void from_json(const nlohmann::json& j, File_writer_format& o)
else if (s == "NXmxVDS") {
o.setValue(File_writer_format::eFile_writer_format::NXMXVDS);
}
else if (s == "NXmxIntegrated") {
o.setValue(File_writer_format::eFile_writer_format::NXMXINTEGRATED);
}
else if (s == "CBF") {
o.setValue(File_writer_format::eFile_writer_format::CBF);
}

View File

@@ -12,7 +12,7 @@
/*
* File_writer_format.h
*
* None - no master file written NXmxLegacy - legacy format with soft links to data files in the master file; necessary for DECTRIS Albula 4.0 and DECTRIS Neggia NXmxVDS - newer format with virtual dataset linking data files in the master file, also includes better metadata handling
* NoFileWritten - no files are written at all NXmxOnlyData - only data files are written, no master file NXmxLegacy - legacy format with soft links to data files in the master file; necessary for DECTRIS Albula 4.0 and DECTRIS Neggia NXmxVDS - newer format with virtual dataset linking data files in the master file, also includes better metadata handling NXmxIntegrated - single HDF5 per dataset CBF - CBF format (limited metadata) TIFF - TIFF format (no metadata)
*/
#ifndef File_writer_format_H_
@@ -25,7 +25,7 @@ namespace org::openapitools::server::model
{
/// <summary>
/// None - no master file written NXmxLegacy - legacy format with soft links to data files in the master file; necessary for DECTRIS Albula 4.0 and DECTRIS Neggia NXmxVDS - newer format with virtual dataset linking data files in the master file, also includes better metadata handling
/// NoFileWritten - no files are written at all NXmxOnlyData - only data files are written, no master file NXmxLegacy - legacy format with soft links to data files in the master file; necessary for DECTRIS Albula 4.0 and DECTRIS Neggia NXmxVDS - newer format with virtual dataset linking data files in the master file, also includes better metadata handling NXmxIntegrated - single HDF5 per dataset CBF - CBF format (limited metadata) TIFF - TIFF format (no metadata)
/// </summary>
class File_writer_format
{
@@ -38,9 +38,10 @@ public:
// Avoiding name clashes with user defined
// enum values
INVALID_VALUE_OPENAPI_GENERATED = 0,
NONE,
NXMXONLYDATA,
NXMXLEGACY,
NXMXVDS,
NXMXINTEGRATED,
CBF,
TIFF,
NOFILEWRITTEN

View File

@@ -642,17 +642,22 @@ components:
file_writer_format:
type: string
enum:
- "None"
- "NXmxOnlyData"
- "NXmxLegacy"
- "NXmxVDS"
- "NXmxIntegrated"
- "CBF"
- "TIFF"
- "NoFileWritten"
default: "NXmxLegacy"
description: |
None - no master file written
NoFileWritten - no files are written at all
NXmxOnlyData - only data files are written, no master file
NXmxLegacy - legacy format with soft links to data files in the master file; necessary for DECTRIS Albula 4.0 and DECTRIS Neggia
NXmxVDS - newer format with virtual dataset linking data files in the master file, also includes better metadata handling
NXmxIntegrated - single HDF5 per dataset
CBF - CBF format (limited metadata)
TIFF - TIFF format (no metadata)
file_writer_settings:
type: object
properties:

File diff suppressed because one or more lines are too long

View File

@@ -26,11 +26,12 @@ MESSAGE(STATUS "Jungfraujoch version: ${PACKAGE_VERSION}")
CONFIGURE_FILE("${CMAKE_CURRENT_SOURCE_DIR}/GitInfo.cpp.in" "${CMAKE_CURRENT_BINARY_DIR}/GitInfo.cpp" @ONLY)
ADD_LIBRARY(JFJochVersion STATIC
${CMAKE_CURRENT_BINARY_DIR}/GitInfo.cpp GitInfo.h)
ADD_LIBRARY(JFJochLogger STATIC
Logger.cpp Logger.h
${CMAKE_CURRENT_BINARY_DIR}/GitInfo.cpp GitInfo.h
)
TARGET_LINK_LIBRARIES(JFJochLogger PUBLIC spdlog::spdlog)
TARGET_LINK_LIBRARIES(JFJochLogger PUBLIC spdlog::spdlog JFJochVersion)
ADD_LIBRARY(JFJochZMQ STATIC ZMQWrappers.cpp ZMQWrappers.h)

View File

@@ -1083,7 +1083,8 @@ DiffractionExperiment &DiffractionExperiment::ImagesPerFile(int64_t input) {
int64_t DiffractionExperiment::GetImagesPerFile() const {
auto tmp = dataset.GetImagesPerFile();
if (tmp == 0)
if (tmp == 0
|| file_writer.GetHDF5MasterFormatVersion() == FileWriterFormat::NXmxIntegrated)
return GetImageNum();
else
return tmp;

View File

@@ -14,6 +14,7 @@ FileWriterSettings &FileWriterSettings::HDF5MasterFormatVersion(FileWriterFormat
case FileWriterFormat::DataOnly:
case FileWriterFormat::NXmxLegacy:
case FileWriterFormat::NXmxVDS:
case FileWriterFormat::NXmxIntegrated:
case FileWriterFormat::TIFF:
case FileWriterFormat::CBF:
case FileWriterFormat::NoFile:

View File

@@ -33,7 +33,7 @@ enum class FileWriterFormat : int {
DataOnly = 0,
NXmxLegacy = 1,
NXmxVDS = 2,
// TODO: NXmxTR = 3
NXmxIntegrated = 3,
CBF = 4,
TIFF = 5,
NoFile = 6

View File

@@ -7,7 +7,7 @@
#include <iostream>
inline void print_license(const std::string &component_name) {
std::cout << component_name << " Copyright (C) 2024 Paul Scherrer Institute" << std::endl;
std::cout << component_name << " Copyright (C) 2019-2026 Paul Scherrer Institute" << std::endl;
std::cout << "This program comes with ABSOLUTELY NO WARRANTY" << std::endl;
std::cout << "This is free software, and you are welcome to redistribute it" << std::endl;
std::cout << "under certain conditions (GPLv3)" << std::endl;

View File

@@ -13,8 +13,7 @@ ADD_LIBRARY(Compression STATIC
JFJochDecompress.h
MaxCompressedSize.cpp
MaxCompressedSize.h)
set_target_properties(Compression PROPERTIES POSITION_INDEPENDENT_CODE ON)
TARGET_COMPILE_DEFINITIONS(Compression PUBLIC -DZSTD_SUPPORT -DUSE_ZSTD)
TARGET_LINK_LIBRARIES(Compression libzstd_static)
TARGET_INCLUDE_DIRECTORIES(Compression PUBLIC . zstd/lib)

View File

@@ -9,6 +9,9 @@
#include <bitshuffle/bitshuffle.h>
#include <bitshuffle/bitshuffle_internals.h>
#include <bitshuffle_hperf/bitshuffle.h>
#include <lz4/lz4.h>
#include <zstd.h>
#include "../compression/CompressionAlgorithmEnum.h"
@@ -16,15 +19,100 @@
#include "../common/CompressedImage.h"
extern "C" {
uint64_t bshuf_read_uint64_BE(void* buf);
uint64_t bshuf_read_uint64_BE(const void* buf);
};
// Decompress a bitshuffle-framed stream using the "hperf" bitshuffle decoder,
// one block at a time.
//
// Stream layout (bitshuffle framing): for each block, a 4-byte big-endian
// compressed size followed by the compressed payload; trailing elements that
// do not fill a multiple of BSHUF_BLOCKED_MULT are stored uncompressed at the
// very end of the stream.
//
// output      - destination buffer (must hold nelements * elem_size bytes)
// algorithm   - BSHUF_LZ4, BSHUF_ZSTD or BSHUF_ZSTD_RLE (anything else throws);
//               ZSTD and ZSTD_RLE share the same decompression path here
// source      - compressed stream (without the 12-byte bshuf header)
// source_size - size of the compressed stream in bytes (used for bounds checks)
// nelements   - number of elements to produce
// elem_size   - size of one element in bytes
// block_size  - bitshuffle block size in elements; must be a multiple of
//               BSHUF_BLOCKED_MULT
//
// Returns the number of source bytes consumed. Throws JFJochException on
// malformed/truncated input or decoder failure.
inline size_t JFJochDecompressHperfPtr(uint8_t *output,
                                       CompressionAlgorithm algorithm,
                                       const uint8_t *source,
                                       size_t source_size,
                                       size_t nelements,
                                       size_t elem_size,
                                       size_t block_size) {
    if ((algorithm != CompressionAlgorithm::BSHUF_LZ4) &&
        (algorithm != CompressionAlgorithm::BSHUF_ZSTD) &&
        (algorithm != CompressionAlgorithm::BSHUF_ZSTD_RLE))
        throw JFJochException(JFJochExceptionCategory::Compression, "Algorithm not supported by hperf decompressor");

    if ((block_size % BSHUF_BLOCKED_MULT) != 0)
        throw JFJochException(JFJochExceptionCategory::Compression, "Invalid block size");

    // Scratch buffers sized for one full block.
    std::vector<char> decompressed_block(block_size * elem_size);
    std::vector<char> scratch(block_size * elem_size);

    const uint8_t *src_ptr = source;
    const uint8_t *src_end = source + source_size; // FIX: source_size was previously unused
    uint8_t *dst_ptr = output;

    const size_t num_full_blocks = nelements / block_size;
    const size_t reminder_size = nelements - num_full_blocks * block_size;
    // Part of the remainder that still forms complete BSHUF sub-blocks.
    const size_t last_block_size = reminder_size - reminder_size % BSHUF_BLOCKED_MULT;

    auto decode_block = [&](size_t current_nelements) {
        // Bounds check: 4-byte block header must fit in the remaining input.
        if (static_cast<size_t>(src_end - src_ptr) < 4)
            throw JFJochException(JFJochExceptionCategory::Compression, "Compressed stream truncated");
        const auto compressed_size = static_cast<size_t>(bshuf_read_uint32_BE(src_ptr));
        src_ptr += 4;

        // Bounds check: declared payload must fit in the remaining input.
        if (static_cast<size_t>(src_end - src_ptr) < compressed_size)
            throw JFJochException(JFJochExceptionCategory::Compression, "Compressed stream truncated");

        const size_t expected_size = current_nelements * elem_size;
        size_t decompressed_size = 0;

        switch (algorithm) {
            case CompressionAlgorithm::BSHUF_LZ4: {
                const int ret = LZ4_decompress_safe(reinterpret_cast<const char *>(src_ptr),
                                                    decompressed_block.data(),
                                                    static_cast<int>(compressed_size),
                                                    static_cast<int>(expected_size));
                if (ret < 0 || static_cast<size_t>(ret) != expected_size)
                    throw JFJochException(JFJochExceptionCategory::Compression, "LZ4 decompression error");
                decompressed_size = static_cast<size_t>(ret);
                break;
            }
            case CompressionAlgorithm::BSHUF_ZSTD:
            case CompressionAlgorithm::BSHUF_ZSTD_RLE: {
                const size_t ret = ZSTD_decompress(decompressed_block.data(),
                                                   expected_size,
                                                   src_ptr,
                                                   compressed_size);
                if (ZSTD_isError(ret) || ret != expected_size)
                    throw JFJochException(JFJochExceptionCategory::Compression, "ZSTD decompression error");
                decompressed_size = ret;
                break;
            }
            default:
                throw JFJochException(JFJochExceptionCategory::Compression, "Algorithm not supported");
        }

        // Undo the bit transposition for this block.
        if (bitshuf_decode_block(reinterpret_cast<char *>(dst_ptr),
                                 decompressed_block.data(),
                                 scratch.data(),
                                 current_nelements,
                                 elem_size) < 0)
            throw JFJochException(JFJochExceptionCategory::Compression, "bitshuffle_hperf decode error");

        src_ptr += compressed_size;
        dst_ptr += decompressed_size;
    };

    for (size_t i = 0; i < num_full_blocks; ++i)
        decode_block(block_size);

    if (last_block_size > 0)
        decode_block(last_block_size);

    // Elements that did not fit into a BSHUF sub-block are stored verbatim.
    const size_t leftover_bytes = (reminder_size % BSHUF_BLOCKED_MULT) * elem_size;
    if (leftover_bytes > 0) {
        if (static_cast<size_t>(src_end - src_ptr) < leftover_bytes)
            throw JFJochException(JFJochExceptionCategory::Compression, "Compressed stream truncated");
        memcpy(dst_ptr, src_ptr, leftover_bytes);
        src_ptr += leftover_bytes;
    }

    return static_cast<size_t>(src_ptr - source);
}
inline void JFJochDecompressPtr(uint8_t *output,
CompressionAlgorithm algorithm,
const uint8_t *source,
size_t source_size,
size_t nelements,
size_t elem_size) {
size_t elem_size,
bool use_hperf = true) {
size_t block_size;
if (algorithm != CompressionAlgorithm::NO_COMPRESSION) {
if (bshuf_read_uint64_BE(const_cast<uint8_t *>(source)) != nelements * elem_size)
@@ -40,15 +128,27 @@ inline void JFJochDecompressPtr(uint8_t *output,
memcpy(output, source, source_size);
break;
case CompressionAlgorithm::BSHUF_LZ4:
if (bshuf_decompress_lz4(source + 12, output, nelements,
elem_size, block_size) != source_size - 12)
throw JFJochException(JFJochExceptionCategory::Compression, "Decompression error");
if (use_hperf) {
if (JFJochDecompressHperfPtr(output, algorithm, source + 12, source_size - 12,
nelements, elem_size, block_size) != source_size - 12)
throw JFJochException(JFJochExceptionCategory::Compression, "Decompression error");
} else {
if (bshuf_decompress_lz4(source + 12, output, nelements,
elem_size, block_size) != source_size - 12)
throw JFJochException(JFJochExceptionCategory::Compression, "Decompression error");
}
break;
case CompressionAlgorithm::BSHUF_ZSTD_RLE:
case CompressionAlgorithm::BSHUF_ZSTD:
if (bshuf_decompress_zstd(source + 12, output, nelements,
elem_size, block_size) != source_size - 12)
throw JFJochException(JFJochExceptionCategory::Compression, "Decompression error");
if (use_hperf) {
if (JFJochDecompressHperfPtr(output, algorithm, source + 12, source_size - 12,
nelements, elem_size, block_size) != source_size - 12)
throw JFJochException(JFJochExceptionCategory::Compression, "Decompression error");
} else {
if (bshuf_decompress_zstd(source + 12, output, nelements,
elem_size, block_size) != source_size - 12)
throw JFJochException(JFJochExceptionCategory::Compression, "Decompression error");
}
break;
default:
throw JFJochException(JFJochExceptionCategory::Compression, "Not implemented algorithm");
@@ -57,15 +157,16 @@ inline void JFJochDecompressPtr(uint8_t *output,
template <class Td, class Ts>
void JFJochDecompress(std::vector<Td> &output, CompressionAlgorithm algorithm, const Ts *source_v, size_t source_size,
size_t nelements) {
size_t nelements, bool use_hperf = true) {
output.resize(nelements);
JFJochDecompressPtr((uint8_t *) output.data(), algorithm, (uint8_t *) source_v, source_size, nelements, sizeof(Td));
JFJochDecompressPtr((uint8_t *) output.data(), algorithm, (uint8_t *) source_v, source_size,
nelements, sizeof(Td), use_hperf);
}
template <class Td, class Ts>
void JFJochDecompress(std::vector<Td> &output, CompressionAlgorithm algorithm, const std::vector<Ts> source_v,
size_t nelements) {
JFJochDecompress(output, algorithm, source_v.data(), source_v.size() * sizeof(Ts), nelements);
size_t nelements, bool use_hperf = true) {
JFJochDecompress(output, algorithm, source_v.data(), source_v.size() * sizeof(Ts), nelements, use_hperf);
}
#endif //JUNGFRAUJOCH_JFJOCHDECOMPRESS_H

View File

@@ -200,6 +200,44 @@ RUN set -eux; \
cd /; \
rm -rf /tmp/dials.tar.xz /tmp/dials-*-linux-x86_64
RUN set -eux; \
mkdir -p /opt/xds; \
cd /tmp; \
curl -fL -o XDS-gfortran_Linux_x86_64.tar.gz https://xds.mr.mpg.de/XDS-gfortran_Linux_x86_64.tar.gz; \
tar -xzf XDS-gfortran_Linux_x86_64.tar.gz; \
cp -a /tmp/XDS-gfortran_Linux_x86_64/. /opt/xds/; \
curl -fL -o durin-plugin-rhel7-x86.zip https://github.com/DiamondLightSource/durin/releases/download/v2023-10/durin-plugin-rhel7-x86.zip; \
unzip durin-plugin-rhel7-x86.zip; \
mv durin-plugin.so /opt/xds/durin-plugin.so; \
chmod 755 /opt/xds/durin-plugin.so; \
rm -rf /tmp/XDS-gfortran_Linux_x86_64 /tmp/XDS-gfortran_Linux_x86_64.tar.gz /tmp/durin-plugin-rhel7-x86.zip
RUN set -eux; \
cd /tmp; \
curl -fL -o dectris-neggia-1.2.0.el7.tar.gz \
https://github.com/dectris/neggia/files/6585943/dectris-neggia-1.2.0.el7.tar.gz; \
tar -xzf dectris-neggia-1.2.0.el7.tar.gz; \
find . -name 'dectris-neggia*.so' -print; \
mkdir -p /opt/xds; \
cp -av "$(find . -name 'dectris-neggia*.so' | head -n 1)" /opt/xds/dectris-neggia.so; \
chmod 755 /opt/xds/dectris-neggia.so; \
rm -rf /tmp/dectris-neggia-1.2.0.el7.tar.gz /tmp/*
RUN dnf -y install python3-Cython python3-pip python3-devel && dnf clean all
RUN set -eux; \
cd /tmp; \
git clone --depth 1 https://github.com/kiyo-masui/bitshuffle.git; \
cd bitshuffle; \
mkdir -p bitshuffle/plugin; \
python3 setup.py build_ext --inplace; \
python3 setup.py install; \
mkdir -p /usr/local/hdf5/lib/plugin; \
plugin_so="$(find build -type f -name 'libh5bshuf*.so' | head -n 1)"; \
test -n "$plugin_so"; \
cp -av "$plugin_so" /usr/local/hdf5/lib/plugin/; \
rm -rf /tmp/bitshuffle
# Make Qt and Eigen discoverable by CMake
ENV CMAKE_PREFIX_PATH=${CMAKE_PREFIX_PATH}:/opt/hdf5-${HDF5_TAG}-static:/opt/qt-${QT_VERSION}-static

19
docs/JFJOCH_VIEWER.md Normal file
View File

@@ -0,0 +1,19 @@
# jfjoch_viewer
Jungfraujoch diffraction viewer is distributed as a standalone application.
It uses Qt library version 6, and allows to open HDF5 files generated by [`jfjoch_writer`](JFJOCH_WRITER.md).
It also allows opening NXmx files written by DECTRIS detectors, although testing of this feature is very limited.
It can be downloaded pre-built from Gitea release page, or from Jungfraujoch RPM/APT repositories.
See [Deployment](DEPLOYMENT.md) section for more information.
The viewer can also be synchronized online with the [`jfjoch_broker`](JFJOCH_BROKER.md) HTTP interface
to visualize data during data collection.
# Data processing pipeline
The viewer contains an embedded data processing pipeline, which is the same as the one used
in a Jungfraujoch installation. On systems equipped with a GPU, it is preferable to use the version
compiled with CUDA support (available from the dedicated RPM/APT repositories).
At the moment data processing results are not saved to disk.
# DBus interface
The viewer can be controlled externally via a DBus interface, which allows opening a file, opening a detector connection via HTTP, or loading an image.

View File

@@ -130,19 +130,25 @@ There are custom extension to NXmx format. These will be documented in the futur
Specifically, if data collection was configured with `header_appendix` having key equal to `hdf5` and value as JSON
object with number and string values. These will be added to `/entry/user`.
There are two versions of master file possible.
There are three possible versions of the master file.
#### Legacy version (NXmxLegacy)
By default, *legacy version* is used. This version is compatible with DECTRIS file writer version 1.0 format.
This ensures the file compatibility of Neggia and Durin XDS plugins, as well as DECTRIS Albula viewer version 4.0.
Distinct feature is that if images are split into data files, there will be multiple links in `/entry/data`,
each corresponding to a data file.
Yet, certain new HDF5 features, like virtual datasets, are not possible in this format since it has to be compatible with HDF5 1.8 features.
#### VDS format (NXmxVDS)
Therefore, we have enabled format *VDS version*. This will link to all data files via a single virtual dataset `/entry/data/data`.
The same way spot finding, azimuthal integration and others, will be linked between master and data files.
This format allows to display processing results in currently developed Jungfraujoch Viewer.
For the time being it only works with Durin XDS plugin, and require DECTRIS Albula viewer version 4.1+.
#### Integrated format (NXmxIntegrated)
This is a format where no data files are created; both images and metadata are stored in the same master file.
Its internal layout is generally equivalent to the VDS format described above.
### Data file
Data file has the following structure:

View File

@@ -0,0 +1,24 @@
# Integration with MX data processing software
## XDS
Jungfraujoch files are compatible with XDS, but a dedicated plugin is required.
We recommend using Jungfraujoch's own XDS plugin first. It is available for Linux only and can
be downloaded from the Gitea release directory (compiled on RHEL 8); it is also distributed in the [`jfjoch_viewer`](JFJOCH_VIEWER.md) RPM/APT packages.
To use the plugin, download the file `libjfjoch_xds_plugin.so.1.0.0` (three numbers at the end represent version of the plugin, and can differ later in time),
save it to common directory (e.g., `/opt/xds`) and add the following line in the XDS.INP file:
```
LIB="/opt/xds/libjfjoch_xds_plugin.so.1.0.0"
```
We are also testing XDS with Durin and Neggia plugins, though they don't have full functionality:
* Neggia plugin doesn't support HDF5 virtual data sets. It can be downloaded from [github.com/dectris/neggia](https://github.com/dectris/neggia/).
* Durin has known bugs when handling non-DECTRIS files (i.e., files with virtual data sets or the single-file HDF5 format).
We recommend Durin plugin prepared by the Global Phasing consortium: [github.com/CV-GPhL/durin](https://github.com/CV-GPhL/durin), rather than original from the Diamond Light Source.
## DIALS
Jungfraujoch files are tested regularly with DIALS (currently v. 3.27.0).
There is one known limitation: files generated with NXmxLegacy format (mimicking DECTRIS filewriter1 format)
are not handled properly with DIALS. We recommend VDS based format (NXmxVDS).
## CrystFEL
Jungfraujoch files are compatible with CrystFEL.

View File

@@ -30,6 +30,7 @@ Jungfraujoch is distributed under the GPLv3 license.
JFJOCH_BROKER
JFJOCH_WRITER
SOFTWARE_INTEGRATION
TOOLS
.. toctree::

View File

@@ -1,15 +1,17 @@
# FileWriterFormat
None - no master file written NXmxLegacy - legacy format with soft links to data files in the master file; necessary for DECTRIS Albula 4.0 and DECTRIS Neggia NXmxVDS - newer format with virtual dataset linking data files in the master file, also includes better metadata handling
NoFileWritten - no files are written at all NXmxOnlyData - only data files are written, no master file NXmxLegacy - legacy format with soft links to data files in the master file; necessary for DECTRIS Albula 4.0 and DECTRIS Neggia NXmxVDS - newer format with virtual dataset linking data files in the master file, also includes better metadata handling NXmxIntegrated - single HDF5 per dataset CBF - CBF format (limited metadata) TIFF - TIFF format (no metadata)
## Enum
* `NONE` (value: `'None'`)
* `NXMXONLYDATA` (value: `'NXmxOnlyData'`)
* `NXMXLEGACY` (value: `'NXmxLegacy'`)
* `NXMXVDS` (value: `'NXmxVDS'`)
* `NXMXINTEGRATED` (value: `'NXmxIntegrated'`)
* `CBF` (value: `'CBF'`)
* `TIFF` (value: `'TIFF'`)

View File

@@ -963,6 +963,7 @@ namespace {
switch (tmp) {
case FileWriterFormat::DataOnly:
case FileWriterFormat::NXmxLegacy:
case FileWriterFormat::NXmxIntegrated:
case FileWriterFormat::NXmxVDS:
case FileWriterFormat::CBF:
case FileWriterFormat::TIFF:
@@ -1022,7 +1023,7 @@ namespace {
if (j.contains("overwrite"))
message.overwrite = j["overwrite"];
if (j.contains("xfel_pulse_id"))
message.overwrite = j["xfel_pulse_id"];
message.xfel_pulse_id = j["xfel_pulse_id"];
if (j.contains("file_format"))
message.file_format = ProcessHDF5Format(j["file_format"]);
if (j.contains("poni_rot1"))

View File

@@ -30,8 +30,8 @@ function stringToEnum(value: string): file_writer_format {
(v) => v === value
) as file_writer_format;
// If no match is found, default to file_writer_format.NONE
return enumValue || file_writer_format.NONE;
// If no match is found, default to file_writer_format.NXMX_ONLY_DATA
return enumValue || file_writer_format.NXMX_ONLY_DATA;
}
@@ -109,7 +109,8 @@ class FileWriterSettings extends React.Component<MyProps, MyState> {
>
<MenuItem value={file_writer_format.NXMX_LEGACY}>NXmx HDF5 master file with soft links (DECTRIS file writer compatibility)</MenuItem>
<MenuItem value={file_writer_format.NXMX_VDS}>NXmx HDF5 master file with virtual datasets</MenuItem>
<MenuItem value={file_writer_format.NONE}>No NXmx HDF5 master file (only data files)</MenuItem>
<MenuItem value={file_writer_format.NXMX_INTEGRATED}>Single HDF5 file with data and metadata</MenuItem>
<MenuItem value={file_writer_format.NXMX_ONLY_DATA}>No NXmx HDF5 master file (only data files)</MenuItem>
<MenuItem value={file_writer_format.CBF}>miniCBF (only data files; limited metadata)</MenuItem>
<MenuItem value={file_writer_format.TIFF}>TIFF (only data files; no metadata)</MenuItem>
<MenuItem value={file_writer_format.NO_FILE_WRITTEN}>No files saved</MenuItem>

View File

@@ -4,15 +4,20 @@
/* eslint-disable */
/**
* None - no master file written
* NoFileWritten - no files are written at all
* NXmxOnlyData - only data files are written, no master file
* NXmxLegacy - legacy format with soft links to data files in the master file; necessary for DECTRIS Albula 4.0 and DECTRIS Neggia
* NXmxVDS - newer format with virtual dataset linking data files in the master file, also includes better metadata handling
* NXmxIntegrated - single HDF5 per dataset
* CBF - CBF format (limited metadata)
* TIFF - TIFF format (no metadata)
*
*/
export enum file_writer_format {
NONE = 'None',
NXMX_ONLY_DATA = 'NXmxOnlyData',
NXMX_LEGACY = 'NXmxLegacy',
NXMX_VDS = 'NXmxVDS',
NXMX_INTEGRATED = 'NXmxIntegrated',
CBF = 'CBF',
TIFF = 'TIFF',
NO_FILE_WRITTEN = 'NoFileWritten',

View File

@@ -10,5 +10,6 @@ ADD_LIBRARY(JFJochReader STATIC
JFJochHttpReader.h
)
TARGET_LINK_LIBRARIES(JFJochReader JFJochImageAnalysis JFJochAPI JFJochCommon JFJochZMQ JFJochLogger JFJochHDF5Wrappers CBORStream2FrameSerialize
TARGET_LINK_LIBRARIES(JFJochReader JFJochImageAnalysis JFJochAPI JFJochCommon JFJochZMQ JFJochLogger
JFJochHDF5Wrappers CBORStream2FrameSerialize
httplib::httplib ${CMAKE_DL_LIBS})

View File

@@ -116,8 +116,13 @@ void JFJochHDF5Reader::ReadFile(const std::string& filename) {
size_t image_size_x = 0;
size_t image_size_y = 0;
if (master_file->Exists("/entry/data/data")) {
legacy_format = false;
if (master_file->Exists("/entry/data/data"))
format = FileWriterFormat::NXmxVDS;
else if (master_file->Exists("/entry/data/data_000001")) {
format = FileWriterFormat::NXmxLegacy;
}
if (format == FileWriterFormat::NXmxVDS ) {
auto dim = GetDimension(*master_file, "/entry/data/data");
number_of_images = dim[0];
image_size_y = dim[1];
@@ -159,8 +164,7 @@ void JFJochHDF5Reader::ReadFile(const std::string& filename) {
}
if (master_file->Exists("/entry/image"))
dataset->max_value = master_file->ReadOptVector<int64_t>("/entry/image/max_value");
} else if (master_file->Exists("/entry/data/data_000001")) {
legacy_format = true;
} else if (format == FileWriterFormat::NXmxLegacy) {
legacy_format_files.clear();
image_size_x = master_file->GetInt("/entry/instrument/detector/detectorSpecific/x_pixels_in_detector");
@@ -334,7 +338,7 @@ void JFJochHDF5Reader::ReadFile(const std::string& filename) {
dataset->error_value = master_file->GetOptInt("/entry/instrument/detector/error_value");
dataset->jfjoch_release = master_file->GetString("/entry/instrument/detector/jfjoch_release");
dataset->jfjoch_release = master_file->GetString("/entry/instrument/detector/detectorSpecific/jfjoch_release");
InstrumentMetadata metadata;
metadata.InstrumentName(master_file->GetString("/entry/instrument/name"));
@@ -457,7 +461,9 @@ uint64_t JFJochHDF5Reader::GetNumberOfImages() const {
CompressedImage JFJochHDF5Reader::LoadImageDataset(std::vector<uint8_t> &tmp, HDF5Object &file, hsize_t number) {
std::vector<hsize_t> start = {static_cast<hsize_t>(number), 0, 0};
HDF5DataSet dataset(file, "/entry/data/data");
const std::string dataset_name = "/entry/data/data";
HDF5DataSet dataset(file, dataset_name);
HDF5DataSpace dataspace(dataset);
HDF5DataType datatype(dataset);
HDF5Dcpl dcpl(dataset);
@@ -508,7 +514,7 @@ bool JFJochHDF5Reader::LoadImage_i(std::shared_ptr<JFJochReaderDataset> &dataset
uint32_t image_id;
HDF5Object *source_file;
if (legacy_format) {
if (format == FileWriterFormat::NXmxLegacy) {
uint32_t file_id = image_number / images_per_file;
image_id = image_number % images_per_file;
tmp_data_file = std::make_unique<HDF5ReadOnlyFile>(legacy_format_files.at(file_id));

View File

@@ -8,11 +8,12 @@
#include "../writer/HDF5Objects.h"
class JFJochHDF5Reader : public JFJochReader {
FileWriterFormat format = FileWriterFormat::NoFile;
std::unique_ptr<HDF5ReadOnlyFile> master_file;
std::vector<std::string> legacy_format_files;
bool legacy_format = false;
size_t images_per_file = 1;
size_t number_of_images = 0;

View File

@@ -66,7 +66,9 @@ ADD_EXECUTABLE(jfjoch_test
TCPImagePusherTest.cpp
BraggIntegrate2DTest.cpp
SearchSpaceGroupTest.cpp
XDSPluginTest.cpp
)
target_link_libraries(jfjoch_test Catch2WithMain JFJochBroker JFJochReceiver JFJochReader JFJochWriter JFJochImageAnalysis JFJochCommon JFJochHLSSimulation JFJochPreview)
target_link_libraries(jfjoch_test Catch2WithMain JFJochBroker JFJochReceiver JFJochReader JFJochWriter JFJochImageAnalysis JFJochCommon JFJochHLSSimulation JFJochPreview
jfjoch_xds_plugin)
target_include_directories(jfjoch_test PRIVATE .)

View File

@@ -13,6 +13,64 @@
using namespace std::literals::chrono_literals;
// Creating an HDF5 group must succeed, creating the same group a second time
// must not throw (reopen semantics), and creating a group under a
// non-existent parent must throw.
TEST_CASE("HDF5Group_create_reopen_and_fail", "[HDF5][Unit]") {
    {
        HDF5File file("scratch_group_reopen.h5");
        // First creation of /group1.
        REQUIRE_NOTHROW(HDF5Group(file, "/group1"));
        REQUIRE(file.Exists("/group1"));
        // Same path again: must succeed rather than error out.
        REQUIRE_NOTHROW(HDF5Group(file, "/group1"));
        REQUIRE(file.Exists("/group1"));
        // Parent path does not exist -> expected to throw.
        REQUIRE_THROWS(HDF5Group(file, "/missing_parent/group2"));
    }
    remove("scratch_group_reopen.h5");
    // No HDF5 handles may remain open after the file object goes out of scope.
    REQUIRE(H5Fget_obj_count(H5F_OBJ_ALL, H5F_OBJ_ALL) == 0);
}
// Writing the same string attribute twice must replace the stored value, even
// when the new string is longer than the first; a read-only reopen of the
// file must observe only the last value written.
TEST_CASE("HDF5Attr_string_update", "[HDF5][Unit]") {
    const std::string initial_text = "abc";
    const std::string replacement_text = "a much longer attribute value";

    {
        // Write the attribute, then overwrite it with a longer value.
        HDF5File writer("scratch_attr_string_update.h5");
        REQUIRE_NOTHROW(writer.Attr("str_attr", initial_text));
        REQUIRE_NOTHROW(writer.Attr("str_attr", replacement_text));
    }

    {
        // Reopen read-only: only the replacement value must be visible.
        HDF5ReadOnlyFile reader("scratch_attr_string_update.h5");
        REQUIRE(reader.ReadAttrStr("str_attr") == replacement_text);
    }

    remove("scratch_attr_string_update.h5");
    // All HDF5 handles must have been released.
    REQUIRE(H5Fget_obj_count(H5F_OBJ_ALL, H5F_OBJ_ALL) == 0);
}
// Overwriting an int64 attribute must update the stored value both within the
// writing session and after reopening the file read-only.
TEST_CASE("HDF5Attr_int64_update", "[HDF5][Unit]") {
    constexpr int64_t original_number = -1234567890123LL;
    constexpr int64_t updated_number = 9876543210123LL;

    {
        HDF5File writer("scratch_attr_int64_update.h5");
        // The first write is readable immediately ...
        REQUIRE_NOTHROW(writer.Attr("int_attr", original_number));
        REQUIRE(writer.ReadAttrInt("int_attr") == original_number);
        // ... and so is the overwritten value.
        REQUIRE_NOTHROW(writer.Attr("int_attr", updated_number));
        REQUIRE(writer.ReadAttrInt("int_attr") == updated_number);
    }

    {
        // A read-only reopen must see the last value written.
        HDF5ReadOnlyFile reader("scratch_attr_int64_update.h5");
        REQUIRE(reader.ReadAttrInt("int_attr") == updated_number);
    }

    remove("scratch_attr_int64_update.h5");
    // All HDF5 handles must have been released.
    REQUIRE(H5Fget_obj_count(H5F_OBJ_ALL, H5F_OBJ_ALL) == 0);
}
TEST_CASE("HDF5DataSet_scalar", "[HDF5][Unit]") {
uint16_t tmp_scalar = 16788;
{
@@ -759,6 +817,274 @@ TEST_CASE("HDF5Writer_Link_VDS", "[HDF5][Full]") {
REQUIRE (H5Fget_obj_count(H5F_OBJ_ALL, H5F_OBJ_ALL) == 0);
}
// Basic NXmxIntegrated write path: all images and detector metadata are
// written into a single "<prefix>_master.h5" file; no separate data files
// are created and no external links are present.
TEST_CASE("HDF5Writer_NXmxIntegrated_Basic", "[HDF5][Full]") {
    DiffractionExperiment x(DetJF(1));
    // ImagesPerFile(2) is deliberately set and expected to be overridden below.
    x.ImagesPerTrigger(5).ImagesPerFile(2).Compression(CompressionAlgorithm::NO_COMPRESSION)
            .FilePrefix("integrated_basic");
    x.SetFileWriterFormat(FileWriterFormat::NXmxIntegrated).OverwriteExistingFiles(true);

    // NXmxIntegrated forces all images into one file
    REQUIRE(x.GetImagesPerFile() == x.GetImageNum());

    {
        RegisterHDF5Filter();
        StartMessage start_message;
        x.FillMessage(start_message);
        REQUIRE(start_message.file_format == FileWriterFormat::NXmxIntegrated);
        // images_per_file should equal total images for integrated
        REQUIRE(start_message.images_per_file == x.GetImageNum());

        EndMessage end_message;
        end_message.max_image_number = x.GetImageNum();

        FileWriter writer(start_message);
        // Write a constant-valued test image for every frame.
        std::vector<int16_t> image(x.GetPixelsNum(), 42);
        for (int i = 0; i < x.GetImageNum(); i++) {
            DataMessage message{};
            message.image = CompressedImage(image, x.GetXPixelsNum(), x.GetYPixelsNum());
            message.number = i;
            REQUIRE_NOTHROW(writer.Write(message));
        }
        writer.WriteHDF5(end_message);
        auto stats = writer.Finalize();
        // All images in one file — only one stats entry
        REQUIRE(stats.size() == 1);
        REQUIRE(stats[0].total_images == x.GetImageNum());
    }

    // Single integrated file, no separate master or data files
    REQUIRE(!std::filesystem::exists("integrated_basic.h5"));
    REQUIRE(std::filesystem::exists("integrated_basic_master.h5"));
    REQUIRE(!std::filesystem::exists("integrated_basic_data_000001.h5"));

    {
        HDF5ReadOnlyFile file("integrated_basic_master.h5");
        // Data should be directly in the file
        std::unique_ptr<HDF5DataSet> dataset;
        REQUIRE_NOTHROW(dataset = std::make_unique<HDF5DataSet>(file, "/entry/data/data"));
        // Dataset layout: (image, y, x).
        HDF5DataSpace file_space(*dataset);
        REQUIRE(file_space.GetNumOfDimensions() == 3);
        REQUIRE(file_space.GetDimensions()[0] == x.GetImageNum());
        REQUIRE(file_space.GetDimensions()[1] == x.GetYPixelsNum());
        REQUIRE(file_space.GetDimensions()[2] == x.GetXPixelsNum());
        // Master metadata should also be present
        REQUIRE_NOTHROW(dataset = std::make_unique<HDF5DataSet>(file, "/entry/instrument/detector/beam_center_x"));
        REQUIRE(dataset->ReadScalar<float>() == Catch::Approx(x.GetBeamX_pxl()));
        // No external links (unlike NXmxLegacy)
        REQUIRE_THROWS(std::make_unique<HDF5DataSet>(file, "/entry/data/data_000001"));
    }

    // No leftover HDF5 objects
    REQUIRE(H5Fget_obj_count(H5F_OBJ_ALL, H5F_OBJ_ALL) == 0);
    remove("integrated_basic_master.h5");
}
// NXmxIntegrated with per-image spot results: spot/detector plugin output and
// image data must both land in the single master file.
TEST_CASE("HDF5Writer_NXmxIntegrated_WithSpots", "[HDF5][Full]") {
    DiffractionExperiment x(DetJF(1));
    x.ImagesPerTrigger(3).Compression(CompressionAlgorithm::NO_COMPRESSION)
            .FilePrefix("integrated_spots");
    x.SetFileWriterFormat(FileWriterFormat::NXmxIntegrated).OverwriteExistingFiles(true);

    {
        RegisterHDF5Filter();
        StartMessage start_message;
        x.FillMessage(start_message);
        EndMessage end_message;
        end_message.max_image_number = x.GetImageNum();

        FileWriter writer(start_message);
        std::vector<int16_t> image(x.GetPixelsNum(), 10);
        // Two synthetic spots attached to every image.
        std::vector<SpotToSave> spots;
        spots.push_back({10.0f, 20.0f, 100.0f});
        spots.push_back({30.0f, 40.0f, 200.0f});
        for (int i = 0; i < x.GetImageNum(); i++) {
            DataMessage message{};
            message.image = CompressedImage(image, x.GetXPixelsNum(), x.GetYPixelsNum());
            message.spots = spots;
            message.number = i;
            message.image_collection_efficiency = 1.0f;
            REQUIRE_NOTHROW(writer.Write(message));
        }
        writer.WriteHDF5(end_message);
        auto stats = writer.Finalize();
        // Integrated format: one file, one stats entry.
        REQUIRE(stats.size() == 1);
    }

    REQUIRE(std::filesystem::exists("integrated_spots_master.h5"));
    {
        HDF5ReadOnlyFile file("integrated_spots_master.h5");
        // Detector plugin data should exist in the same file
        REQUIRE(file.Exists("/entry/detector"));
        // Image data should exist
        std::unique_ptr<HDF5DataSet> dataset;
        REQUIRE_NOTHROW(dataset = std::make_unique<HDF5DataSet>(file, "/entry/data/data"));
    }

    // No leaked HDF5 object handles.
    REQUIRE(H5Fget_obj_count(H5F_OBJ_ALL, H5F_OBJ_ALL) == 0);
    remove("integrated_spots_master.h5");
}
// NXmxIntegrated run where no images are written: the master file must still
// be created with metadata, but with no image dataset and empty statistics.
TEST_CASE("HDF5Writer_NXmxIntegrated_ZeroImages", "[HDF5][Full]") {
    DiffractionExperiment x(DetJF(1));
    x.ImagesPerTrigger(5).Compression(CompressionAlgorithm::NO_COMPRESSION)
            .FilePrefix("integrated_zero");
    x.SetFileWriterFormat(FileWriterFormat::NXmxIntegrated).OverwriteExistingFiles(true);

    {
        RegisterHDF5Filter();
        StartMessage start_message;
        x.FillMessage(start_message);
        EndMessage end_message;
        end_message.max_image_number = 0;

        FileWriter writer(start_message);
        // Write no images — just finalize
        writer.WriteHDF5(end_message);
        auto stats = writer.Finalize();
        // No data files created
        REQUIRE(stats.empty());
    }

    // Master file should still exist with metadata
    REQUIRE(std::filesystem::exists("integrated_zero_master.h5"));
    {
        HDF5ReadOnlyFile file("integrated_zero_master.h5");
        REQUIRE(file.Exists("/entry"));
        // No data dataset since no images written
        REQUIRE_THROWS(std::make_unique<HDF5DataSet>(file, "/entry/data/data"));
    }

    // No leaked HDF5 object handles.
    REQUIRE(H5Fget_obj_count(H5F_OBJ_ALL, H5F_OBJ_ALL) == 0);
    remove("integrated_zero_master.h5");
}
// NXmxIntegrated with azimuthal integration: both the bin-to-q mapping and
// the per-image azimuthal profiles must be stored inside the master file.
TEST_CASE("HDF5Writer_NXmxIntegrated_AzInt", "[HDF5][Full]") {
    DiffractionExperiment x(DetJF(1));
    // Geometry and q-binning needed for azimuthal integration setup.
    x.DetectorDistance_mm(50).BeamX_pxl(500).BeamY_pxl(500);
    x.QSpacingForAzimInt_recipA(0.1).QRangeForAzimInt_recipA(0.1, 4.0);
    x.ImagesPerTrigger(3).Compression(CompressionAlgorithm::NO_COMPRESSION)
            .FilePrefix("integrated_azint");
    x.SetFileWriterFormat(FileWriterFormat::NXmxIntegrated).OverwriteExistingFiles(true);

    PixelMask pixel_mask(x);
    AzimuthalIntegration mapping(x, pixel_mask);

    {
        RegisterHDF5Filter();
        StartMessage start_message;
        x.FillMessage(start_message);
        // Attach the azimuthal-integration bin layout to the start message.
        start_message.az_int_bin_to_q = mapping.GetBinToQ();
        start_message.az_int_phi_bin_count = mapping.GetAzimuthalBinCount();
        start_message.az_int_q_bin_count = mapping.GetQBinCount();
        EndMessage end_message;
        end_message.max_image_number = x.GetImageNum();

        FileWriter writer(start_message);
        std::vector<int16_t> image(x.GetPixelsNum(), 5);
        for (int i = 0; i < x.GetImageNum(); i++) {
            DataMessage message{};
            message.image = CompressedImage(image, x.GetXPixelsNum(), x.GetYPixelsNum());
            // Per-image azimuthal profile, filled with the image index.
            message.az_int_profile = std::vector<float>(mapping.GetBinNumber(), static_cast<float>(i));
            message.number = i;
            REQUIRE_NOTHROW(writer.Write(message));
        }
        writer.WriteHDF5(end_message);
        auto stats = writer.Finalize();
        REQUIRE(stats.size() == 1);
    }

    REQUIRE(std::filesystem::exists("integrated_azint_master.h5"));
    {
        HDF5ReadOnlyFile file("integrated_azint_master.h5");
        // Azimuthal integration bin mapping should exist (written by plugin)
        std::unique_ptr<HDF5DataSet> dataset;
        REQUIRE_NOTHROW(dataset = std::make_unique<HDF5DataSet>(file, "/entry/azint/bin_to_q"));
        // Per-image azint data should exist
        REQUIRE_NOTHROW(dataset = std::make_unique<HDF5DataSet>(file, "/entry/azint/image"));
        HDF5DataSpace space(*dataset);
        REQUIRE(space.GetNumOfDimensions() == 3);
        REQUIRE(space.GetDimensions()[0] == x.GetImageNum());
    }

    // No leaked HDF5 object handles.
    REQUIRE(H5Fget_obj_count(H5F_OBJ_ALL, H5F_OBJ_ALL) == 0);
    remove("integrated_azint_master.h5");
}
// Test that out-of-order image delivery works with NXmxIntegrated: the writer
// must accept frames in arbitrary order and still produce a complete dataset.
TEST_CASE("HDF5Writer_NXmxIntegrated_OutOfOrder", "[HDF5][Full]") {
    DiffractionExperiment x(DetJF(1));
    x.ImagesPerTrigger(5).Compression(CompressionAlgorithm::NO_COMPRESSION)
            .FilePrefix("integrated_ooo");
    x.SetFileWriterFormat(FileWriterFormat::NXmxIntegrated).OverwriteExistingFiles(true);

    {
        RegisterHDF5Filter();
        StartMessage start_message;
        x.FillMessage(start_message);
        EndMessage end_message;
        end_message.max_image_number = x.GetImageNum();

        FileWriter writer(start_message);
        std::vector<int16_t> image(x.GetPixelsNum(), 7);
        // Write images out of order
        std::vector<int> order = {3, 1, 4, 0, 2};
        for (int idx : order) {
            DataMessage message{};
            message.image = CompressedImage(image, x.GetXPixelsNum(), x.GetYPixelsNum());
            message.number = idx;
            REQUIRE_NOTHROW(writer.Write(message));
        }
        writer.WriteHDF5(end_message);
        auto stats = writer.Finalize();
        // All five frames accounted for in the single file.
        REQUIRE(stats.size() == 1);
        REQUIRE(stats[0].total_images == 5);
    }

    REQUIRE(std::filesystem::exists("integrated_ooo_master.h5"));
    {
        HDF5ReadOnlyFile file("integrated_ooo_master.h5");
        std::unique_ptr<HDF5DataSet> dataset;
        REQUIRE_NOTHROW(dataset = std::make_unique<HDF5DataSet>(file, "/entry/data/data"));
        // First dimension of the dataset covers all written images.
        HDF5DataSpace file_space(*dataset);
        REQUIRE(file_space.GetDimensions()[0] == 5);
    }

    // No leaked HDF5 object handles.
    REQUIRE(H5Fget_obj_count(H5F_OBJ_ALL, H5F_OBJ_ALL) == 0);
    remove("integrated_ooo_master.h5");
}
TEST_CASE("HDF5Writer_NoMasterFile", "[HDF5][Full]") {
DiffractionExperiment x(DetJF(1));

View File

@@ -1414,3 +1414,86 @@ TEST_CASE("JFJochReader_InstrumentMetadata_Sample_RingCurrent", "[HDF5][Full]")
remove("test_meta_master.h5");
REQUIRE(H5Fget_obj_count(H5F_OBJ_ALL, H5F_OBJ_ALL) == 0);
}
// Round-trip test: write a 3-image NXmxIntegrated dataset with per-image
// efficiency and azimuthal-integration profiles, then read it back with
// JFJochHDF5Reader and verify metadata, pixel values and per-image data.
TEST_CASE("JFJochReader_NXmxIntegrated", "[HDF5][Full]") {
DiffractionExperiment x(DetJF(1));
x.FilePrefix("test_reader_integrated").ImagesPerTrigger(3).OverwriteExistingFiles(true);
x.BitDepthImage(16).PixelSigned(false).SetFileWriterFormat(FileWriterFormat::NXmxIntegrated);
x.Compression(CompressionAlgorithm::NO_COMPRESSION);
x.BeamX_pxl(100).BeamY_pxl(200).DetectorDistance_mm(150)
.IncidentEnergy_keV(WVL_1A_IN_KEV)
.FrameTime(std::chrono::microseconds(500), std::chrono::microseconds(10));
AzimuthalIntegrationSettings azint_settings;
azint_settings.AzimuthalBinCount(4);
x.ImportAzimuthalIntegrationSettings(azint_settings);
// Test image: pixel 0 saturated, pixels 1 and 5678 carry marker values.
std::vector<uint16_t> image(x.GetPixelsNum(), 0);
image[0] = UINT16_MAX;
image[1] = 123;
image[5678] = 321;
AzimuthalIntegration azint(x, PixelMask(x));
RegisterHDF5Filter();
// Writer scope: file is finalized when file_set goes out of scope.
{
StartMessage start_message;
x.FillMessage(start_message);
// Provide the bin-to-q/phi mapping the reader is expected to recover.
start_message.az_int_bin_to_q = azint.GetBinToQ();
start_message.az_int_bin_to_phi = azint.GetBinToPhi();
start_message.az_int_q_bin_count = azint.GetQBinCount();
start_message.az_int_phi_bin_count = azint.GetAzimuthalBinCount();
FileWriter file_set(start_message);
for (int i = 0; i < x.GetImageNum(); i++) {
DataMessage message{};
// Per-image marker: image i carries 321 + i at pixel 5678.
image[5678] = 321 + i;
message.image = CompressedImage(image, x.GetXPixelsNum(), x.GetYPixelsNum());
message.number = i;
// Per-image efficiency 0.90, 0.91, 0.92 - checked after read-back below.
message.image_collection_efficiency = 0.9f + 0.01f * i;
// Constant azint profile per image, value 50 + i in every bin.
message.az_int_profile = std::vector<float>(azint_settings.GetBinCount(), static_cast<float>(50 + i));
REQUIRE_NOTHROW(file_set.WriteHDF5(message));
}
EndMessage end_message;
end_message.max_image_number = x.GetImageNum();
file_set.WriteHDF5(end_message);
file_set.Finalize();
}
// Reader scope: verify everything written above is recoverable.
{
JFJochHDF5Reader reader;
REQUIRE_NOTHROW(reader.ReadFile("test_reader_integrated_master.h5"));
auto dataset = reader.GetDataset();
CHECK(dataset->experiment.GetImageNum() == 3);
REQUIRE(dataset->efficiency.size() == 3);
CHECK(dataset->efficiency[0] == Catch::Approx(0.90f));
CHECK(dataset->efficiency[1] == Catch::Approx(0.91f));
CHECK(dataset->efficiency[2] == Catch::Approx(0.92f));
CHECK(dataset->az_int_bin_to_q.size() == azint_settings.GetBinCount());
CHECK(dataset->azimuthal_bins == azint_settings.GetAzimuthalBinCount());
CHECK(dataset->q_bins == azint_settings.GetQBinCount());
// Load the middle image (index 1) and check its pixels and per-image data.
std::shared_ptr<JFJochReaderImage> reader_image;
REQUIRE_NOTHROW(reader_image = reader.LoadImage(1));
REQUIRE(reader_image);
// Saturated input pixel (UINT16_MAX) must read back as SATURATED_PXL_VALUE.
CHECK(reader_image->Image()[0] == SATURATED_PXL_VALUE);
CHECK(reader_image->Image()[1] == 123);
CHECK(reader_image->Image()[5678] == 322);
REQUIRE(reader_image->ImageData().image_collection_efficiency.has_value());
CHECK(reader_image->ImageData().image_collection_efficiency.value() == Catch::Approx(0.91f));
REQUIRE(reader_image->ImageData().az_int_profile.size() == azint_settings.GetBinCount());
// Profile was written as a constant 51.0 for image 1; spot-check two bins
// (index 23 assumes GetBinCount() >= 24 with these settings - TODO confirm).
CHECK(reader_image->ImageData().az_int_profile[0] == Catch::Approx(51.0f));
CHECK(reader_image->ImageData().az_int_profile[23] == Catch::Approx(51.0f));
}
remove("test_reader_integrated_master.h5");
// No HDF5 handles may remain open after the test.
REQUIRE(H5Fget_obj_count(H5F_OBJ_ALL, H5F_OBJ_ALL) == 0);
}

View File

@@ -1749,4 +1749,87 @@ TEST_CASE("JFJochIntegrationTest_TCP_calibration", "[JFJochReceiver]") {
REQUIRE(!service.GetProgress().has_value());
REQUIRE_NOTHROW(writer_future.get());
}
}
// End-to-end receiver test: replay a lysozyme image through the simulated
// acquisition device, push images over TCP to a stream writer, run spot
// finding + indexing, and check the measurement completes with full
// efficiency and a 100% indexing rate (NXmxIntegrated single-file output).
TEST_CASE("JFJochIntegrationTest_TCP_lysozyme_spot_and_index_single_file", "[JFJochReceiver]") {
Logger logger(Catch::getResultCapture().getCurrentTestName());
RegisterHDF5Filter();
const uint16_t nthreads = 4;
// Experiment geometry/cell chosen to match the lysozyme test image below.
DiffractionExperiment experiment(DetJF4M());
experiment.ImagesPerTrigger(5).NumTriggers(1).UseInternalPacketGenerator(true).ImagesPerFile(2)
.FilePrefix("lyso_test_tcp_single_file").JungfrauConvPhotonCnt(false).SetFileWriterFormat(FileWriterFormat::NXmxIntegrated).OverwriteExistingFiles(true)
.DetectorDistance_mm(75).BeamY_pxl(1136).BeamX_pxl(1090).IncidentEnergy_keV(12.4)
.SetUnitCell(UnitCell{.a = 36.9, .b = 78.95, .c = 78.95, .alpha =90, .beta = 90, .gamma = 90});
experiment.SampleTemperature_K(123.0).RingCurrent_mA(115);
PixelMask pixel_mask(experiment);
// Load example image
HDF5ReadOnlyFile data("../../tests/test_data/compression_benchmark.h5");
HDF5DataSet dataset(data, "/entry/data/data");
HDF5DataSpace file_space(dataset);
REQUIRE(file_space.GetDimensions()[2] == experiment.GetXPixelsNum());
REQUIRE(file_space.GetDimensions()[1] == experiment.GetYPixelsNum());
// Read frame 4 of the benchmark file (converted geometry).
std::vector<int16_t> image_conv (file_space.GetDimensions()[1] * file_space.GetDimensions()[2]);
std::vector<hsize_t> start = {4,0,0};
std::vector<hsize_t> file_size = {1, file_space.GetDimensions()[1], file_space.GetDimensions()[2]};
dataset.ReadVector(image_conv, start, file_size);
// Convert back to raw module geometry so the packet generator can replay it.
std::vector<int16_t> image_raw_geom(experiment.GetModulesNum() * RAW_MODULE_SIZE);
ConvertedToRawGeometry(experiment, image_raw_geom.data(), image_conv.data());
logger.Info("Loaded image");
// Setup acquisition device
AcquisitionDeviceGroup aq_devices;
std::unique_ptr<HLSSimulatedDevice> test = std::make_unique<HLSSimulatedDevice>(0, 64);
for (int m = 0; m < experiment.GetModulesNum(); m++)
test->SetInternalGeneratorFrame((uint16_t *) image_raw_geom.data() + m * RAW_MODULE_SIZE, m);
aq_devices.Add(std::move(test));
// TCP plumbing: pusher (receiver side) -> puller -> stream writer, the
// writer running asynchronously for the duration of the measurement.
TCPStreamPusher pusher("tcp://127.0.0.1:9121", 1);
TCPImagePuller puller("tcp://127.0.0.1:9121");
StreamWriter writer(logger, puller);
auto writer_future = std::async(std::launch::async, &StreamWriter::Run, &writer);
JFJochReceiverService service(aq_devices, logger, pusher);
service.NumThreads(nthreads);
service.Indexing(experiment.GetIndexingSettings());
// No progress value at the start of measurement
REQUIRE(!service.GetProgress().has_value());
// Spot finding tuned so the lysozyme image indexes reliably.
SpotFindingSettings settings = DiffractionExperiment::DefaultDataProcessingSettings();
settings.signal_to_noise_threshold = 2.5;
settings.photon_count_threshold = 5;
settings.min_pix_per_spot = 1;
settings.max_pix_per_spot = 200;
settings.high_resolution_limit = 2.0;
settings.low_resolution_limit = 50.0;
service.SetSpotFindingSettings(settings);
service.Start(experiment, pixel_mask, nullptr);
auto receiver_out = service.Stop();
// All frames received and every image indexed.
CHECK(receiver_out.efficiency == 1.0);
REQUIRE(receiver_out.status.indexing_rate);
CHECK(receiver_out.status.indexing_rate.value() == 1.0);
CHECK(receiver_out.status.images_sent == experiment.GetImageNum());
CHECK(receiver_out.writer_err.empty());
CHECK(!receiver_out.status.cancelled);
// No progress value at the end of measurement
REQUIRE(!service.GetProgress().has_value());
REQUIRE_NOTHROW(writer_future.get());
// The writer must acknowledge having written every image that was sent.
auto ack = pusher.GetImagesWritten();
REQUIRE(ack.has_value());
CHECK(ack == experiment.GetImageNum());
}

257
tests/XDSPluginTest.cpp Normal file
View File

@@ -0,0 +1,257 @@
// SPDX-FileCopyrightText: 2025 Filip Leonarski, Paul Scherrer Institute <filip.leonarski@psi.ch>
// SPDX-License-Identifier: GPL-3.0-only
#include <catch2/catch_all.hpp>
#include "../common/DiffractionExperiment.h"
#include "../writer/FileWriter.h"
#include "../xds-plugin/plugin.h"
namespace {

// Best-effort removal of the master file plus the numbered data files
// ("<prefix>_data_NNNNNN.h5", numbered from 1) created by a test.
void CleanupFiles(const std::string& prefix, int file_count) {
    remove((prefix + "_master.h5").c_str());
    for (int file_id = 1; file_id <= file_count; ++file_id) {
        char name[256];
        snprintf(name, sizeof(name), "%s_data_%06d.h5", prefix.c_str(), file_id);
        remove(name);
    }
}

// Close the XDS plugin and assert it reported no error.
void ClosePlugin() {
    int error_flag = 0;
    plugin_close(&error_flag);
    REQUIRE(error_flag == 0);
}

} // namespace
TEST_CASE("XDSPlugin_GetData_VDS", "[HDF5][XDS][Plugin]") {
    // Write a 3-image NXmxVDS dataset (one image per data file) and read it
    // back through the XDS plugin, checking header metadata and pixel values.
    DiffractionExperiment x(DetJF(1));
    x.FilePrefix("test_xds_vds").ImagesPerTrigger(3).ImagesPerFile(1).OverwriteExistingFiles(true);
    x.SetFileWriterFormat(FileWriterFormat::NXmxVDS);
    x.BitDepthImage(16).PixelSigned(false);
    x.BeamX_pxl(100).BeamY_pxl(200).DetectorDistance_mm(150)
    .IncidentEnergy_keV(WVL_1A_IN_KEV)
    .FrameTime(std::chrono::microseconds(500), std::chrono::microseconds(10));
    x.Compression(CompressionAlgorithm::NO_COMPRESSION);
    RegisterHDF5Filter();
    std::vector<uint32_t> pixel_mask(x.GetPixelsNum(), 0);
    // Test image: pixel 0 saturated (expected to read back as -1 via the
    // plugin, matching the check below), pixel 1 carries a marker value.
    std::vector<uint16_t> image(x.GetPixelsNum(), 7);
    image[0] = UINT16_MAX;
    image[1] = 123;
    {
        StartMessage start_message;
        x.FillMessage(start_message);
        start_message.pixel_mask["default"] = pixel_mask;
        FileWriter file_set(start_message);
        for (int i = 0; i < 3; ++i) {
            DataMessage message{};
            // Per-image marker so frames can be distinguished on read-back.
            image[5678] = static_cast<uint16_t>(200 + i);
            message.image = CompressedImage(image, x.GetXPixelsNum(), x.GetYPixelsNum());
            message.number = i;
            REQUIRE_NOTHROW(file_set.WriteHDF5(message));
        }
        EndMessage end_message;
        end_message.max_image_number = 3;
        file_set.WriteHDF5(end_message);
        file_set.Finalize();
    }
    {
        int info[1024] = {};
        int error_flag = 0;
        REQUIRE_NOTHROW(plugin_open("test_xds_vds_master.h5", info, &error_flag));
        REQUIRE(error_flag == 0);
        int nx = 0, ny = 0, nbytes = 0, number_of_frames = 0;
        float qx = 0.0f, qy = 0.0f;
        REQUIRE_NOTHROW(plugin_get_header(&nx, &ny, &nbytes, &qx, &qy, &number_of_frames, info, &error_flag));
        REQUIRE(error_flag == 0);
        CHECK(nx == static_cast<int>(x.GetXPixelsNum()));
        CHECK(ny == static_cast<int>(x.GetYPixelsNum()));
        // Pixel size checks added for consistency with XDSPlugin_GetData_Integrated;
        // previously qx/qy were fetched but never verified.
        CHECK(qx == Catch::Approx(0.075));
        CHECK(qy == Catch::Approx(0.075));
        CHECK(nbytes == 2);
        CHECK(number_of_frames == 3);
        std::vector<int> data(static_cast<size_t>(nx) * static_cast<size_t>(ny), 0);
        // XDS frame numbers are 1-based: frame 2 is the image written with i == 1.
        int frame_number = 2;
        REQUIRE_NOTHROW(plugin_get_data(&frame_number, &nx, &ny, data.data(), info, &error_flag));
        REQUIRE(error_flag == 0);
        CHECK(data[0] == -1);
        CHECK(data[1] == 123);
        CHECK(data[5678] == 201);
        ClosePlugin();
    }
    CleanupFiles("test_xds_vds", 3);
    // No HDF5 handles may remain open after the test.
    REQUIRE(H5Fget_obj_count(H5F_OBJ_ALL, H5F_OBJ_ALL) == 0);
}
// Write a 2-image NXmxLegacy dataset with signed 16-bit pixels and read it
// back through the XDS plugin; INT16_MAX/INT16_MIN pixels are expected to
// map to -1, and the per-frame marker pixel is used to verify frame order.
TEST_CASE("XDSPlugin_GetData_Legacy", "[HDF5][XDS][Plugin]") {
DiffractionExperiment x(DetJF(1));
x.FilePrefix("test_xds_legacy").ImagesPerTrigger(2).ImagesPerFile(1).OverwriteExistingFiles(true);
x.SetFileWriterFormat(FileWriterFormat::NXmxLegacy);
x.BitDepthImage(16).PixelSigned(true);
x.BeamX_pxl(100).BeamY_pxl(200).DetectorDistance_mm(150)
.IncidentEnergy_keV(WVL_1A_IN_KEV)
.FrameTime(std::chrono::microseconds(500), std::chrono::microseconds(10));
x.Compression(CompressionAlgorithm::NO_COMPRESSION);
RegisterHDF5Filter();
std::vector<uint32_t> pixel_mask(x.GetPixelsNum(), 0);
// Extreme signed values at pixels 0/1 (both checked to become -1 below),
// plus a regular marker value at pixel 2.
std::vector<int16_t> image(x.GetPixelsNum(), 0);
image[0] = INT16_MAX;
image[1] = INT16_MIN;
image[2] = 456;
// Writer scope: files are finalized before the plugin opens them.
{
StartMessage start_message;
x.FillMessage(start_message);
start_message.pixel_mask["default"] = pixel_mask;
FileWriter file_set(start_message);
for (int i = 0; i < 2; ++i) {
DataMessage message{};
// Per-image marker: image i carries 10 + i at pixel 5678.
image[5678] = static_cast<int16_t>(10 + i);
message.image = CompressedImage(image, x.GetXPixelsNum(), x.GetYPixelsNum());
message.number = i;
REQUIRE_NOTHROW(file_set.WriteHDF5(message));
}
EndMessage end_message;
end_message.max_image_number = 2;
file_set.WriteHDF5(end_message);
file_set.Finalize();
}
// Plugin scope: open master file and fetch both frames.
{
int info[1024] = {};
int error_flag = 0;
REQUIRE_NOTHROW(plugin_open("test_xds_legacy_master.h5", info, &error_flag));
REQUIRE(error_flag == 0);
int nx = 0, ny = 0, nbytes = 0, number_of_frames = 0;
float qx = 0.0f, qy = 0.0f;
REQUIRE_NOTHROW(plugin_get_header(&nx, &ny, &nbytes, &qx, &qy, &number_of_frames, info, &error_flag));
REQUIRE(error_flag == 0);
REQUIRE(number_of_frames == 2);
std::vector<int> data(static_cast<size_t>(nx) * static_cast<size_t>(ny), 0);
// XDS frame numbers are 1-based: frame 1 is the image written with i == 0.
int frame_number = 1;
REQUIRE_NOTHROW(plugin_get_data(&frame_number, &nx, &ny, data.data(), info, &error_flag));
REQUIRE(error_flag == 0);
CHECK(data[0] == -1);
CHECK(data[1] == -1);
CHECK(data[2] == 456);
CHECK(data[5678] == 10);
// Frame 2 must return the second image's marker value.
frame_number = 2;
REQUIRE_NOTHROW(plugin_get_data(&frame_number, &nx, &ny, data.data(), info, &error_flag));
REQUIRE(error_flag == 0);
CHECK(data[5678] == 11);
ClosePlugin();
}
CleanupFiles("test_xds_legacy", 2);
// No HDF5 handles may remain open after the test.
REQUIRE(H5Fget_obj_count(H5F_OBJ_ALL, H5F_OBJ_ALL) == 0);
}
TEST_CASE("XDSPlugin_GetData_Integrated", "[HDF5][XDS][Plugin]") {
    // Round-trip a single-master-file NXmxIntegrated dataset (with azimuthal
    // integration profiles) through the XDS plugin and verify header + pixels.
    DiffractionExperiment experiment(DetJF(1));
    experiment.FilePrefix("test_xds_integrated").ImagesPerTrigger(3).OverwriteExistingFiles(true);
    experiment.SetFileWriterFormat(FileWriterFormat::NXmxIntegrated);
    experiment.BitDepthImage(16).PixelSigned(false);
    experiment.BeamX_pxl(100).BeamY_pxl(200).DetectorDistance_mm(150)
            .IncidentEnergy_keV(WVL_1A_IN_KEV)
            .FrameTime(std::chrono::microseconds(500), std::chrono::microseconds(10));
    experiment.Compression(CompressionAlgorithm::NO_COMPRESSION);
    RegisterHDF5Filter();

    AzimuthalIntegrationSettings azint_settings;
    azint_settings.AzimuthalBinCount(4);
    experiment.ImportAzimuthalIntegrationSettings(azint_settings);

    // Test image: pixel 0 saturated, pixels 1 and 5678 carry marker values.
    std::vector<uint16_t> frame(experiment.GetPixelsNum(), 0);
    frame[0] = UINT16_MAX;
    frame[1] = 321;
    frame[5678] = 777;

    {
        StartMessage start_message;
        experiment.FillMessage(start_message);
        start_message.pixel_mask["default"] = std::vector<uint32_t>(experiment.GetPixelsNum(), 0);
        FileWriter file_set(start_message);
        for (int i = 0; i < 3; ++i) {
            // Per-image marker so frames can be distinguished on read-back.
            frame[5678] = static_cast<uint16_t>(777 + i);
            DataMessage message{};
            message.image = CompressedImage(frame, experiment.GetXPixelsNum(), experiment.GetYPixelsNum());
            message.number = i;
            message.az_int_profile = std::vector<float>(azint_settings.GetBinCount(), static_cast<float>(10 + i));
            REQUIRE_NOTHROW(file_set.WriteHDF5(message));
        }
        EndMessage end_message;
        end_message.max_image_number = 3;
        file_set.WriteHDF5(end_message);
        file_set.Finalize();
    }
    {
        int info[1024] = {};
        int error_flag = 0;
        REQUIRE_NOTHROW(plugin_open("test_xds_integrated_master.h5", info, &error_flag));
        REQUIRE(error_flag == 0);

        int nx = 0, ny = 0, nbytes = 0, number_of_frames = 0;
        float qx = 0.0f, qy = 0.0f;
        REQUIRE_NOTHROW(plugin_get_header(&nx, &ny, &nbytes, &qx, &qy, &number_of_frames, info, &error_flag));
        REQUIRE(error_flag == 0);
        CHECK(nx == static_cast<int>(experiment.GetXPixelsNum()));
        CHECK(ny == static_cast<int>(experiment.GetYPixelsNum()));
        CHECK(qx == Catch::Approx(0.075));
        CHECK(qy == Catch::Approx(0.075));
        CHECK(nbytes == 2);
        CHECK(number_of_frames == 3);

        // XDS frame numbers are 1-based: frame 3 is the image written with i == 2.
        std::vector<int> pixels(static_cast<size_t>(nx) * static_cast<size_t>(ny), 0);
        int frame_number = 3;
        REQUIRE_NOTHROW(plugin_get_data(&frame_number, &nx, &ny, pixels.data(), info, &error_flag));
        REQUIRE(error_flag == 0);
        CHECK(pixels[0] == -1);
        CHECK(pixels[1] == 321);
        CHECK(pixels[5678] == 779);
        ClosePlugin();
    }
    // NXmxIntegrated produces only the master file - no data files to remove.
    remove("test_xds_integrated_master.h5");
    REQUIRE(H5Fget_obj_count(H5F_OBJ_ALL, H5F_OBJ_ALL) == 0);
}

73
tests/xds/XDS.INP Normal file
View File

@@ -0,0 +1,73 @@
!Settings for EIGER16M @ X06SA SLS
!job control
JOB=XYCORR INIT COLSPOT IDXREF DEFPIX INTEGRATE CORRECT
MAXIMUM_NUMBER_OF_JOBS=1
MAXIMUM_NUMBER_OF_PROCESSORS=1
! for this experiment:
ORGX= 1097
ORGY= 1130
DETECTOR_DISTANCE= 75
OSCILLATION_RANGE= 0.088
X-RAY_WAVELENGTH= 1.0
NAME_TEMPLATE_OF_DATA_FRAMES=writing_test_??????.h5
DATA_RANGE= 1 25
!DATA_RANGE_FIXED_SCALE_FACTOR= 1900 6000 1.0
SPOT_RANGE= 1 25
!BACKGROUND_RANGE= 1 11
!REFERENCE_DATA_SET=
SPACE_GROUP_NUMBER=96
UNIT_CELL_CONSTANTS= 78.95 78.95 36.91 90.000 90.000 90.000
!REIDX= 0 0 -1 0 0 -1 0 0 -1 0 0 0
REFINE(IDXREF)=BEAM AXIS ORIENTATION POSITION !CELL
REFINE(INTEGRATE)=POSITION BEAM ORIENTATION CELL AXIS
REFINE(CORRECT)=POSITION BEAM ORIENTATION CELL AXIS
FRIEDEL'S_LAW=FALSE
STRICT_ABSORPTION_CORRECTION=FALSE ! but read Tips_and_Tricks in XDSwiki
! parameters with changes wrt default values:
TRUSTED_REGION=0.00 1.21
VALUE_RANGE_FOR_TRUSTED_DETECTOR_PIXELS=4000. 30000.
!MINIMUM_ZETA=0.05
CORRECTIONS=DECAY MODULATION ABSORP !default value
INCLUDE_RESOLUTION_RANGE=50 0.0
! parameters specifically for this detector and beamline:
DETECTOR=EIGER NX=2068 NY=2164 QX=0.075 QY=0.075 !EIGER 16M
! Defines path to the Dectris neggia library (to use H5ToXds comment this line out)
LIB=../../build/xds-plugin/libjfjoch_xds_plugin.so
MINIMUM_VALID_PIXEL_VALUE=0 OVERLOAD=30000
DIRECTION_OF_DETECTOR_X-AXIS=1 0 0
DIRECTION_OF_DETECTOR_Y-AXIS=0 1 0
INCIDENT_BEAM_DIRECTION=0 0 1
ROTATION_AXIS=1 0 0
FRACTION_OF_POLARIZATION=0.99
POLARIZATION_PLANE_NORMAL=0 1 0
SENSOR_THICKNESS=0.32
!EXCLUSION OF HORIZONTAL DEAD AREAS OF THE EIGER 16M DETECTOR + ONE PIXEL ON EACH SIDE
! UNTRUSTED_RECTANGLE= 0 4151 513 553
! UNTRUSTED_RECTANGLE= 0 4151 1064 1104
! UNTRUSTED_RECTANGLE= 0 4151 1615 1655
! UNTRUSTED_RECTANGLE= 0 4151 2166 2206
! UNTRUSTED_RECTANGLE= 0 4151 2717 2757
! UNTRUSTED_RECTANGLE= 0 4151 3268 3308
! UNTRUSTED_RECTANGLE= 0 4151 3819 3859
!EXCLUSION OF VERTICAL DEAD AREAS OF THE EIGER 16M DETECTOR + ONE PIXEL ON EACH SIDE
! UNTRUSTED_RECTANGLE= 1029 1042 0 4372
! UNTRUSTED_RECTANGLE= 2069 2082 0 4372
! UNTRUSTED_RECTANGLE= 3109 3122 0 4372
UNTRUSTED_RECTANGLE=1036 2017 1082 1637
NUMBER_OF_PROFILE_GRID_POINTS_ALONG_ALPHA/BETA=21 !used by: INTEGRATE
!MINIMUM_NUMBER_OF_PIXELS_IN_A_SPOT=
MINIMUM_FRACTION_OF_INDEXED_SPOTS=0.2
SEPMIN=4 CLUSTER_RADIUS=2
SIGNAL_PIXEL=6

View File

@@ -26,7 +26,7 @@ int main(int argc, char **argv) {
std::optional<float> rotation;
int opt;
while ((opt = getopt(argc, argv, "o:n:Vf:R:")) != -1) {
while ((opt = getopt(argc, argv, "o:n:Vf:R:S")) != -1) {
switch (opt) {
case 'o':
prefix = optarg;
@@ -37,6 +37,9 @@ int main(int argc, char **argv) {
case 'V':
format = FileWriterFormat::NXmxVDS;
break;
case 'S':
format = FileWriterFormat::NXmxIntegrated;
break;
case 'R':
rotation = atof(optarg);
break;

View File

@@ -1,4 +1,6 @@
ADD_LIBRARY(JFJochHDF5Wrappers STATIC HDF5Objects.cpp HDF5Objects.h ../compression/bitshuffle/bshuf_h5filter.c)
set_target_properties(JFJochHDF5Wrappers PROPERTIES POSITION_INDEPENDENT_CODE ON)
TARGET_LINK_LIBRARIES(JFJochHDF5Wrappers Compression hdf5-static)
ADD_LIBRARY(JFJochWriter STATIC

View File

@@ -25,6 +25,7 @@ FileWriter::FileWriter(const StartMessage &request)
switch (format) {
case FileWriterFormat::NXmxLegacy:
case FileWriterFormat::NXmxVDS:
case FileWriterFormat::NXmxIntegrated:
CreateHDF5MasterFile(request);
break;
case FileWriterFormat::CBF:
@@ -42,6 +43,7 @@ void FileWriter::Write(const DataMessage &msg) {
case FileWriterFormat::DataOnly:
case FileWriterFormat::NXmxLegacy:
case FileWriterFormat::NXmxVDS:
case FileWriterFormat::NXmxIntegrated:
WriteHDF5(msg);
break;
case FileWriterFormat::CBF:
@@ -70,8 +72,8 @@ void FileWriter::WriteHDF5(const DataMessage& msg) {
if (msg.number < 0)
throw JFJochException(JFJochExceptionCategory::ArrayOutOfBounds, "No support for negative images");
const uint64_t file_number = msg.number / start_message.images_per_file;
const uint64_t image_number = msg.number % start_message.images_per_file;
const uint64_t file_number = (start_message.images_per_file == 0) ? 0 : msg.number / start_message.images_per_file;
const uint64_t image_number = (start_message.images_per_file == 0) ? msg.number : msg.number % start_message.images_per_file;
if (closed_files.contains(file_number))
return;
@@ -79,9 +81,11 @@ void FileWriter::WriteHDF5(const DataMessage& msg) {
if (files.size() <= file_number)
files.resize(file_number + 1);
if (!files[file_number])
if (!files[file_number]) {
files[file_number] = std::make_unique<HDF5DataFile>(start_message, file_number);
if (format == FileWriterFormat::NXmxIntegrated && master_file)
files[file_number]->CreateFile(msg, master_file->GetFile());
}
files[file_number]->Write(msg, image_number);
if (files[file_number]->GetNumImages() == start_message.images_per_file) {
@@ -121,13 +125,13 @@ void FileWriter::CloseOldFiles(uint64_t current_image_number) {
std::vector<HDF5DataFileStatistics> FileWriter::Finalize() {
std::lock_guard<std::mutex> lock(hdf5_mutex);
if (master_file)
master_file.reset();
for (uint64_t f = 0; f < files.size(); ++f) {
if (files[f] && !closed_files.contains(f))
CloseFile(f);
}
if (master_file)
master_file.reset();
return stats;
}

View File

@@ -46,7 +46,7 @@ HDF5DataFile::HDF5DataFile(const StartMessage &msg, uint64_t in_file_number) {
}
tmp_filename = fmt::format("{}.{:08x}.tmp", filename, tmp_suffix);
plugins.emplace_back(std::make_unique<HDF5DataFilePluginROI>());
plugins.emplace_back(std::make_unique<HDF5DataFilePluginDetector>());
plugins.emplace_back(std::make_unique<HDF5DataFilePluginDetector>(msg));
plugins.emplace_back(std::make_unique<HDF5DataFilePluginAzInt>(msg));
plugins.emplace_back(std::make_unique<HDF5DataFilePluginXFEL>());
plugins.emplace_back(std::make_unique<HDF5DataFilePluginMX>(msg));
@@ -77,7 +77,7 @@ std::optional<HDF5DataFileStatistics> HDF5DataFile::Close() {
}
data_file.reset();
if (!std::filesystem::exists(filename.c_str()) || overwrite)
if (manage_file && (!std::filesystem::exists(filename.c_str()) || overwrite))
std::rename(tmp_filename.c_str(), filename.c_str());
closed = true;
@@ -102,7 +102,7 @@ HDF5DataFile::~HDF5DataFile() {
}
}
void HDF5DataFile::CreateFile(const DataMessage& msg) {
void HDF5DataFile::CreateFile(const DataMessage& msg, std::shared_ptr<HDF5File> in_data_file, bool integrated) {
HDF5Dcpl dcpl;
HDF5DataType data_type(msg.image.GetMode());
@@ -130,7 +130,7 @@ void HDF5DataFile::CreateFile(const DataMessage& msg) {
break;
}
data_file = std::make_unique<HDF5File>(tmp_filename);
data_file = in_data_file;
HDF5Group(*data_file, "/entry").NXClass("NXentry");
HDF5Group(*data_file, "/entry/data").NXClass("NXdata");
@@ -149,11 +149,10 @@ void HDF5DataFile::Write(const DataMessage &msg, uint64_t image_number) {
if (image_number >= images_per_file)
throw JFJochException(JFJochExceptionCategory::FileWriteError,
"Image number out of bounds");
bool new_file = false;
if (!data_file) {
CreateFile(msg);
new_file = true;
manage_file = true;
CreateFile(msg, std::make_shared<HDF5File>(tmp_filename));
}
if (new_file || (static_cast<int64_t>(image_number) > max_image_number)) {
@@ -161,6 +160,7 @@ void HDF5DataFile::Write(const DataMessage &msg, uint64_t image_number) {
timestamp.resize(max_image_number + 1);
exptime.resize(max_image_number + 1);
number.resize(max_image_number + 1);
new_file = false;
}
nimages++;

View File

@@ -25,7 +25,7 @@ class HDF5DataFile {
std::string filename;
std::string tmp_filename;
std::unique_ptr<HDF5File> data_file = nullptr;
std::shared_ptr<HDF5File> data_file = nullptr;
std::unique_ptr<HDF5DataSet> data_set = nullptr;
std::unique_ptr<HDF5DataSet> data_set_image_number = nullptr;
std::vector<std::unique_ptr<HDF5DataFilePlugin>> plugins;
@@ -47,13 +47,16 @@ class HDF5DataFile {
bool overwrite = false;
int64_t file_number;
void CreateFile(const DataMessage& msg);
bool new_file = true;
bool manage_file = false;
public:
HDF5DataFile(const StartMessage &msg, uint64_t file_number);
~HDF5DataFile();
std::optional<HDF5DataFileStatistics> Close();
void Write(const DataMessage& msg, uint64_t image_number);
size_t GetNumImages() const;
void CreateFile(const DataMessage& msg, std::shared_ptr<HDF5File> data_file, bool integrated = false);
};
#endif //HDF5DATAFILE_H

View File

@@ -25,14 +25,13 @@ void HDF5DataFilePluginAzInt::OpenFile(HDF5File &data_file, const DataMessage &m
data_file.SaveVector("/entry/azint/bin_to_phi", az_int_bin_to_phi, dim);
az_int_image.reserve(images_per_file * azimuthal_bins * q_bins);
az_int_image.resize(msg.number * azimuthal_bins * q_bins);
}
void HDF5DataFilePluginAzInt::Write(const DataMessage &msg, uint64_t image_number) {
if (az_int_bin_to_q.empty() || q_bins <= 0 || azimuthal_bins <= 0)
return;
if (static_cast<int64_t>(image_number) >= max_image_number) {
if (image_number >= max_image_number || (max_image_number == 0)) {
max_image_number = image_number;
az_int_image.resize((max_image_number + 1) * azimuthal_bins * q_bins);
}

View File

@@ -3,6 +3,10 @@
#include "HDF5DataFilePluginDetector.h"
HDF5DataFilePluginDetector::HDF5DataFilePluginDetector(const StartMessage &msg) {
integrated_format = (msg.file_format == FileWriterFormat::NXmxIntegrated);
}
void HDF5DataFilePluginDetector::OpenFile(HDF5File &in_data_file, const DataMessage &msg, size_t images_per_file) {
jf_info.reserve(images_per_file);
storage_cell.reserve(images_per_file);
@@ -42,22 +46,24 @@ void HDF5DataFilePluginDetector::Write(const DataMessage &msg, uint64_t image_nu
}
void HDF5DataFilePluginDetector::WriteFinal(HDF5File &data_file) {
if (!jf_info.empty())
data_file.SaveVector("/entry/detector/det_info", jf_info.vec());
if (!storage_cell.empty())
data_file.SaveVector("/entry/detector/storage_cell_image", storage_cell.vec());
if (!receiver_aq_dev_delay.empty())
data_file.SaveVector("/entry/detector/rcv_delay", receiver_aq_dev_delay.vec());
if (!receiver_free_buffers.empty())
data_file.SaveVector("/entry/detector/rcv_free_send_buffers", receiver_free_buffers.vec());
if (!packets_received.empty())
data_file.SaveVector("/entry/detector/packets_received", packets_received.vec());
if (!packets_expected.empty())
data_file.SaveVector("/entry/detector/packets_expected", packets_expected.vec());
if (!pixel_sum.empty())
data_file.SaveVector("/entry/detector/pixel_sum", pixel_sum.vec());
if (!processing_time.empty())
data_file.SaveVector("/entry/detector/processing_time", processing_time.vec())->Units("s");
const std::string prefix = integrated_format ? "/entry/instrument/detector/detectorSpecific" : "/entry/detector";
data_file.SaveVector("/entry/detector/data_collection_efficiency_image", efficiency.vec());
if (!jf_info.empty())
data_file.SaveVector(prefix + "/det_info", jf_info.vec());
if (!storage_cell.empty())
data_file.SaveVector(prefix + "/storage_cell_image", storage_cell.vec());
if (!receiver_aq_dev_delay.empty())
data_file.SaveVector(prefix + "/rcv_delay", receiver_aq_dev_delay.vec());
if (!receiver_free_buffers.empty())
data_file.SaveVector(prefix + "/rcv_free_send_buffers", receiver_free_buffers.vec());
if (!packets_received.empty())
data_file.SaveVector(prefix + "/packets_received", packets_received.vec());
if (!packets_expected.empty())
data_file.SaveVector(prefix + "/packets_expected", packets_expected.vec());
if (!pixel_sum.empty())
data_file.SaveVector(prefix + "/pixel_sum", pixel_sum.vec());
if (!processing_time.empty())
data_file.SaveVector(prefix + "/processing_time", processing_time.vec())->Units("s");
data_file.SaveVector(prefix + "/data_collection_efficiency_image", efficiency.vec());
}

View File

@@ -8,6 +8,7 @@
#include "../common/AutoIncrVector.h"
class HDF5DataFilePluginDetector : public HDF5DataFilePlugin {
bool integrated_format = false;
AutoIncrVector<uint64_t> jf_info;
AutoIncrVector<uint8_t> storage_cell;
AutoIncrVector<uint64_t> receiver_aq_dev_delay;
@@ -18,6 +19,7 @@ class HDF5DataFilePluginDetector : public HDF5DataFilePlugin {
AutoIncrVector<int64_t> pixel_sum;
AutoIncrVector<float> processing_time;
public:
HDF5DataFilePluginDetector(const StartMessage& msg);
void OpenFile(HDF5File &data_file, const DataMessage& msg, size_t images_per_file) override;
void Write(const DataMessage& msg, uint64_t image_number) override;
void WriteFinal(HDF5File &data_file) override;

View File

@@ -11,9 +11,15 @@
#include "../common/time_utc.h"
#include "gemmi/symmetry.hpp"
namespace {
std::string GenFilename(const StartMessage &start) {
return fmt::format("{:s}_master.h5", start.file_prefix);
}
}
NXmx::NXmx(const StartMessage &start)
: start_message(start),
filename(start.file_prefix + "_master.h5") {
filename(GenFilename(start)) {
uint64_t tmp_suffix;
try {
if (!start.arm_date.empty())
@@ -31,7 +37,7 @@ NXmx::NXmx(const StartMessage &start)
bool v1_10 = (start.file_format == FileWriterFormat::NXmxVDS);
hdf5_file = std::make_unique<HDF5File>(tmp_filename, v1_10);
hdf5_file = std::make_shared<HDF5File>(tmp_filename, v1_10);
hdf5_file->Attr("file_name", filename);
hdf5_file->Attr("HDF5_Version", hdf5_version());
HDF5Group(*hdf5_file, "/entry").NXClass("NXentry").SaveScalar("definition", "NXmx");
@@ -52,6 +58,8 @@ NXmx::~NXmx() {
std::rename(tmp_filename.c_str(), filename.c_str());
}
std::string HDF5Metadata::DataFileName(const StartMessage &msg, int64_t file_number) {
if (file_number < 0)
throw JFJochException(JFJochExceptionCategory::InputParameterInvalid,
@@ -184,7 +192,7 @@ void NXmx::LinkToData_VDS(const StartMessage &start, const EndMessage &end) {
if (!start.az_int_bin_to_q.empty()) {
size_t azimuthal_bins = start.az_int_phi_bin_count.value_or(1);
size_t q_bins = start.az_int_q_bin_count.value_or(1);
if (q_bins > 0 & azimuthal_bins > 0) {
if (q_bins > 0 && azimuthal_bins > 0) {
VDS(start, "/entry/azint/image",
{total_images, azimuthal_bins, q_bins},
HDF5DataType(0.0f));
@@ -657,13 +665,17 @@ void NXmx::AzimuthalIntegration(const StartMessage &start, const EndMessage &end
HDF5Group az_int_group(*hdf5_file, "/entry/azint");
az_int_group.NXClass("NXcollection");
az_int_group.SaveVector("bin_to_q", start.az_int_bin_to_q, dim)->Units("reciprocal Angstrom");
if (!start.az_int_bin_to_two_theta.empty())
az_int_group.SaveVector("bin_to_two_theta", start.az_int_bin_to_two_theta, dim)->Units("degrees");
if (!start.az_int_bin_to_phi.empty())
az_int_group.SaveVector("bin_to_phi", start.az_int_bin_to_phi, dim)->Units("degrees");
for (const auto &[x,y]: end.az_int_result)
az_int_group.SaveVector(x, y, dim);
if (start.file_format != FileWriterFormat::NXmxIntegrated) {
az_int_group.SaveVector("bin_to_q", start.az_int_bin_to_q, dim)->Units("reciprocal Angstrom");
if (!start.az_int_bin_to_two_theta.empty())
az_int_group.SaveVector("bin_to_two_theta", start.az_int_bin_to_two_theta, dim)->Units("degrees");
if (!start.az_int_bin_to_phi.empty())
az_int_group.SaveVector("bin_to_phi", start.az_int_bin_to_phi, dim)->Units("degrees");
}
for (const auto &[x,y]: end.az_int_result) {
if (x != "image")
az_int_group.SaveVector(x, y, dim);
}
}
}
@@ -693,10 +705,17 @@ void NXmx::Finalize(const EndMessage &end) {
AzimuthalIntegration(start_message, end);
ADUHistogram(end);
if (start_message.file_format == FileWriterFormat::NXmxVDS)
LinkToData_VDS(start_message, end);
else
LinkToData(start_message, end);
switch (start_message.file_format.value_or(FileWriterFormat::NXmxLegacy)) {
case FileWriterFormat::NXmxLegacy:
LinkToData(start_message, end);
break;
case FileWriterFormat::NXmxVDS:
LinkToData_VDS(start_message, end);
break;
case FileWriterFormat::NXmxIntegrated:
default:
break;
}
if (end.rotation_lattice)
SaveVector(*hdf5_file, "/entry/MX/rotationLatticeIndexed", end.rotation_lattice->GetVector())
@@ -731,3 +750,7 @@ void NXmx::UserData(const StartMessage &start) {
}
}
}
std::shared_ptr<HDF5File> NXmx::GetFile() {
return hdf5_file;
}

View File

@@ -13,7 +13,7 @@ namespace HDF5Metadata {
}
class NXmx {
std::unique_ptr<HDF5File> hdf5_file;
std::shared_ptr<HDF5File> hdf5_file;
const StartMessage start_message;
const std::string filename;
std::string tmp_filename;
@@ -58,6 +58,8 @@ public:
NXmx& operator=(const NXmx &other) = delete;
void Finalize(const EndMessage &end);
void WriteCalibration(const CompressedImage &image);
std::shared_ptr<HDF5File> GetFile();
};
#endif //JUNGFRAUJOCH_HDF5NXMX_H

View File

@@ -315,76 +315,119 @@ void HDF5Fapl::SetVersionTo1p10orNewer() {
H5Pset_libver_bounds(id, H5F_LIBVER_V110, H5F_LIBVER_LATEST);
}
// Write a scalar attribute `name` on `object`, creating it if absent.
// If an attribute with the same name but a different datatype already exists,
// it is deleted and recreated with the new type.
//
// NOTE(review): the previous check compared only H5T class and size, so e.g.
// an existing int32 attribute matched a uint32 value and the write went
// through a silent value conversion. H5Tequal requires an exact type match.
template <typename T>
static HDF5Object& WriteOrCreateScalarAttr(HDF5Object& object, const std::string& name, const T& val) {
    HDF5DataSpace dataspace;
    HDF5DataType datatype(val);

    hid_t attr_id = -1;
    if (H5Aexists(object.GetID(), name.c_str()) > 0) {
        attr_id = H5Aopen(object.GetID(), name.c_str(), H5P_DEFAULT);
        if (attr_id < 0)
            throw JFJochException(JFJochExceptionCategory::HDF5, "Cannot open attribute " + name);

        hid_t existing_type = H5Aget_type(attr_id);
        if (existing_type < 0) {
            H5Aclose(attr_id);
            throw JFJochException(JFJochExceptionCategory::HDF5, "Cannot get attribute type " + name);
        }

        const htri_t same_type = H5Tequal(existing_type, datatype.GetID());
        H5Tclose(existing_type);

        if (same_type < 0) {
            H5Aclose(attr_id);
            throw JFJochException(JFJochExceptionCategory::HDF5, "Cannot compare attribute type " + name);
        }

        if (same_type == 0) {
            // Type mismatch: drop the old attribute and recreate it.
            H5Aclose(attr_id);
            if (H5Adelete(object.GetID(), name.c_str()) < 0)
                throw JFJochException(JFJochExceptionCategory::HDF5, "Cannot delete attribute " + name);
            attr_id = H5Acreate2(object.GetID(), name.c_str(), datatype.GetID(), dataspace.GetID(), H5P_DEFAULT, H5P_DEFAULT);
        }
        // Types match: keep the already-open attribute handle (no close/reopen).
    } else {
        attr_id = H5Acreate2(object.GetID(), name.c_str(), datatype.GetID(), dataspace.GetID(), H5P_DEFAULT, H5P_DEFAULT);
    }

    if (attr_id < 0)
        throw JFJochException(JFJochExceptionCategory::HDF5, "Cannot create/open attribute " + name);

    herr_t ret = H5Awrite(attr_id, datatype.GetID(), &val);
    H5Aclose(attr_id);
    if (ret < 0)
        throw JFJochException(JFJochExceptionCategory::HDF5, "Attribute write unsuccessful");
    return object;
}
// Write a string attribute, overwriting an existing one when possible.
// If the existing attribute is not a string, or is too small to hold the new
// value (including the terminating NUL), it is deleted and recreated.
// Fix: the diff left a duplicate `attr_id` declaration (old unconditional
// H5Acreate2 plus the new `-1` initializer), making the function ill-formed.
HDF5Object & HDF5Object::Attr(const std::string &name, const std::string &val) {
    HDF5DataSpace dataspace;
    HDF5DataType datatype(val);

    hid_t attr_id = -1;
    if (H5Aexists(id, name.c_str()) > 0) {
        attr_id = H5Aopen(id, name.c_str(), H5P_DEFAULT);
        if (attr_id < 0)
            throw JFJochException(JFJochExceptionCategory::HDF5, "Cannot open attribute " + name);

        hid_t existing_type = H5Aget_type(attr_id);
        if (existing_type < 0) {
            H5Aclose(attr_id);
            throw JFJochException(JFJochExceptionCategory::HDF5, "Cannot get attribute type " + name);
        }

        // Recreate when the stored attribute is not a string or cannot hold
        // val plus the terminating NUL.
        const bool recreate =
                (H5Tget_class(existing_type) != H5T_STRING) ||
                (H5Tget_size(existing_type) < val.length() + 1);
        H5Tclose(existing_type);
        H5Aclose(attr_id);

        if (recreate) {
            if (H5Adelete(id, name.c_str()) < 0)
                throw JFJochException(JFJochExceptionCategory::HDF5, "Cannot delete attribute " + name);
            attr_id = H5Acreate2(id, name.c_str(), datatype.GetID(), dataspace.GetID(), H5P_DEFAULT, H5P_DEFAULT);
        } else {
            attr_id = H5Aopen(id, name.c_str(), H5P_DEFAULT);
        }
    } else {
        attr_id = H5Acreate2(id, name.c_str(), datatype.GetID(), dataspace.GetID(), H5P_DEFAULT, H5P_DEFAULT);
    }

    if (attr_id < 0)
        throw JFJochException(JFJochExceptionCategory::HDF5, "Cannot create/open attribute " + name);

    herr_t ret = H5Awrite(attr_id, datatype.GetID(), val.c_str());
    H5Aclose(attr_id);
    if (ret < 0)
        throw JFJochException(JFJochExceptionCategory::HDF5, "Attribute write unsuccessful");
    return *this;
}
// Write (create or overwrite) a scalar int32 attribute.
// Fix: the diff left the legacy inline body above an unreachable delegating
// return; only the delegation to the shared helper remains.
HDF5Object & HDF5Object::Attr(const std::string &name, int32_t val) {
    return WriteOrCreateScalarAttr(*this, name, val);
}
// Write (create or overwrite) a scalar uint32 attribute.
// Fix: removed the unreachable legacy body left by the merge; the function
// delegates to the shared scalar helper.
HDF5Object & HDF5Object::Attr(const std::string &name, uint32_t val) {
    return WriteOrCreateScalarAttr(*this, name, val);
}
// Write (create or overwrite) a scalar int64 attribute.
// Fix: removed the unreachable legacy body left by the merge; the function
// delegates to the shared scalar helper.
HDF5Object & HDF5Object::Attr(const std::string &name, int64_t val) {
    return WriteOrCreateScalarAttr(*this, name, val);
}
// Write (create or overwrite) a scalar uint64 attribute.
// Fix: removed the unreachable legacy body left by the merge; the function
// delegates to the shared scalar helper.
HDF5Object & HDF5Object::Attr(const std::string &name, uint64_t val) {
    return WriteOrCreateScalarAttr(*this, name, val);
}
// Write (create or overwrite) a scalar double attribute.
// Fix: removed the unreachable legacy body left by the merge; the function
// delegates to the shared scalar helper.
HDF5Object & HDF5Object::Attr(const std::string &name, double val) {
    return WriteOrCreateScalarAttr(*this, name, val);
}
HDF5Object & HDF5Object::Attr(const std::string &name, const std::vector<double> &val) {
@@ -632,14 +675,19 @@ HDF5Group::HDF5Group(const HDF5Object& parent, const std::string &name) : HDF5Gr
}
// Open the group `name` under `parent` if it already exists, otherwise
// create it. Fix: the diff left an unconditional H5Gcreate above the
// exists-check; that call would fail whenever the group already existed and
// its result was immediately overwritten.
HDF5Group::HDF5Group(const HDF5Object& parent, const char *name) : HDF5Object() {
    if (H5Lexists(parent.GetID(), name, H5P_DEFAULT) > 0)
        id = H5Gopen(parent.GetID(), name, H5P_DEFAULT);
    else
        id = H5Gcreate(parent.GetID(), name, H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT);
    if (id < 0)
        throw JFJochException(JFJochExceptionCategory::HDF5, "Cannot open/create HDF5 group " + std::string(name));
}
HDF5Group::~HDF5Group() {
    // Release the group handle; the return value is deliberately ignored
    // because destructors must not throw.
    H5Gclose(id);
}
HDF5File::HDF5File(const std::string& filename, bool v1_10) : HDF5Object() {
HDF5Fapl fapl;
@@ -894,6 +942,17 @@ std::vector<std::string> HDF5Object::FindLeafs(const std::string &name) const {
return ret;
}
bool HDF5Object::IsExternalLink(const std::string& name) const {
H5L_info2_t link_info;
// Get information about the link
if (H5Lget_info(id, name.c_str(), &link_info, H5P_DEFAULT) < 0)
throw JFJochException(JFJochExceptionCategory::HDF5,
"Failed to retrieve information about the link");
return (link_info.type == H5L_TYPE_EXTERNAL);
}
std::string HDF5Object::GetLinkedFileName(const std::string& name) const {
H5L_info2_t link_info;

View File

@@ -151,6 +151,7 @@ public:
const std::vector<hsize_t>& start,
const std::vector<hsize_t>& size);
bool Exists(const std::string& name) const;
bool IsExternalLink(const std::string& name) const;
std::string GetLinkedFileName(const std::string& name) const;
std::vector<std::string> FindLeafs(const std::string &name) const;
std::vector<hsize_t> GetDimension(const std::string &name);

21
xds-plugin/CMakeLists.txt Normal file
View File

@@ -0,0 +1,21 @@
ADD_LIBRARY(jfjoch_xds_plugin SHARED plugin.cpp plugin.h)
TARGET_LINK_LIBRARIES(jfjoch_xds_plugin JFJochHDF5Wrappers JFJochVersion Compression hdf5-static)

INSTALL(TARGETS jfjoch_xds_plugin DESTINATION ${CMAKE_INSTALL_LIBDIR}/jfjoch COMPONENT viewer)

set_target_properties(jfjoch_xds_plugin PROPERTIES
        CXX_VISIBILITY_PRESET hidden
        C_VISIBILITY_PRESET hidden
        VISIBILITY_INLINES_HIDDEN YES
        VERSION 1.0.0
)

# --exclude-libs is a GNU ld option: hide symbols of statically linked
# libraries (e.g. hdf5-static) from the plugin's export table. The option was
# previously added twice — once unconditionally and once behind this guard;
# only the guarded form is kept, so non-GNU linkers (e.g. Apple ld64) are not
# handed an unknown flag.
if(UNIX AND NOT APPLE)
    target_link_options(jfjoch_xds_plugin PRIVATE
            "LINKER:--exclude-libs,ALL"
    )
endif()

303
xds-plugin/plugin.cpp Normal file
View File

@@ -0,0 +1,303 @@
// SPDX-FileCopyrightText: 2025 Filip Leonarski, Paul Scherrer Institute <filip.leonarski@psi.ch>
// SPDX-License-Identifier: GPL-3.0-only
// Based on Durin plugin code from Diamond Light Source Ltd. and Global Phasing (BSD-3 license)
#include <algorithm>
#include <array>
#include <cstdint>
#include <cstdio>
#include <iostream>
#include <memory>
#include <mutex>
#include <shared_mutex>
#include <string>
#include <vector>
#include "../writer/HDF5Objects.h"
#include "plugin.h"
#include "JFJochDecompress.h"
#include "../common/GitInfo.h"
#include "../common/JFJochMessages.h"
namespace {
// Exception used by the plugin entry points; carries the integer error code
// that is ultimately reported to XDS through *error_flag.
class PluginError : public std::runtime_error {
    int err_code;   // value intended for *error_flag

public:
    PluginError(int code, const std::string &message) : std::runtime_error(message), err_code(code) {}

    // Error code supplied at construction (previously stored but unreadable).
    [[nodiscard]] int code() const noexcept { return err_code; }
};
// Global plugin state. XDS may call the plugin from several threads:
// plugin_open/plugin_close take plugin_mutex exclusively while the read
// entry points take it shared.
std::shared_mutex plugin_mutex;
std::unique_ptr<HDF5ReadOnlyFile> hdf5_file;          // currently opened master file (null = closed)
FileWriterFormat format = FileWriterFormat::NoFile;   // detected dataset layout (VDS vs legacy)
uint64_t images_per_file = 0;                         // frames per data_NNNNNN dataset (legacy layout)
size_t image_size_x, image_size_y;                    // image dimensions in pixels
float pixel_size_x, pixel_size_y;                     // pixel size as stored in the file (presumably metres, per NXmx — confirm units attribute)
size_t total_image_number;                            // total frames across all datasets
uint32_t pixel_byte_depth = 0;                        // bytes per pixel (1, 2 or 4)
bool pixel_signed = false;                            // whether the pixel datatype is signed
std::vector<uint8_t> one_byte_mask;                   // per pixel: 0 = good, 1 -> output -1, 2 -> output -2
// Convert pixels of type T from the raw byte buffer into the 32-bit int
// output expected by XDS, applying (in priority order):
//   1. pixel mask value 1 -> -1, value 2 -> -2
//   2. the detector's saturation/error marker value -> -1
//   3. negative pixels (signed types only) -> -1
//   4. values above INT32_MAX clamped to INT32_MAX
// Fixes: the mask-empty test and .at() bounds check ran once per pixel and
// the `in[i] < 0` comparison was emitted even for unsigned T (tautological-
// compare warning); the signedness branch is now `if constexpr`.
template<class T>
void ConvertAndMaskTyped(const std::vector<uint8_t> &in_8bit,
                         int marker_value,
                         int *out) {
    auto in = reinterpret_cast<const T *>(in_8bit.data());
    const size_t size = in_8bit.size() / sizeof(T);
    const bool use_mask = !one_byte_mask.empty();

    for (size_t i = 0; i < size; ++i) {
        if (use_mask && (one_byte_mask.at(i) == 1)) {
            out[i] = -1;
        } else if (use_mask && (one_byte_mask.at(i) == 2)) {
            out[i] = -2;
        } else if (marker_value != 0 && in[i] == static_cast<T>(marker_value)) {
            out[i] = -1;
        } else {
            bool negative = false;
            if constexpr (std::is_signed_v<T>)
                negative = (in[i] < 0);

            if (negative)
                out[i] = -1;
            else if (in[i] > INT32_MAX)
                out[i] = INT32_MAX;
            else
                out[i] = static_cast<int>(in[i]);
        }
    }
}
// Inspect the opened master file: detect the dataset layout (VDS vs legacy
// multi-file), read image geometry, pixel datatype and pixel sizes, count the
// total number of frames, and build the one-byte pixel mask. Populates the
// file-level globals; throws PluginError on any inconsistency.
void LoadDataset() {
    std::string dataset_name;

    if (hdf5_file->Exists("/entry/data/data")) {
        format = FileWriterFormat::NXmxVDS;
        dataset_name = "/entry/data/data";
    } else if (hdf5_file->Exists("/entry/data/data_000001")) {
        format = FileWriterFormat::NXmxLegacy;
        dataset_name = "/entry/data/data_000001";
    } else {
        throw PluginError(-1, "Could not locate detector dataset");
    }

    HDF5DataSet data(*hdf5_file, dataset_name);
    HDF5DataSpace dataspace(data);
    HDF5DataType datatype(data);

    auto dim = dataspace.GetDimensions();
    if (dim.size() != 3)
        throw PluginError(-1, "Wrong dimension of /entry/data/data");

    // Dimension order is (frame, y, x).
    image_size_x = dim[2];
    image_size_y = dim[1];
    images_per_file = dim[0];
    total_image_number = dim[0];

    if (format == FileWriterFormat::NXmxLegacy) {
        // Legacy layout: frames continue in data_000002, data_000003, ...
        // Validate each dataset's geometry and accumulate the frame count.
        int dataset = 2;
        while (dataset < 100000) {
            char name[255];
            snprintf(name, sizeof(name), "/entry/data/data_%06d", dataset);
            if (!hdf5_file->Exists(name))
                break;
            auto leg_dims = hdf5_file->GetDimension(name);
            if (leg_dims.size() != 3)
                throw PluginError(-1, "Wrong dimension of " + std::string(name));
            if (leg_dims[2] != image_size_x)
                throw PluginError(-1, "Image size of " + std::string(name) + " does not match");
            if (leg_dims[1] != image_size_y)
                throw PluginError(-1, "Image size of " + std::string(name) + " does not match");
            total_image_number += leg_dims[0];
            dataset++;
        }
    }

    if (!datatype.IsInteger())
        throw PluginError(-1, "Data type of /entry/data/data is not integer");
    pixel_signed = datatype.IsSigned();
    pixel_byte_depth = datatype.GetElemSize();

    pixel_size_x = hdf5_file->GetFloat("/entry/instrument/detector/x_pixel_size");
    pixel_size_y = hdf5_file->GetFloat("/entry/instrument/detector/y_pixel_size");

    // Condense the 32-bit NXmx pixel_mask into one byte per pixel:
    // bit 30 set -> 2 (reported as -2), any of the low 16 defect bits -> 1
    // (reported as -1). Bit semantics per the NXmx pixel_mask convention —
    // TODO confirm against the detector's mask specification.
    one_byte_mask = std::vector<uint8_t>(image_size_x * image_size_y, 0);
    auto mask_tmp = hdf5_file->ReadVector<uint32_t>(
            "/entry/instrument/detector/pixel_mask",
            {0, 0},
            {image_size_y, image_size_x}
    );

    // size_t counter: mask_tmp.size() is unsigned; the previous int counter
    // mixed signed/unsigned comparison.
    for (size_t i = 0; i < mask_tmp.size(); i++) {
        if (mask_tmp[i] & (1 << 30))
            one_byte_mask[i] = 2;
        else if (mask_tmp[i] & 0xFFFF)
            one_byte_mask[i] = 1;
    }
}
// Dispatch pixel conversion to the typed helper based on the pixel byte
// depth and signedness recorded by LoadDataset(). The marker passed to the
// helper is the type's maximum value, which the detector uses to flag
// bad/saturated pixels.
void ConvertToIntAndMask(const std::vector<uint8_t> &in_8bit, int *out_buffer) {
    if (pixel_byte_depth == 1) {
        if (pixel_signed)
            ConvertAndMaskTyped<int8_t>(in_8bit, INT8_MAX, out_buffer);
        else
            ConvertAndMaskTyped<uint8_t>(in_8bit, UINT8_MAX, out_buffer);
    } else if (pixel_byte_depth == 2) {
        if (pixel_signed)
            ConvertAndMaskTyped<int16_t>(in_8bit, INT16_MAX, out_buffer);
        else
            ConvertAndMaskTyped<uint16_t>(in_8bit, UINT16_MAX, out_buffer);
    } else if (pixel_byte_depth == 4) {
        if (pixel_signed)
            ConvertAndMaskTyped<int32_t>(in_8bit, INT32_MAX, out_buffer);
        else
            ConvertAndMaskTyped<uint32_t>(in_8bit, UINT32_MAX, out_buffer);
    } else {
        throw PluginError(-1, "Unsupported conversion to int");
    }
}
// Populate the 1024-entry info array of the XDS generic plugin interface.
// Zero-fill first so XDS never reads indeterminate values from the reserved
// slots (previously only info[0..6] were written).
void FillInfoArray(int info[1024]) {
    std::fill_n(info, 1024, 0);
    info[0] = 0x01;                // NOTE(review): presumably a vendor/consumer ID — confirm against the XDS LIB spec
    info[1] = VERSION_MAJOR;
    info[2] = VERSION_MINOR;
    info[3] = VERSION_PATCH;
    info[4] = VERSION_TIMESTAMP;
    info[5] = 0;                   // kept explicit even though the array is pre-zeroed
    info[6] = -1;                  // meaning inherited from the durin plugin — confirm
}
} // namespace
extern "C" {
void plugin_open(const char *filename, int info[1024], int *error_flag) {
std::unique_lock sl(plugin_mutex);
std::cout << "********** Jungfraujoch XDS plugin **********" << std::endl;
std::cout << "Jungfraujoch version " << jfjoch_version() << std::endl;
std::cout << "Plugin version " << VERSION_MAJOR << "." << VERSION_MINOR << "." << VERSION_PATCH << std::endl << std::endl;
std::cout << "Copyright (C) 2024-2026 Paul Scherrer Institute" << std::endl;
std::cout << "This program comes with ABSOLUTELY NO WARRANTY" << std::endl;
std::cout << "This is free software, and you are welcome to redistribute it" << std::endl;
std::cout << "under certain conditions (GPLv3)" << std::endl << std::endl;
std::cout << "Based on durin plugin from Diamond Light Source Ltd. with modification from the Global Phasing Ltd." << std::endl;
std::cout << "(BSD-3 license)" << std::endl << std::endl;
try {
FillInfoArray(info);
RegisterHDF5Filter();
hdf5_file = std::make_unique<HDF5ReadOnlyFile>(filename);
LoadDataset();
*error_flag = 0;
} catch (std::exception &e) {
std::cerr << e.what() << std::endl;
*error_flag = -1;
hdf5_file.reset();
total_image_number = 0;
}
}
// XDS entry point: report the image geometry and frame count loaded by
// plugin_open(). Read-only with respect to the globals, so a shared lock
// is sufficient. *error_flag is 0 on success, -1 on failure.
void plugin_get_header(int *nx, int *ny, int *nbytes, float *qx, float *qy,
                       int *number_of_frames, int info[1024], int *error_flag) {
    std::shared_lock sl(plugin_mutex);

    try {
        FillInfoArray(info);

        if (!hdf5_file)
            throw PluginError(-1, "HDF5 file not open");

        *nx = static_cast<int>(image_size_x);
        *ny = static_cast<int>(image_size_y);
        *nbytes = static_cast<int>(pixel_byte_depth);
        *number_of_frames = static_cast<int>(total_image_number);

        // File stores pixel size presumably in metres (NXmx convention);
        // XDS expects millimetres — confirm against the units attribute.
        *qx = pixel_size_x * 1e3;
        *qy = pixel_size_y * 1e3;

        *error_flag = 0;
    } catch (std::exception &e) {
        std::cerr << e.what() << std::endl;
        *error_flag = -1;
    }
}
// XDS entry point: read one image (1-based *frame_number) into data_array as
// 32-bit ints, applying the pixel mask and saturation markers.
// Fix: the legacy dataset name was formatted with "%06ld" while the argument
// is size_t — undefined behavior on LLP64 platforms; "%06zu" matches size_t
// everywhere. The range check now casts explicitly instead of relying on an
// implicit signed/unsigned comparison.
void plugin_get_data(int *frame_number, int *nx, int *ny, int *data_array,
                     int info[1024], int *error_flag) {
    std::shared_lock sl(plugin_mutex);

    try {
        if (!hdf5_file)
            throw PluginError(-1, "HDF5 file not open");

        FillInfoArray(info);

        // *frame_number <= 0 is rejected first, so the cast below is safe.
        if (*frame_number <= 0 || static_cast<size_t>(*frame_number) > total_image_number)
            throw PluginError(-1, "Frame number out of range");

        std::string dataset_name;
        hsize_t image_id;

        if (format == FileWriterFormat::NXmxLegacy) {
            // Legacy layout: frames are spread over /entry/data/data_NNNNNN,
            // images_per_file frames per dataset.
            char str[256];
            size_t dataset_index = (*frame_number - 1) / images_per_file + 1;
            snprintf(str, sizeof(str), "/entry/data/data_%06zu", dataset_index);
            dataset_name = std::string(str);
            image_id = (*frame_number - 1) % images_per_file;
        } else {
            dataset_name = "/entry/data/data";
            image_id = *frame_number - 1;
        }

        HDF5DataSet dataset(*hdf5_file, dataset_name);
        HDF5Dcpl dcpl(dataset);

        std::vector<uint8_t> tmp;
        std::vector<hsize_t> start = {image_id, 0, 0};
        CompressionAlgorithm algorithm = CompressionAlgorithm::NO_COMPRESSION;

        // If one chunk == one full image, read the raw chunk and decompress
        // here (bypasses the HDF5 filter pipeline); otherwise fall back to a
        // regular read that returns already-decoded pixels.
        auto chunk_size = dcpl.GetChunking();
        if ((chunk_size.size() == 3)
            && (chunk_size[0] == 1)
            && (chunk_size[1] == image_size_y)
            && (chunk_size[2] == image_size_x)) {
            dataset.ReadDirectChunk(tmp, start);
            algorithm = dcpl.GetCompression();
        } else {
            dataset.ReadVectorToU8(tmp, start, {1, image_size_y, image_size_x});
            algorithm = CompressionAlgorithm::NO_COMPRESSION;
        }

        if (algorithm != CompressionAlgorithm::NO_COMPRESSION) {
            std::vector<uint8_t> decompressed_image(image_size_x * image_size_y * pixel_byte_depth);
            JFJochDecompressPtr(decompressed_image.data(),
                                algorithm,
                                tmp.data(),
                                tmp.size(),
                                image_size_x * image_size_y,
                                pixel_byte_depth);
            ConvertToIntAndMask(decompressed_image, data_array);
        } else {
            ConvertToIntAndMask(tmp, data_array);
        }

        *error_flag = 0;
    } catch (std::exception &e) {
        std::cerr << e.what() << std::endl;
        *error_flag = -1;
    }
}
void plugin_close(int *error_flag) {
std::unique_lock sl(plugin_mutex);
try {
hdf5_file.reset();
one_byte_mask.clear();
total_image_number = 0;
format = FileWriterFormat::NoFile;
images_per_file = 0;
image_size_x = 0;
image_size_y = 0;
pixel_size_x = 0;
pixel_size_y = 0;
*error_flag = 0;
} catch (std::exception &e) {
std::cerr << e.what() << std::endl;
*error_flag = -1;
}
}
} /* extern "C" */

46
xds-plugin/plugin.h Normal file
View File

@@ -0,0 +1,46 @@
// SPDX-FileCopyrightText: 2025 Filip Leonarski, Paul Scherrer Institute <filip.leonarski@psi.ch>
// SPDX-License-Identifier: GPL-3.0-only
// Based on Durin plugin code from Diamond Light Source Ltd. and Global Phasing (BSD-3 license)
/*
 * External library interface for XDS.
 * Ref: https://wiki.uni-konstanz.de/xds/index.php/LIB
 */
#pragma once
#if defined(_WIN32)
#if defined(jfjoch_xds_plugin_EXPORTS)
#define XDS_PLUGIN_API __declspec(dllexport)
#else
#define XDS_PLUGIN_API __declspec(dllimport)
#endif
#elif defined(__GNUC__) || defined(__clang__)
#define XDS_PLUGIN_API __attribute__((visibility("default")))
#else
#define XDS_PLUGIN_API
#endif
#ifdef __cplusplus
extern "C" {
#endif
// Plugin version reported to XDS via the info array.
// NOTE(review): generic macro names (VERSION_MAJOR, ...) risk collisions
// with other headers — consider an XDS_PLUGIN_ prefix.
#define VERSION_MAJOR 0
#define VERSION_MINOR 0
#define VERSION_PATCH 0
#define VERSION_TIMESTAMP -1 /* good enough for Dectris apparently */
// Open the master HDF5 file and initialize plugin state; error_flag: 0 = OK, negative = failure.
XDS_PLUGIN_API void plugin_open(const char *filename, int info[1024], int *error_flag);
// Report image geometry (nx, ny pixels; nbytes per pixel; qx, qy pixel size) and frame count.
XDS_PLUGIN_API void plugin_get_header(int *nx, int *ny, int *nbytes, float *qx, float *qy,
                                      int *number_of_frames, int info[1024], int *error_flag);
// Read one image (1-based frame_number) into data_array as 32-bit ints.
XDS_PLUGIN_API void plugin_get_data(int *frame_number, int *nx, int *ny, int *data_array,
                                    int info[1024], int *error_flag);
// Release the file and reset plugin state.
XDS_PLUGIN_API void plugin_close(int *error_flag);
#ifdef __cplusplus
} /* extern "C" */
#endif