Merge branch 'master' into eiger

This commit is contained in:
lhdamiani
2021-04-13 12:07:34 +02:00
13 changed files with 177 additions and 53 deletions
+4
View File
@@ -0,0 +1,4 @@
cmake-build-*/
docker/
docs/
scripts/
+4 -1
View File
@@ -34,5 +34,8 @@ add_subdirectory("jf-buffer-writer")
add_subdirectory("jf-assembler")
add_subdirectory("sf-stream")
add_subdirectory("sf-writer")
#add_subdirectory("jf-live-writer")
if(BUILD_JF_LIVE_WRITER)
add_subdirectory("jf-live-writer")
endif()
+10
View File
@@ -0,0 +1,10 @@
#!/bin/bash
VERSION=1.0.0
docker build --no-cache=true -f phdf5.Dockerfile -t paulscherrerinstitute/sf-daq_phdf5 .
docker tag paulscherrerinstitute/sf-daq_phdf5 paulscherrerinstitute/sf-daq_phdf5:$VERSION
docker login
docker push paulscherrerinstitute/sf-daq_phdf5:$VERSION
docker push paulscherrerinstitute/sf-daq_phdf5
+18
View File
@@ -0,0 +1,18 @@
FROM centos:centos7
RUN yum -y install centos-release-scl epel-release && \
yum -y update && \
yum -y install devtoolset-9 git cmake3 mpich-devel wget zeromq-devel
ENV PATH="/usr/lib64/mpich/bin:${PATH}"
ENV LD_LIBRARY_PATH="/usr/lib64/mpich/lib:${LD_LIBRARY_PATH}"
SHELL ["scl", "enable", "devtoolset-9"]
RUN wget https://support.hdfgroup.org/ftp/HDF5/releases/hdf5-1.12/hdf5-1.12.0/src/hdf5-1.12.0.tar.gz && \
tar -xzf hdf5-1.12.0.tar.gz
WORKDIR /hdf5-1.12.0
RUN ./configure --enable-parallel && make install
RUN ln -v -s `pwd`/hdf5/lib/* /usr/lib64/ && \
ln -v -s `pwd`/hdf5/include/* /usr/include/ && \
ln -v -s /usr/include/mpich-x86_64/* /usr/include/
-2
View File
@@ -31,8 +31,6 @@ target_link_libraries(jf-live-writer
jf-live-writer-lib
zmq
hdf5
hdf5_hl
hdf5_cpp
rt
)
+21
View File
@@ -1,5 +1,26 @@
# jf-live-writer
The jf-live-writer is packaged as a Docker container for development and
testing.
# Using the docker container
The easiest way to build and test the jf-live-writer is to use the
provided docker container. You need to start it from the project **root**:
```bash
docker build -f jf-live-writer/debug.Dockerfile -t jf-live-writer .
```
(Running this command from the project root is mandatory as the entire project
folder needs to be part of the build context.)
## Building
In order to build this executable you need to set the following cmake variable:
```
cmake3 -DBUILD_JF_LIVE_WRITER=ON
```
```
The project will not build if you do not have the PHDF5 library installed.
Please follow the instructions below to install it manually.
## Install PHDF5 manually
```
wget https://support.hdfgroup.org/ftp/HDF5/releases/hdf5-1.12/hdf5-1.12.0/src/hdf5-1.12.0.tar.gz
+10
View File
@@ -0,0 +1,10 @@
FROM paulscherrerinstitute/sf-daq_phdf5:1.0.0
COPY . /sf_daq_buffer/
RUN mkdir /sf_daq_buffer/build && \
cd /sf_daq_buffer/build && \
cmake3 -DBUILD_JF_LIVE_WRITER=ON .. && \
make jf-live-writer
WORKDIR /sf_daq_buffer/build
+2 -1
View File
@@ -15,13 +15,14 @@ class JFH5Writer {
const std::string root_folder_;
const std::string detector_name_;
static const int64_t NO_RUN_ID;
static const int64_t NO_RUN_ID = -1;
// Run specific variables.
int64_t current_run_id_ = NO_RUN_ID;
uint32_t image_y_size_ = 0;
uint32_t image_x_size_ = 0;
uint32_t bits_per_pixel_ = 0;
uint32_t image_n_bytes_ = 0;
// Open file specific variables.
hid_t file_id_ = -1;
+7 -3
View File
@@ -1,6 +1,7 @@
#include <cstddef>
#include <formats.hpp>
#include <chrono>
#include "broker_format.hpp"
#ifndef SF_DAQ_BUFFER_FRAMESTATS_HPP
#define SF_DAQ_BUFFER_FRAMESTATS_HPP
@@ -9,9 +10,12 @@
class WriterStats {
const std::string detector_name_;
const size_t stats_modulo_;
const size_t image_n_bytes_;
uint32_t image_n_bytes_;
int image_counter_;
uint64_t total_bytes_;
uint32_t total_buffer_write_us_;
uint32_t max_buffer_write_us_;
std::chrono::time_point<std::chrono::steady_clock> stats_interval_start_;
@@ -22,8 +26,8 @@ class WriterStats {
public:
WriterStats(
const std::string &detector_name,
const size_t stats_modulo,
const size_t image_n_bytes);
const size_t stats_modulo);
void setup_run(const StoreStream& meta);
void start_image_write();
void end_image_write();
};
@@ -5,5 +5,5 @@ namespace live_writer_config
// N of IO threads to receive data from modules.
const int LIVE_ZMQ_IO_THREADS = 1;
const std::string OUTPUT_FOLDER_SYMLINK = "OUTPUT/"
const std::string OUTPUT_FOLDER_SYMLINK = "OUTPUT/";
}
+84 -33
View File
@@ -11,7 +11,6 @@
extern "C"
{
#include "H5DOpublic.h"
#include <bitshuffle/bshuf_h5filter.h>
}
@@ -21,7 +20,7 @@ using namespace live_writer_config;
JFH5Writer::JFH5Writer(const BufferUtils::DetectorConfig config):
root_folder_(config.buffer_folder),
detector_name_(config.detector_name),
detector_name_(config.detector_name)
{
}
@@ -30,6 +29,21 @@ JFH5Writer::~JFH5Writer()
close_file();
}
hid_t JFH5Writer::get_datatype(const int bits_per_pixel)
{
switch(bits_per_pixel) {
case 8:
return H5T_NATIVE_UINT8;
case 16:
return H5T_NATIVE_UINT16;
case 32:
return H5T_NATIVE_UINT32;
default:
throw runtime_error(
"Unsupported bits per pixel:" + to_string(bits_per_pixel));
}
}
void JFH5Writer::open_run(const int64_t run_id,
const uint32_t n_images,
const uint32_t image_y_size,
@@ -46,6 +60,7 @@ void JFH5Writer::open_run(const int64_t run_id,
image_y_size_ = image_y_size;
image_x_size_ = image_x_size;
bits_per_pixel_ = bits_per_pixel;
image_n_bytes_ = (image_y_size_ * image_x_size_ * bits_per_pixel_) / 8;
open_file(output_file, n_images);
}
@@ -58,6 +73,7 @@ void JFH5Writer::close_run()
image_y_size_ = 0;
image_x_size_ = 0;
bits_per_pixel_ = 0;
image_n_bytes_ = 0;
}
void JFH5Writer::open_file(const string& output_file, const uint32_t n_images)
@@ -188,39 +204,33 @@ void JFH5Writer::write_data(
throw runtime_error("Invalid run_id.");
}
// hsize_t b_i_dims[3] = {BUFFER_BLOCK_SIZE,
// MODULE_Y_SIZE * n_modules_,
// MODULE_X_SIZE};
// H5::DataSpace b_i_space(3, b_i_dims);
// hsize_t b_i_count[] = {n_images_to_copy,
// MODULE_Y_SIZE * n_modules_,
// MODULE_X_SIZE};
// hsize_t b_i_start[] = {n_images_offset, 0, 0};
// b_i_space.selectHyperslab(H5S_SELECT_SET, b_i_count, b_i_start);
//
// hsize_t f_i_dims[3] = {n_images_,
// MODULE_Y_SIZE * n_modules_,
// MODULE_X_SIZE};
// H5::DataSpace f_i_space(3, f_i_dims);
// hsize_t f_i_count[] = {n_images_to_copy,
// MODULE_Y_SIZE * n_modules_,
// MODULE_X_SIZE};
// hsize_t f_i_start[] = {data_write_index_, 0, 0};
// f_i_space.selectHyperslab(H5S_SELECT_SET, f_i_count, f_i_start);
//
// image_dataset_.write(
// data, H5::PredType::NATIVE_UINT16, b_i_space, f_i_space);
const hsize_t ram_dims[3] = {1, image_y_size_, image_x_size_};
auto ram_ds = H5Screate_simple(3, ram_dims, nullptr);
if (ram_ds < 0) {
throw runtime_error("Cannot create image ram dataspace.");
}
hsize_t offset[] = {data_write_index_, 0, 0};
size_t data_offset = i_image * MODULE_N_BYTES * n_modules_;
auto file_ds = H5Dget_space(image_dataset_id_);
if (file_ds < 0) {
throw runtime_error("Cannot get image dataset file dataspace.");
}
H5DOwrite_chunk(
image_dataset_.getId(),
H5P_DEFAULT,
0,
offset,
MODULE_N_BYTES * n_modules_,
data + data_offset);
const hsize_t file_ds_start[] = {index, 0, 0};
const hsize_t file_ds_stride[] = {1, 1, 1};
const hsize_t file_ds_count[] = {1, image_y_size_, image_x_size_};
const hsize_t file_ds_block[] = {1, 1, 1};
if (H5Sselect_hyperslab(file_ds, H5S_SELECT_SET,
file_ds_start, file_ds_stride, file_ds_count, file_ds_block) < 0) {
throw runtime_error("Cannot select image dataset file hyperslab.");
}
if (H5Dwrite(image_dataset_id_, get_datatype(bits_per_pixel_),
ram_ds, file_ds, H5P_DEFAULT, data) < 0) {
throw runtime_error("Cannot write data to image dataset.");
}
H5Sclose(file_ds);
H5Sclose(ram_ds);
}
void JFH5Writer::write_meta(
@@ -230,5 +240,46 @@ void JFH5Writer::write_meta(
throw runtime_error("Invalid run_id.");
}
const hsize_t ram_dims[3] = {1, 1, 1};
auto ram_ds = H5Screate_simple(3, ram_dims, nullptr);
if (ram_ds < 0) {
throw runtime_error("Cannot create metadata ram dataspace.");
}
auto file_ds = H5Dget_space(pulse_dataset_id_);
if (file_ds < 0) {
throw runtime_error("Cannot get metadata dataset file dataspace.");
}
const hsize_t file_ds_start[] = {index, 0, 0};
const hsize_t file_ds_stride[] = {1, 1, 1};
const hsize_t file_ds_count[] = {1, 1, 1};
const hsize_t file_ds_block[] = {1, 1, 1};
if (H5Sselect_hyperslab(file_ds, H5S_SELECT_SET,
file_ds_start, file_ds_stride, file_ds_count, file_ds_block) < 0) {
throw runtime_error("Cannot select metadata dataset file hyperslab.");
}
if (H5Dwrite(pulse_dataset_id_, H5T_NATIVE_UINT64,
ram_ds, file_ds, H5P_DEFAULT, &(meta.pulse_id)) < 0) {
throw runtime_error("Cannot write data to pulse_id dataset.");
}
if (H5Dwrite(frame_dataset_id_, H5T_NATIVE_UINT64,
ram_ds, file_ds, H5P_DEFAULT, &(meta.frame_index)) < 0) {
throw runtime_error("Cannot write data to frame_index dataset.");
}
if (H5Dwrite(daq_rec_dataset_id_, H5T_NATIVE_UINT32,
ram_ds, file_ds, H5P_DEFAULT, &(meta.daq_rec)) < 0) {
throw runtime_error("Cannot write data to daq_rec dataset.");
}
if (H5Dwrite(is_good_dataset_id_, H5T_NATIVE_UINT32,
ram_ds, file_ds, H5P_DEFAULT, &(meta.is_good_image)) < 0) {
throw runtime_error("Cannot write data to is_good_image dataset.");
}
H5Sclose(file_ds);
H5Sclose(ram_ds);
}
+11 -4
View File
@@ -6,11 +6,9 @@ using namespace chrono;
WriterStats::WriterStats(
const string& detector_name,
const size_t stats_modulo,
const size_t image_n_bytes) :
const size_t stats_modulo) :
detector_name_(detector_name),
stats_modulo_(stats_modulo),
image_n_bytes_(image_n_bytes)
stats_modulo_(stats_modulo)
{
reset_counters();
}
@@ -20,6 +18,7 @@ void WriterStats::reset_counters()
image_counter_ = 0;
total_buffer_write_us_ = 0;
max_buffer_write_us_ = 0;
total_bytes_ = 0;
}
void WriterStats::start_image_write()
@@ -27,9 +26,17 @@ void WriterStats::start_image_write()
stats_interval_start_ = steady_clock::now();
}
void WriterStats::setup_run(const StoreStream& meta)
{
image_n_bytes_ = (meta.image_y_size *
meta.image_x_size *
meta.bits_per_pixel) / 8;
}
void WriterStats::end_image_write()
{
image_counter_++;
total_bytes_ += image_n_bytes_;
uint32_t write_us_duration = duration_cast<microseconds>(
steady_clock::now()-stats_interval_start_).count();
+5 -8
View File
@@ -18,17 +18,14 @@ int main (int argc, char *argv[])
{
if (argc != 3) {
cout << endl;
cout << "Usage: jf_live_writer [detector_json_filename]"
" [bits_per_pixel]" << endl;
cout << "Usage: jf_live_writer [detector_json_filename]" << endl;
cout << "\tdetector_json_filename: detector config file path." << endl;
cout << "\tbits_per_pixel: Number of bits in each pixel." << endl;
cout << endl;
exit(-1);
}
auto const config = BufferUtils::read_json_config(string(argv[1]));
auto const bits_per_pixel = atoi(argv[2]);
MPI_Init(NULL, NULL);
@@ -45,11 +42,8 @@ int main (int argc, char *argv[])
RamBuffer ram_buffer(config.detector_name, config.n_modules);
const uint64_t image_n_bytes =
config.image_y_size * config.image_x_size * bits_per_pixel;
JFH5Writer writer(config);
WriterStats stats(config.detector_name, STATS_MODULO, image_n_bytes);
WriterStats stats(config.detector_name, STATS_MODULO);
StoreStream meta = {};
while (true) {
@@ -61,6 +55,9 @@ int main (int argc, char *argv[])
meta.image_y_size,
meta.image_x_size,
meta.bits_per_pixel);
stats.setup_run(meta);
continue;
}