Merge branch '2404-pusher-performance' into 'main'

Refactor how frames are sent from receiver

See merge request jungfraujoch/nextgendcu!45
This commit is contained in:
2024-04-14 21:47:06 +02:00
85 changed files with 1385 additions and 3656 deletions

View File

@@ -679,6 +679,16 @@ void JFJochBrokerHttp::plot_receiver_delay_post(const org::openapitools::server:
ProcessOutput(Convert(plot), response);
}
// Handle POST /plot/receiver_free_send_buffers: build the plot of free
// receiver send-buffer counts, honoring an optional binning override from
// the request body, and write the converted result to the HTTP response.
void JFJochBrokerHttp::plot_receiver_free_send_buffers_post(
        const org::openapitools::server::model::Plot_request &plotRequest,
        Pistache::Http::ResponseWriter &response) {
    PlotRequest plot_req{.type = PlotType::ReceiverFreeSendBuf,
                         .binning = plotRequest.binningIsSet() ? plotRequest.getBinning() : 0};
    auto plots = state_machine.GetPlots(plot_req);
    ProcessOutput(Convert(plots), response);
}
void JFJochBrokerHttp::plot_roi_sum_post(const org::openapitools::server::model::Plot_request &plotRequest,
Pistache::Http::ResponseWriter &response) {
PlotRequest req{.type = PlotType::ROISum, .binning = 0};

View File

@@ -48,6 +48,9 @@ class JFJochBrokerHttp : public org::openapitools::server::api::DefaultApi {
void plot_strong_pixel_post(const org::openapitools::server::model::Plot_request &plotRequest,
Pistache::Http::ResponseWriter &response) override;
void plot_receiver_free_send_buffers_post(const org::openapitools::server::model::Plot_request &plotRequest,
Pistache::Http::ResponseWriter &response) override;
void plot_receiver_delay_post(const org::openapitools::server::model::Plot_request &plotRequest,
Pistache::Http::ResponseWriter &response) override;

View File

@@ -7,7 +7,6 @@
#include "../common/DiffractionExperiment.h"
#include "JFJochBrokerHttp.h"
#include "../acquisition_device/AcquisitionDeviceGroup.h"
#include "../frame_serialize/ZMQStream2PreviewPublisher.h"
DetectorGeometry ParseStandardDetectorGeometry(const nlohmann::json &j);
DetectorGeometry ParseCustomDetectorGeometry(const nlohmann::json &j);

View File

@@ -55,6 +55,7 @@ void DefaultApi::setupRoutes() {
Routes::Get(*router, base + "/plot/rad_int", Routes::bind(&DefaultApi::plot_rad_int_get_handler, this));
Routes::Get(*router, base + "/plot/rad_int_per_file", Routes::bind(&DefaultApi::plot_rad_int_per_file_get_handler, this));
Routes::Post(*router, base + "/plot/receiver_delay", Routes::bind(&DefaultApi::plot_receiver_delay_post_handler, this));
Routes::Post(*router, base + "/plot/receiver_free_send_buffers", Routes::bind(&DefaultApi::plot_receiver_free_send_buffers_post_handler, this));
Routes::Get(*router, base + "/plot/resolution_estimate_histogram", Routes::bind(&DefaultApi::plot_resolution_estimate_histogram_get_handler, this));
Routes::Post(*router, base + "/plot/roi_max_count", Routes::bind(&DefaultApi::plot_roi_max_count_post_handler, this));
Routes::Post(*router, base + "/plot/roi_sum", Routes::bind(&DefaultApi::plot_roi_sum_post_handler, this));
@@ -655,6 +656,39 @@ void DefaultApi::plot_receiver_delay_post_handler(const Pistache::Rest::Request
response.send(Pistache::Http::Code::Internal_Server_Error, e.what());
}
}
// Pistache route handler for POST /plot/receiver_free_send_buffers.
// Parses the JSON body into a Plot_request, validates it, and delegates to
// the virtual plot_receiver_free_send_buffers_post(); failures are mapped
// to HTTP error responses.
void DefaultApi::plot_receiver_free_send_buffers_post_handler(const Pistache::Rest::Request &request, Pistache::Http::ResponseWriter response) {
try {
// Getting the body param
Plot_request plotRequest;
try {
// Deserialize and validate the request body; parse/validation errors
// are translated by handleParsingException (client-error response).
nlohmann::json::parse(request.body()).get_to(plotRequest);
plotRequest.validate();
} catch (std::exception &e) {
const std::pair<Pistache::Http::Code, std::string> errorInfo = this->handleParsingException(e);
response.send(errorInfo.first, errorInfo.second);
return;
}
try {
// Dispatch to the implementation provided by the derived class.
this->plot_receiver_free_send_buffers_post(plotRequest, response);
} catch (Pistache::Http::HttpError &e) {
// Explicit HTTP errors keep their original status code.
response.send(static_cast<Pistache::Http::Code>(e.code()), e.what());
return;
} catch (std::exception &e) {
// Any other exception is mapped by handleOperationException.
const std::pair<Pistache::Http::Code, std::string> errorInfo = this->handleOperationException(e);
response.send(errorInfo.first, errorInfo.second);
return;
}
} catch (std::exception &e) {
// Last-resort guard: anything escaping the inner handlers becomes a 500.
response.send(Pistache::Http::Code::Internal_Server_Error, e.what());
}
}
void DefaultApi::plot_resolution_estimate_histogram_get_handler(const Pistache::Rest::Request &, Pistache::Http::ResponseWriter response) {
try {

View File

@@ -80,6 +80,7 @@ private:
void plot_rad_int_get_handler(const Pistache::Rest::Request &request, Pistache::Http::ResponseWriter response);
void plot_rad_int_per_file_get_handler(const Pistache::Rest::Request &request, Pistache::Http::ResponseWriter response);
void plot_receiver_delay_post_handler(const Pistache::Rest::Request &request, Pistache::Http::ResponseWriter response);
void plot_receiver_free_send_buffers_post_handler(const Pistache::Rest::Request &request, Pistache::Http::ResponseWriter response);
void plot_resolution_estimate_histogram_get_handler(const Pistache::Rest::Request &request, Pistache::Http::ResponseWriter response);
void plot_roi_max_count_post_handler(const Pistache::Rest::Request &request, Pistache::Http::ResponseWriter response);
void plot_roi_sum_post_handler(const Pistache::Rest::Request &request, Pistache::Http::ResponseWriter response);
@@ -284,6 +285,14 @@ private:
/// <param name="plotRequest"> (optional)</param>
virtual void plot_receiver_delay_post(const org::openapitools::server::model::Plot_request &plotRequest, Pistache::Http::ResponseWriter &response) = 0;
/// <summary>
/// Generate receiver free send buffer plot
/// </summary>
/// <remarks>
/// Amount of send buffers available during frame processing - used for internal debugging; binning is configurable
/// </remarks>
/// <param name="plotRequest"> (optional)</param>
virtual void plot_receiver_free_send_buffers_post(const org::openapitools::server::model::Plot_request &plotRequest, Pistache::Http::ResponseWriter &response) = 0;
/// <summary>
/// Generate resolution estimate histogram
/// </summary>
/// <remarks>

View File

@@ -61,7 +61,7 @@ public:
/// Dataset_settings members
/// <summary>
/// For standard synchrotron data collection - this is number of images collected per one TTL trigger For XFEL mode - this number is ignored and set to 1 For storage cell mode - this number is ignored and set to number of storage cells
/// For standard synchrotron data collection - this is number of images collected per one TTL trigger For XFEL (pulsed source) - this number is ignored and set to 1 For storage cell mode - this number is ignored and set to number of storage cells
/// </summary>
int64_t getImagesPerTrigger() const;
void setImagesPerTrigger(int64_t const value);
@@ -75,7 +75,7 @@ public:
bool ntriggerIsSet() const;
void unsetNtrigger();
/// <summary>
/// FPGA frame summation. For summation above two 32-bit pixel format will be used, unless explicitly specified.
/// FPGA frame summation. For summation above two 32-bit pixel format will be used, unless explicitly specified. Frame summation factor applies only to conversion mode (assumed as 1 for raw data). In XFEL mode: summation happens for frames collected with multiple triggers. Ignored for storage cells (assumed as 1).
/// </summary>
int64_t getSummation() const;
void setSummation(int64_t const value);

View File

@@ -1345,6 +1345,30 @@ paths:
schema:
type: string
description: Exception error
/plot/receiver_free_send_buffers:
post:
summary: Generate receiver free send buffer plot
description: Amount of send buffers available during frame processing - used for internal debugging; binning is configurable
requestBody:
content:
application/json:
schema:
$ref: '#/components/schemas/plot_request'
responses:
"200":
description: Everything OK
content:
application/json:
schema:
$ref: '#/components/schemas/plots'
"400":
description: Input parsing or validation error
content:
text/plain:
schema:
type: string
description:
Exception error
/plot/image_collection_efficiency:
post:
summary: Generate image collection efficiency plot

View File

@@ -71,7 +71,6 @@ int main (int argc, char **argv) {
std::unique_ptr<JFJochReceiverService> receiver;
std::unique_ptr<ZMQStream2PusherGroup> image_pusher;
std::unique_ptr<ZMQStream2PreviewPublisher> preview_publisher;
ZMQContext context;
@@ -90,13 +89,8 @@ int main (int argc, char **argv) {
zmq_send_watermark,
zmq_send_buffer_size);
receiver = std::make_unique<JFJochReceiverService>(aq_devices, logger, *image_pusher);
std::string zmq_preview_addr = ParseString(input, "zmq_preview_addr");
if (!zmq_preview_addr.empty()) {
preview_publisher = std::make_unique<ZMQStream2PreviewPublisher>(context, zmq_preview_addr);
receiver->PreviewPublisher(preview_publisher.get());
}
int32_t send_buffer_size_MiB = ParseInt32(input, "send_buffer_size_MiB", 2048);
receiver = std::make_unique<JFJochReceiverService>(aq_devices, logger, *image_pusher, send_buffer_size_MiB);
std::string numa_policy = ParseString(input, "numa_policy");
if (!numa_policy.empty())

File diff suppressed because one or more lines are too long

View File

@@ -44,16 +44,14 @@ ADD_LIBRARY( JFJochCommon STATIC
Plot.h
../fpga/include/jfjoch_fpga.h
ZMQWrappers.cpp ZMQWrappers.h
DatasetSettings.cpp
DatasetSettings.h
ROIMask.cpp
ROIMask.h
ROIElement.cpp
ROIElement.h
ROICircle.cpp
ROICircle.h
ROIBox.cpp
ROIBox.h
DatasetSettings.cpp DatasetSettings.h
ROIMask.cpp ROIMask.h
ROIElement.cpp ROIElement.h
ROICircle.cpp ROICircle.h
ROIBox.cpp ROIBox.h
SendBuffer.cpp SendBuffer.h
SendBufferControl.cpp SendBufferControl.h
time_utc.h
)
TARGET_LINK_LIBRARIES(JFJochCommon Compression JFCalibration "$<BUILD_INTERFACE:libzmq-static>" -lrt)

View File

@@ -1170,3 +1170,7 @@ int64_t DiffractionExperiment::GetImagesPerFile() const {
return tmp;
}
// Size of one send-buffer location: the worst-case compressed image size
// plus 1 MiB of headroom (presumably for message metadata/framing around
// the image payload — TODO confirm against the serializer).
int64_t DiffractionExperiment::GetSendBufferLocationSize() const {
return GetMaxCompressedSize() + 1024 * 1024;
}

View File

@@ -212,6 +212,7 @@ public:
std::chrono::nanoseconds GetDetectorDelay() const;
int64_t GetMaxCompressedSize() const;
int64_t GetSendBufferLocationSize() const;
int64_t GetDataStreamsNum() const;
int64_t GetModulesNum(uint16_t data_stream) const;

View File

@@ -8,7 +8,7 @@
#include <string>
enum class PlotType {BkgEstimate, RadInt, RadIntPerFile, SpotCount, IndexingRate, IndexingRatePerFile,
ErrorPixels, ImageCollectionEfficiency, ReceiverDelay, StrongPixels,
ErrorPixels, ImageCollectionEfficiency, ReceiverDelay, ReceiverFreeSendBuf, StrongPixels,
ROISum, ROIMaxCount, ROIPixels,
ResEstimation};

71
common/SendBuffer.cpp Normal file
View File

@@ -0,0 +1,71 @@
// Copyright (2019-2024) Paul Scherrer Institute

#ifdef JFJOCH_USE_NUMA
#include <numa.h>
#endif
#include <sys/mman.h>
#include <cstring>

#include "SendBuffer.h"
#include "JFJochException.h"

// Allocates one large, page-backed buffer that is later subdivided into
// fixed-size "locations", each able to hold one serialized image
// (metadata + payload). Throws JFJochException on allocation failure.
SendBuffer::SendBuffer(size_t in_buffer_size)
        : buffer_size(in_buffer_size) {
#ifdef JFJOCH_USE_NUMA
    // Interleave pages across NUMA nodes; returns nullptr on failure.
    buffer = (uint8_t *) numa_alloc_interleaved(buffer_size);
#else
    buffer = (uint8_t *) mmap(nullptr, buffer_size, PROT_READ | PROT_WRITE,
                              MAP_PRIVATE | MAP_ANONYMOUS, -1, 0);
    // BUGFIX: mmap signals failure with MAP_FAILED ((void *) -1), not
    // nullptr, so the original nullptr-only check never caught it.
    if (buffer == MAP_FAILED)
        buffer = nullptr;
#endif
    if (buffer == nullptr)
        throw JFJochException(JFJochExceptionCategory::MemAllocFailed, "Failed to allocate communication buffer");
    // Touch every page up front so page faults don't occur on the hot path.
    memset(buffer, 0, in_buffer_size);
}

SendBuffer::~SendBuffer() {
#ifdef JFJOCH_USE_NUMA
    numa_free(buffer, buffer_size);
#else
    munmap(buffer, buffer_size);
#endif
}

// Partition the buffer into locations of in_location_size bytes each.
// Throws if the size is zero (would divide by zero) or exceeds the buffer.
void SendBuffer::SetBufferLocationSize(size_t in_location_size) {
    if (in_location_size == 0)
        throw JFJochException(JFJochExceptionCategory::InputParameterInvalid, "Buffer location size must be non-zero");
    if (buffer_size < in_location_size)
        throw JFJochException(JFJochExceptionCategory::InputParameterInvalid, "Buffer is too small to hold even 1 image");
    location_size = in_location_size;
    location_number = buffer_size / in_location_size;
}

size_t SendBuffer::GetBufferLocationSize() const {
    return location_size;
}

// Map a pointer inside the buffer back to the index of its location.
// Throws if the buffer was never partitioned or ptr is out of range.
size_t SendBuffer::GetBufferLocationID(const uint8_t *ptr) const {
    if (location_size == 0)
        throw JFJochException(JFJochExceptionCategory::WrongNumber, "Buffer not initialized");
    if ((ptr < buffer) || (ptr >= buffer + buffer_size))
        throw JFJochException(JFJochExceptionCategory::WrongNumber, "Pointer outside of buffer location");
    return (ptr - buffer) / location_size;
}

size_t SendBuffer::GetNumOfLocations() const {
    if (location_size == 0)
        throw JFJochException(JFJochExceptionCategory::WrongNumber, "Buffer not initialized");
    return location_number;
}

// Pointer to the start of location `id` (image metadata + image).
uint8_t *SendBuffer::GetBufferLocation(size_t id) {
    if (location_size == 0)
        throw JFJochException(JFJochExceptionCategory::WrongNumber, "Buffer not initialized");
    if (id >= location_number)
        throw JFJochException(JFJochExceptionCategory::ArrayOutOfBounds,
                              "Image entry out of buffer bounds");
    return buffer + location_size * id;
}

28
common/SendBuffer.h Normal file
View File

@@ -0,0 +1,28 @@
// Copyright (2019-2024) Paul Scherrer Institute
#ifndef JUNGFRAUJOCH_SENDBUFFER_H
#define JUNGFRAUJOCH_SENDBUFFER_H
#include <cstddef>
#include <cstdint>
// Owns one large allocation that is split into equally sized "locations",
// each intended to hold a single serialized image (metadata + payload).
// Allocation happens in the constructor and is released in the destructor;
// the accessors below require SetBufferLocationSize() to be called first.
class SendBuffer {
const size_t buffer_size;   // total allocation size in bytes
size_t location_size = 0;   // bytes per location; 0 until SetBufferLocationSize()
size_t location_number = 0; // number of locations = buffer_size / location_size
uint8_t *buffer;            // raw owning pointer to the allocation
public:
explicit SendBuffer(size_t buffer_size);
~SendBuffer();
// Partition the buffer into equally sized locations.
void SetBufferLocationSize(size_t location_size);
size_t GetBufferLocationSize() const;
size_t GetNumOfLocations() const;
// Index of the location containing ptr; throws if ptr is out of range.
size_t GetBufferLocationID(const uint8_t *ptr) const;
uint8_t *GetBufferLocation(size_t id); // image metadata + image
};
#endif //JUNGFRAUJOCH_SENDBUFFER_H

View File

@@ -0,0 +1,36 @@
// Copyright (2019-2024) Paul Scherrer Institute

#include "SendBufferControl.h"

// Tracks which locations of a SendBuffer are free. Every location gets a
// ZeroCopyReturnValue whose release() puts the location index back into the
// availability FIFO.
SendBufferControl::SendBufferControl(const DiffractionExperiment &experiment,
                                     SendBuffer &in_buffer)
        : buffer(in_buffer) {
    buffer.SetBufferLocationSize(experiment.GetSendBufferLocationSize());

    // size_t loop variable avoids the signed/unsigned comparison of the
    // original `int i < GetNumOfLocations()`. reserve() prevents relocation
    // while the vector is filled (pointers into it are handed out later).
    const size_t locations = buffer.GetNumOfLocations();
    send_buffer_zero_copy_ret_val.reserve(locations);
    for (size_t i = 0; i < locations; i++) {
        send_buffer_avail.Put(static_cast<uint32_t>(i));
        send_buffer_zero_copy_ret_val.push_back(ZeroCopyReturnValue(buffer.GetBufferLocation(i),
                                                                    send_buffer_avail,
                                                                    static_cast<uint32_t>(i)));
    }
}

// Drain the FIFO to verify that every location has been returned, waiting up
// to `timeout` per location. NOTE: the consumed indices are not re-inserted,
// so this is intended for end-of-collection teardown — confirm with callers.
bool SendBufferControl::CheckIfBufferReturned(std::chrono::microseconds timeout) {
    uint32_t val;
    for (size_t i = 0; i < buffer.GetNumOfLocations(); i++) {
        if (!send_buffer_avail.GetTimeout(val, timeout))
            return false;
    }
    return true;
}

// Non-blocking: the control object for a free location, or nullptr if none
// is currently available.
ZeroCopyReturnValue *SendBufferControl::GetBufLocation() {
    uint32_t location;
    if (send_buffer_avail.Get(location))
        return &send_buffer_zero_copy_ret_val[location];
    else
        return nullptr;
}

// Current number of free locations.
size_t SendBufferControl::GetAvailBufLocations() {
    return send_buffer_avail.Size();
}

View File

@@ -0,0 +1,23 @@
// Copyright (2019-2024) Paul Scherrer Institute
#ifndef JUNGFRAUJOCH_SENDBUFFERCONTROL_H
#define JUNGFRAUJOCH_SENDBUFFERCONTROL_H
#include "SendBuffer.h"
#include "ThreadSafeFIFO.h"
#include "ZeroCopyReturnValue.h"
#include "DiffractionExperiment.h"
// Hands out free SendBuffer locations and accounts for their return.
// Availability is tracked by location index in a thread-safe FIFO; a
// released ZeroCopyReturnValue pushes its index back into the FIFO.
class SendBufferControl {
ThreadSafeFIFO<uint32_t> send_buffer_avail;                     // indices of free locations
std::vector<ZeroCopyReturnValue> send_buffer_zero_copy_ret_val; // one entry per location
SendBuffer &buffer;                                             // non-owning reference
public:
// Partitions `buffer` using the experiment's send-buffer location size.
SendBufferControl(const DiffractionExperiment& experiment, SendBuffer &buffer);
// Non-blocking; nullptr when no location is currently free.
ZeroCopyReturnValue *GetBufLocation();
// Current number of free locations.
size_t GetAvailBufLocations();
// Drains the FIFO (up to `timeout` per location) to verify all returned.
bool CheckIfBufferReturned(std::chrono::microseconds timeout);
};
#endif //JUNGFRAUJOCH_SENDBUFFERCONTROL_H

View File

@@ -59,6 +59,21 @@ public:
return tmp;
};
// Pop the front element into `val`, waiting up to `timeout` for data.
// Returns 1 on success, 0 if the queue stayed empty for the whole timeout.
int GetTimeout(T &val, std::chrono::microseconds timeout) {
std::unique_lock<std::mutex> ul(m);
if (queue.empty())
// wait_for with a predicate guards against spurious wakeups
c_empty.wait_for(ul, timeout, [&]{return !queue.empty();});
if (queue.empty())
return 0;
else {
val = queue.front();
queue.pop();
// wake a producer that may be blocked on a full queue
c_full.notify_one();
return 1;
}
}
// Number of queued elements.
// NOTE(review): reads queue.size() without taking the mutex — the result is
// only a snapshot and is a data race under concurrent modification; confirm
// whether callers tolerate this or whether locking should be added (would
// require the mutex to be mutable, as this method is const).
size_t Size() const {
return queue.size();
}

View File

@@ -6,11 +6,18 @@
#include "ThreadSafeFIFO.h"
class ZeroCopyReturnValue {
void *ptr;
uint32_t handle;
ThreadSafeFIFO<uint32_t> *fifo;
public:
ZeroCopyReturnValue(ThreadSafeFIFO<uint32_t> &in_fifo, uint32_t in_handle) :
fifo(&in_fifo), handle(in_handle) {}
ZeroCopyReturnValue(void *in_ptr,
ThreadSafeFIFO<uint32_t> &in_fifo,
uint32_t in_handle) :
ptr(in_ptr), fifo(&in_fifo), handle(in_handle) {}
void *get_ptr() const {
return ptr;
}
void release() const {
fifo->PutBlocking(handle);

19
common/time_utc.h Normal file
View File

@@ -0,0 +1,19 @@
// Copyright (2019-2024) Paul Scherrer Institute

#ifndef JUNGFRAUJOCH_TIME_UTC_H
#define JUNGFRAUJOCH_TIME_UTC_H

#include <string>
#include <chrono>
#include <cstdio>
#include <ctime>

// Format a system_clock time point as an ISO 8601 UTC timestamp with
// millisecond resolution, e.g. "1970-01-01T00:00:00.123Z".
inline std::string time_UTC(const std::chrono::time_point<std::chrono::system_clock> &input) {
    auto time_ms = std::chrono::duration_cast<std::chrono::milliseconds>(input.time_since_epoch()).count();

    char buf1[255], buf2[255];
    time_t secs = static_cast<time_t>(time_ms / 1000);

    // gmtime_r instead of gmtime: the latter returns a shared static buffer
    // and is not thread-safe.
    struct tm tm_utc{};
    gmtime_r(&secs, &tm_utc);
    strftime(buf1, sizeof(buf1), "%FT%T", &tm_utc);

    // Milliseconds are three digits. The previous "%06ld" padded them to
    // six, so e.g. 123 ms rendered as ".000123" (reads as microseconds).
    // The cast also matches the format specifier to the count() type.
    snprintf(buf2, sizeof(buf2), ".%03lld", static_cast<long long>(time_ms % 1000));

    return std::string(buf1) + std::string(buf2) + "Z";
}

#endif //JUNGFRAUJOCH_TIME_UTC_H

View File

@@ -309,14 +309,9 @@ void GetCBORMultidimTypedArray(CompressedImage &v, CborValue &value) {
}
bool CheckMagicNumber(CborValue &v) {
auto key = GetCBORString(v);
if (key != "magic_number") {
cbor_value_advance(&v);
return false;
} else {
return GetCBORUInt(v) == user_data_magic_number;
}
// Validate the stream's magic-number field. The constant encodes the
// user-data release version, so a mismatch means the Jungfraujoch server
// and writer run incompatible protocol versions.
void CheckMagicNumber(CborValue &v) {
if (GetCBORUInt(v) != user_data_magic_number)
throw JFJochException(JFJochExceptionCategory::CBORError, "Inconsistency between Jungfraujoch server and writer");
}
inline SpotToSave GetSpot(CborValue& value) {
@@ -370,6 +365,8 @@ void CBORStream2Deserializer::DecodeType(CborValue &value) {
msg_type = Type::END;
else if (type_str == "image")
msg_type = Type::IMAGE;
else if (type_str == "calibration")
msg_type = Type::CALIBRATION;
else
throw JFJochException(JFJochExceptionCategory::CBORError, "Unknown message type");
}
@@ -423,7 +420,7 @@ bool CBORStream2Deserializer::ProcessImageMessageElement(CborValue &value) {
if (key == "image_id")
data_message.number = GetCBORInt(value);
else if (key == "data")
ProcessImageData(value);
ProcessImageData(value, data_message.image);
else if (key == "series_unique_id")
data_message.series_unique_id = GetCBORString(value);
else if (key == "series_id")
@@ -450,6 +447,8 @@ bool CBORStream2Deserializer::ProcessImageMessageElement(CborValue &value) {
data_message.jf_info = GetCBORUInt(value) & UINT32_MAX;
else if (key == "receiver_aq_dev_delay")
data_message.receiver_aq_dev_delay = GetCBORInt(value);
else if (key == "receiver_free_send_buf")
data_message.receiver_free_send_buf = GetCBORInt(value);
else if (key == "storage_cell")
data_message.storage_cell = GetCBORUInt(value) & UINT32_MAX;
else if (key == "xfel_pulse_id")
@@ -482,23 +481,6 @@ bool CBORStream2Deserializer::ProcessImageMessageElement(CborValue &value) {
}
}
void CBORStream2Deserializer::ProcessCalibration(CborValue &value) {
// For calibration and mask - image might be accessed later on - so it is saved in internal storage of the object
// It allows the original start message to be lost
CborValue map_value;
cborErr(cbor_value_enter_container(&value, &map_value));
while (! cbor_value_at_end(&map_value)) {
auto key = GetCBORString(map_value);
CompressedImage image;
image.channel = key;
GetCBORMultidimTypedArray(image, map_value);
image.Save();
start_message.calibration.emplace_back(std::move(image));
}
cborErr(cbor_value_leave_container(&value, &map_value));
}
void CBORStream2Deserializer::ProcessPixelMaskElement(CborValue &value) {
// For calibration and mask - image might be accessed later on - so it is saved in internal storage of the object
// It allows the original start message to be lost
@@ -509,7 +491,6 @@ void CBORStream2Deserializer::ProcessPixelMaskElement(CborValue &value) {
CompressedImage image;
image.channel = key;
GetCBORMultidimTypedArray(image, map_value);
image.Save();
start_message.pixel_mask.emplace_back(std::move(image));
}
cborErr(cbor_value_leave_container(&value, &map_value));
@@ -651,6 +632,8 @@ void CBORStream2Deserializer::ProcessStartUserData(CborValue &value) {
start_message.gain_file_names = j["gain_file_names"];
if (j.contains("roi_names"))
start_message.roi_names = j["roi_names"];
if (j.contains("write_master_file"))
start_message.write_master_file = j["write_master_file"];
} catch (const std::exception &e) {
throw JFJochException(JFJochExceptionCategory::CBORError, "Cannot parse user_data as valid JSON " + std::string(e.what()));
@@ -663,7 +646,9 @@ bool CBORStream2Deserializer::ProcessStartMessageElement(CborValue &value) {
else {
auto key = GetCBORString(value);
try {
if (key == "beam_center_x")
if (key == "magic_number")
CheckMagicNumber(value);
else if (key == "beam_center_x")
start_message.beam_center_x = GetCBORFloat(value);
else if (key == "beam_center_y")
start_message.beam_center_y = GetCBORFloat(value);
@@ -756,8 +741,6 @@ bool CBORStream2Deserializer::ProcessStartMessageElement(CborValue &value) {
start_message.storage_cell_number = GetCBORUInt(value);
else if (key == "storage_cell_delay")
start_message.storage_cell_delay_ns = GetRational(value).first;
else if (key == "calibration")
ProcessCalibration(value);
else {
if (cbor_value_is_tag(&value))
cbor_value_advance(&value);
@@ -770,6 +753,26 @@ bool CBORStream2Deserializer::ProcessStartMessageElement(CborValue &value) {
}
}
// Consume one key of a "calibration" message map. Only the "data" key is
// decoded (into the `calibration` member image); any other key — including
// a preceding CBOR tag — is skipped. Returns false once the map end is
// reached, so callers loop `while (ProcessCalibration(v));`.
bool CBORStream2Deserializer::ProcessCalibration(CborValue &value) {
if (cbor_value_at_end(&value))
return false;
else {
auto key = GetCBORString(value);
try {
if (key == "data") {
ProcessImageData(value, calibration);
} else {
// skip an optional tag plus the value itself for unknown keys
if (cbor_value_is_tag(&value))
cbor_value_advance(&value);
cbor_value_advance(&value);
}
return true;
} catch (const JFJochException &e) {
// re-throw with the offending key for easier diagnosis
throw JFJochException(JFJochExceptionCategory::CBORError, "Error processing calibration message, key " + key + ":" + e.what());
}
}
}
bool CBORStream2Deserializer::ProcessEndMessageElement(CborValue &value) {
if (cbor_value_at_end(&value))
return false;
@@ -792,8 +795,6 @@ bool CBORStream2Deserializer::ProcessEndMessageElement(CborValue &value) {
end_message.max_receiver_delay = GetCBORUInt(value);
else if (key == "data_collection_efficiency")
end_message.efficiency = GetCBORFloat(value);
else if (key == "write_master_file")
end_message.write_master_file = GetCBORBool(value);
else if (key == "az_int_result")
ProcessRadIntResultElement(value);
else if (key == "adu_histogram")
@@ -809,7 +810,7 @@ bool CBORStream2Deserializer::ProcessEndMessageElement(CborValue &value) {
}
}
void CBORStream2Deserializer::ProcessImageData(CborValue &value) {
void CBORStream2Deserializer::ProcessImageData(CborValue &value, CompressedImage& image) {
if (!cbor_value_is_map(&value))
throw JFJochException(JFJochExceptionCategory::CBORError, "Map expected");
@@ -821,8 +822,8 @@ void CBORStream2Deserializer::ProcessImageData(CborValue &value) {
cborErr(cbor_value_enter_container(&value, &map_value));
data_message.image.channel = GetCBORString(map_value);
GetCBORMultidimTypedArray(data_message.image, map_value);
image.channel = GetCBORString(map_value);
GetCBORMultidimTypedArray(image, map_value);
cborErr(cbor_value_leave_container(&value, &map_value));
}
@@ -834,6 +835,7 @@ void CBORStream2Deserializer::Process(const std::vector<uint8_t> &buffer) {
void CBORStream2Deserializer::Process(const uint8_t *msg, size_t msg_size) {
std::unique_lock<std::mutex> ul(m);
data_message = DataMessage();
calibration = CompressedImage();
CborParser parser;
CborValue value;
@@ -844,7 +846,6 @@ void CBORStream2Deserializer::Process(const uint8_t *msg, size_t msg_size) {
if (GetCBORTag(value) != CborSignatureTag)
throw JFJochException(JFJochExceptionCategory::CBORError, "CBOR must start with dedicated tag");
if (cbor_value_is_map(&value)) {
CborValue map_value;
cborErr(cbor_value_enter_container(&value, &map_value));
@@ -854,6 +855,9 @@ void CBORStream2Deserializer::Process(const uint8_t *msg, size_t msg_size) {
case Type::IMAGE:
while (ProcessImageMessageElement(map_value));
break;
case Type::CALIBRATION:
while (ProcessCalibration(map_value));
break;
case Type::START:
start_message = StartMessage{};
while (ProcessStartMessageElement(map_value));
@@ -894,4 +898,9 @@ EndMessage CBORStream2Deserializer::GetEndMessage() const {
StartMessage CBORStream2Deserializer::GetStartMessage() const {
std::unique_lock<std::mutex> ul(m);
return start_message;
}
// Thread-safe copy of the calibration image decoded from the most recent
// "calibration" message (default-constructed if none has been received,
// since Process() resets `calibration` at the start of each message).
CompressedImage CBORStream2Deserializer::GetCalibrationImage() const {
std::unique_lock<std::mutex> ul(m);
return calibration;
}

View File

@@ -15,7 +15,7 @@
class CBORStream2Deserializer {
public:
enum class Type {START, END, IMAGE, NONE};
enum class Type {START, END, IMAGE, CALIBRATION, NONE};
private:
mutable std::mutex m;
@@ -25,6 +25,7 @@ private:
StartMessage start_message;
EndMessage end_message;
CompressedImage calibration;
void DecodeType(CborValue &value);
@@ -33,9 +34,8 @@ private:
void ProcessGoniometerOmega(CborValue &value);
void ProcessAxis(CborValue &value, float v[3]);
void ProcessCalibration(CborValue &value);
void ProcessChannels(CborValue &value);
void ProcessImageData(CborValue &value);
void ProcessImageData(CborValue &value, CompressedImage& image);
void ProcessPixelMaskElement(CborValue &value);
void ProcessRadIntResultElement(CborValue &value);
void ProcessADUHistogramElement(CborValue &value);
@@ -43,6 +43,8 @@ private:
void ProcessROIElementMap(CborValue &value);
void ProcessUnitCellElement(CborValue &value);
void ProcessStartUserData(CborValue &value);
bool ProcessCalibration(CborValue &value);
bool ProcessImageMessageElement(CborValue &value);
bool ProcessStartMessageElement(CborValue &value);
bool ProcessEndMessageElement(CborValue &value);
@@ -55,6 +57,7 @@ public:
// WARNING!!! pointer to data is valid only as long as input buffer is valid
[[nodiscard]] DataMessage GetDataMessage() const;
[[nodiscard]] CompressedImage GetCalibrationImage() const;
};

View File

@@ -314,6 +314,9 @@ inline void CBOR_ENC_START_USER_DATA(CborEncoder& encoder, const char* key,
j["space_group_number"] = message.space_group_number;
j["roi_names"] = message.roi_names;
j["gain_file_names"] = message.gain_file_names;
if (message.write_master_file)
j["write_master_file"] = message.write_master_file.value();
auto str = j.dump();
CBOR_ENC(encoder, key, str);
@@ -394,8 +397,6 @@ void CBORStream2Serializer::SerializeSequenceStart(const StartMessage& message)
CBOR_ENC(mapEncoder, "az_int_bin_to_q", message.az_int_bin_to_q);
CBOR_ENC(mapEncoder, "summation", message.summation);
if (!message.calibration.empty())
CBOR_ENC(mapEncoder, "calibration", message.calibration);
CBOR_ENC_START_USER_DATA(mapEncoder, "user_data", message);
cborErr(cbor_encoder_close_container(&encoder, &mapEncoder));
curr_size = cbor_encoder_get_buffer_size(&encoder, buffer);
@@ -418,7 +419,6 @@ void CBORStream2Serializer::SerializeSequenceEnd(const EndMessage& message) {
CBOR_ENC(mapEncoder, "images_collected", message.images_collected_count);
CBOR_ENC(mapEncoder, "images_sent_to_write", message.images_sent_to_write_count);
CBOR_ENC(mapEncoder, "data_collection_efficiency", message.efficiency);
CBOR_ENC(mapEncoder, "write_master_file", message.write_master_file);
CBOR_ENC_RAD_INT_RESULT(mapEncoder, "az_int_result", message.az_int_result);
CBOR_ENC_ADU_HIST(mapEncoder, "adu_histogram", message.adu_histogram);
CBOR_ENC(mapEncoder, "adu_histogram_bin_width", message.adu_histogram_bin_width);
@@ -454,6 +454,7 @@ void CBORStream2Serializer::SerializeImage(const DataMessage& message) {
CBOR_ENC(mapEncoder, "xfel_event_code", message.xfel_event_code);
CBOR_ENC(mapEncoder, "jf_info", message.jf_info);
CBOR_ENC(mapEncoder, "receiver_aq_dev_delay", message.receiver_aq_dev_delay);
CBOR_ENC(mapEncoder, "receiver_free_send_buf", message.receiver_free_send_buf);
CBOR_ENC(mapEncoder, "storage_cell", message.storage_cell);
CBOR_ENC(mapEncoder, "saturated_pixel_count", message.saturated_pixel_count);
CBOR_ENC(mapEncoder, "error_pixel_count", message.error_pixel_count);
@@ -464,9 +465,43 @@ void CBORStream2Serializer::SerializeImage(const DataMessage& message) {
CBOR_ENC(mapEncoder, "roi_integrals", message.roi);
CBOR_ENC(mapEncoder, "user_data", message.user_data);
CBOR_ENC(mapEncoder, "data", message.image);
cborErr(cbor_encoder_close_container(&encoder, &mapEncoder));
curr_size = cbor_encoder_get_buffer_size(&encoder, buffer);
}
// Serialize a standalone "calibration" message: an indefinite-length CBOR
// map (preceded by the stream signature tag) carrying the message type, the
// magic number for version checking, and the calibration image under "data".
// The encoded size is left in curr_size for the caller.
void CBORStream2Serializer::SerializeCalibration(const CompressedImage &image) {
CborEncoder encoder, mapEncoder;
cbor_encoder_init(&encoder, buffer, max_buffer_size, 0);
cborErr(cbor_encode_tag(&encoder, CborSignatureTag ));
cborErr(cbor_encoder_create_map(&encoder, &mapEncoder, CborIndefiniteLength));
CBOR_ENC(mapEncoder, "type", "calibration");
CBOR_ENC(mapEncoder, "magic_number", user_data_magic_number);
CBOR_ENC(mapEncoder, "data", image);
cborErr(cbor_encoder_close_container(&encoder, &mapEncoder));
curr_size = cbor_encoder_get_buffer_size(&encoder, buffer);
}
// Byte offset at which externally produced image bytes should be written
// into the buffer: AppendImage() later rewrites the byte at curr_size - 2,
// steps curr_size back by one, and inserts an 8-byte length field, so the
// payload starts at (curr_size - 1) + sizeof(size_t) = this value.
size_t CBORStream2Serializer::GetImageAppendOffset() {
return curr_size + sizeof(size_t) - 1;
}
// Finalize a message whose image payload was written directly into the
// buffer at GetImageAppendOffset(): rewrite the tail of the serialized CBOR
// so the payload becomes a definite-length byte string, then re-terminate
// the enclosing indefinite-length map.
void CBORStream2Serializer::AppendImage(size_t image_size) {
if (curr_size + image_size + sizeof(size_t) + 1 >= max_buffer_size)
throw JFJochException(JFJochExceptionCategory::CBORError, "No space to extend the image");
// 0x40 + 27: CBOR major type 2 (byte string) with an 8-byte length field
// following. Presumably this overwrites a placeholder emitted by the
// serializer and the decrement steps back over the old map break byte —
// TODO confirm against the image-serialization path.
buffer[curr_size - 2] = 0x40 + 27;
curr_size--;
// CBOR multi-byte lengths are big-endian; swap on little-endian hosts.
// NOTE(review): <endian.h> defines both LITTLE_ENDIAN and BIG_ENDIAN as
// macros on glibc, so `#ifdef LITTLE_ENDIAN` can be true even on
// big-endian systems — consider `#if __BYTE_ORDER == __LITTLE_ENDIAN`.
#ifdef LITTLE_ENDIAN
size_t image_size_be = __builtin_bswap64(image_size);
#else
size_t image_size_be = image_size;
#endif
memcpy(buffer + curr_size, &image_size_be, sizeof(size_t));
curr_size += sizeof(size_t);
// The payload bytes are already in place at this offset.
curr_size += image_size + 0;
// 0xFF: break byte closing the indefinite-length map.
buffer[curr_size] = 0xFF;
curr_size++;
}

View File

@@ -19,6 +19,10 @@ public:
void SerializeSequenceStart(const StartMessage& message);
void SerializeSequenceEnd(const EndMessage& message);
void SerializeImage(const DataMessage& message);
void SerializeCalibration(const CompressedImage& image);
[[nodiscard]] size_t GetImageAppendOffset();
void AppendImage(size_t image_size);
};

View File

@@ -16,7 +16,6 @@ ADD_LIBRARY(ImagePusher STATIC
ImagePusher.cpp ImagePusher.h
TestImagePusher.cpp TestImagePusher.h
ZMQStream2PusherGroup.cpp ZMQStream2PusherGroup.h
ZMQStream2PreviewPublisher.cpp ZMQStream2PreviewPublisher.h
ZMQStream2Pusher.cpp
ZMQStream2Pusher.h)

View File

@@ -18,9 +18,12 @@ void PrepareCBORImage(DataMessage& message,
class ImagePusher {
public:
virtual void StartDataCollection(const StartMessage& message) = 0;
virtual void StartDataCollection(StartMessage& message) = 0;
virtual bool EndDataCollection(const EndMessage& message) = 0; // Non-blocking
virtual bool SendImage(const DataMessage& message) = 0; // Non-blocking
virtual bool SendImage(const uint8_t *image_data, size_t image_size, int64_t image_number) = 0;
virtual void SendImage(const uint8_t *image_data, size_t image_size, int64_t image_number,
ZeroCopyReturnValue *z) = 0;
virtual bool SendCalibration(const CompressedImage& message) = 0;
};

View File

@@ -14,7 +14,7 @@
#include "../common/SpotToSave.h"
#include "../common/UnitCell.h"
constexpr const uint64_t user_data_release = 3;
constexpr const uint64_t user_data_release = 4;
constexpr const uint64_t user_data_magic_number = 0x52320000UL | user_data_release;
struct CompressedImage {
@@ -27,15 +27,6 @@ struct CompressedImage {
bool pixel_is_float = false;
CompressionAlgorithm algorithm;
std::string channel;
void Save() {
if ((data == nullptr) || (data == tmp_storage.data()))
throw std::runtime_error("Address issue in saving compressed image");
tmp_storage.resize(size);
memcpy(tmp_storage.data(), data, size);
data = tmp_storage.data();
}
std::vector<uint8_t> tmp_storage; // This should be private but will brake initialization list - need to do that properly later
};
struct ROIMessage {
@@ -78,6 +69,7 @@ struct DataMessage {
std::optional<uint64_t> jf_info;
std::optional<uint64_t> receiver_aq_dev_delay;
std::optional<uint64_t> receiver_free_send_buf;
std::optional<uint64_t> storage_cell;
std::optional<float> resolution_estimation;
@@ -162,20 +154,14 @@ struct StartMessage {
std::vector<float> az_int_bin_to_q;
std::vector<CompressedImage> pixel_mask;
std::vector<CompressedImage> calibration;
std::optional<float> total_flux;
std::optional<float> attenuator_transmission;
// Use function below to update approx_size
void AddPixelMask(CompressedImage image) {
image.Save();
pixel_mask.emplace_back(std::move(image));
}
std::optional<bool> write_master_file;
void AddCalibration(CompressedImage image) {
image.Save();
calibration.emplace_back(std::move(image));
void AddPixelMask(CompressedImage image) {
pixel_mask.emplace_back(std::move(image));
}
std::string user_data;
@@ -188,8 +174,6 @@ struct EndMessage {
std::optional<uint64_t> max_receiver_delay;
std::optional<float> efficiency;
std::optional<bool> write_master_file;
std::optional<std::string> end_date;
std::string series_unique_id;

View File

@@ -9,6 +9,7 @@ There are minor differences at the moment:
* LZ4 compression is not allowed; Bitshuffle Zstandard is possible
* Few fields are currently absent
* Few extra fields are present beyond DECTRIS standard
* There is calibration message defined to transfer pedestal correction
## Start message
@@ -48,7 +49,6 @@ There are minor differences at the moment:
| az_int_bin_number | uint64 | Number of azimuthal integration bins | |
| az_int_bin_to_q | Array(float) | Q value for each azimuthal integration bin \[angstrom^-1\] | |
| summation | uint64 | Factor of frame summation | |
| calibration | Map(string -> Image) (optional) | Pedestal values | |
| user_data | string | JSON string that can contain the following fields (among others): | X |
| - source_name | string | Facility name | |
| - source_name_short | string | Facility name (short) | |
@@ -61,6 +61,7 @@ There are minor differences at the moment:
| - sample_name | string | Name of the sample | |
| - omega_rotation_axis | Array(float) | Omega rotation axis (array of size 3) | |
| - space_group_number | uint64 | Space group number | |
| - write_master_file | bool | With multiple sockets, it selects which socket will provide master file | |
See [DECTRIS documentation](https://github.com/dectris/documentation/tree/main/stream_v2) for definition of Image as MultiDimArray with optional compression.
@@ -88,6 +89,7 @@ See [DECTRIS documentation](https://github.com/dectris/documentation/tree/main/s
| xfel_event_code | uint64 | Event code (for SwissFEL) | | X |
| jf_info | uint64 | Detector info field | | |
| receiver_aq_dev_delay | uint64 | Receiver internal delay | | |
| receiver_free_send_buf | uint64 | Receiver internal number of available send buffers | | |
| storage_cell | uint64 | Storage cell number | | |
| saturated_pixel_count | uint64 | Saturated pixel count | | |
| error_pixel_count | uint64 | Error pixel count | | |
@@ -112,12 +114,19 @@ See [DECTRIS documentation](https://github.com/dectris/documentation/tree/main/s
| images_collected | uint64 | Number of image collected | |
| images_sent_to_writer | uint64 | Number of image sent to writer; if writer queues were full, it is possible this is less than images collected | |
| data_collection_efficiency | float | Network packets collected / Network packets expected \[\] | |
| write_master_file | bool | With multiple sockets, it selects which socket will provide master file | |
| az_int_result | Map(text->Array(float)) | Azimuthal integration results, use az_int_bin_to_q from start message for legend | |
| adu_histogram | Map(text->Array(uint64)) | ADU values histogram | |
| adu_histogram_width | uint64 | Width of bins in the above histogram \[ADU\] | |
| max_receiver_delay | uint64 | Internal performance of Jungfraujoch | |
# Calibration message
| Field name | Type | Description | Present in DECTRIS format |
|--------------|----------------------|-----------------------------------------------------------------------------------------------------------------------------------|:-------------------------:|
| type | String | value "end" | |
| magic_number | uint64 | Number used to describe version of the Jungfraujoch data interface - to allow to detect inconsistency between sender and receiver | |
| data | Map(string -> Image) | Calibration map (only single pedestal array per message) | |
# Image transfer
Images can be forwarded by Jungraujoch through few interfaces. All of them are ZeroMQ based at the moment, though they differ with serialization layer.

View File

@@ -10,7 +10,7 @@ TestImagePusher::TestImagePusher(int64_t image_number) {
image_id = image_number;
}
void TestImagePusher::StartDataCollection(const StartMessage& message) {
void TestImagePusher::StartDataCollection(StartMessage& message) {
std::unique_lock<std::mutex> ul(m);
if (is_running)
@@ -19,6 +19,14 @@ void TestImagePusher::StartDataCollection(const StartMessage& message) {
is_running = true;
}
bool TestImagePusher::SendCalibration(const CompressedImage &message) {
std::unique_lock<std::mutex> ul(m);
if (!is_running)
correct_sequence = false;
return true;
}
bool TestImagePusher::EndDataCollection(const EndMessage& message) {
std::unique_lock<std::mutex> ul(m);
@@ -29,18 +37,37 @@ bool TestImagePusher::EndDataCollection(const EndMessage& message) {
return true;
}
bool TestImagePusher::SendImage(const DataMessage& message) {
bool TestImagePusher::SendImage(const uint8_t *image_data, size_t image_size, int64_t image_number) {
std::unique_lock<std::mutex> ul(m);
frame_counter++;
if (message.number == image_id) {
data_message = message;
receiver_generated_image.resize(message.image.size);
memcpy(receiver_generated_image.data(), message.image.data, message.image.size);
if (image_number == image_id) {
CBORStream2Deserializer deserializer;
deserializer.Process(image_data, image_size);
auto image_array = deserializer.GetDataMessage();
receiver_generated_image.resize(image_array.image.size);
memcpy(receiver_generated_image.data(), image_array.image.data, image_array.image.size);
}
return true;
}
void TestImagePusher::SendImage(const uint8_t *image_data, size_t image_size, int64_t image_number,
ZeroCopyReturnValue *z) {
std::unique_lock<std::mutex> ul(m);
frame_counter++;
if (image_number == image_id) {
CBORStream2Deserializer deserializer;
deserializer.Process(image_data, image_size);
auto image_array = deserializer.GetDataMessage();
receiver_generated_image.resize(image_array.image.size);
memcpy(receiver_generated_image.data(), image_array.image.data, image_array.image.size);
}
z->release();
}
bool TestImagePusher::CheckSequence() const {
std::unique_lock<std::mutex> ul(m);
return correct_sequence;
@@ -142,4 +169,4 @@ bool TestImagePusher::CheckImage(const DiffractionExperiment &x, const std::vect
if (no_errors && data_message)
logger.Info("Background estimate {} Spot count {}", data_message->bkg_estimate, data_message->spots.size());
return no_errors;
}
}

View File

@@ -20,10 +20,13 @@ class TestImagePusher : public ImagePusher {
size_t frame_counter = 0;
std::optional<DataMessage> data_message;
public:
bool SendImage(const DataMessage& message) override;
explicit TestImagePusher(int64_t image_number);
void StartDataCollection(const StartMessage& message) override;
void StartDataCollection(StartMessage& message) override;
bool EndDataCollection(const EndMessage& message) override;
bool SendCalibration(const CompressedImage& message) override;
bool SendImage(const uint8_t *image_data, size_t image_size, int64_t image_number) override;
void SendImage(const uint8_t *image_data, size_t image_size, int64_t image_number, ZeroCopyReturnValue *z) override;
bool CheckImage(const DiffractionExperiment &x,
const std::vector<uint16_t> &raw_reference_image,
const JFCalibration &calibration,

View File

@@ -1,33 +0,0 @@
// Copyright (2019-2024) Paul Scherrer Institute
#include "ZMQStream2PreviewPublisher.h"
#include "CBORStream2Serializer.h"
ZMQStream2PreviewPublisher::ZMQStream2PreviewPublisher(ZMQContext& context, const std::string& addr) :
socket(context, ZMQSocketType::Pub) {
socket.Conflate(true).NoLinger();
socket.Bind(addr);
}
void ZMQStream2PreviewPublisher::StartDataCollection(const StartMessage& message) {
// Start message is not transmitted here
}
bool ZMQStream2PreviewPublisher::SendImage(const DataMessage& message) {
size_t approx_size = message.image.size + 2*1024*1024;
std::vector<uint8_t> serialization_buffer(approx_size);
CBORStream2Serializer serializer(serialization_buffer.data(), serialization_buffer.size());
serializer.SerializeImage(message);
return socket.Send(serialization_buffer.data(), serializer.GetBufferSize(), false);
}
bool ZMQStream2PreviewPublisher::EndDataCollection(const EndMessage &message) {
return true; // End message is not transmitted here
}
std::string ZMQStream2PreviewPublisher::GetAddress() {
return socket.GetEndpointName();
}

View File

@@ -1,21 +0,0 @@
// Copyright (2019-2024) Paul Scherrer Institute
#ifndef JUNGFRAUJOCH_ZMQSTREAM2PREVIEWPUBLISHER_H
#define JUNGFRAUJOCH_ZMQSTREAM2PREVIEWPUBLISHER_H
#include "../common/ZMQWrappers.h"
#include "ImagePusher.h"
class ZMQStream2PreviewPublisher : public ImagePusher {
ZMQSocket socket;
public:
ZMQStream2PreviewPublisher(ZMQContext& context, const std::string& addr);
void StartDataCollection(const StartMessage& message) override;
bool SendImage(const DataMessage& message) override;
bool EndDataCollection(const EndMessage &message) override;
std::string GetAddress();
};
#endif //JUNGFRAUJOCH_ZMQSTREAM2PREVIEWPUBLISHER_H

View File

@@ -25,14 +25,11 @@ void ZMQStream2Pusher::Bind(const std::string &addr, int32_t send_buffer_high_wa
socket.Bind(addr);
}
void ZMQStream2Pusher::StartDataCollection(const StartMessage &message) {
void ZMQStream2Pusher::StartDataCollection(StartMessage &message) {
size_t approx_size = 1024*1024;
for (const auto &x : message.pixel_mask)
approx_size += x.size;
for (const auto &x : message.calibration)
approx_size += x.size;
std::vector<uint8_t> serialization_buffer(approx_size);
CBORStream2Serializer serializer(serialization_buffer.data(), serialization_buffer.size());
serializer.SerializeSequenceStart(message);
@@ -40,14 +37,13 @@ void ZMQStream2Pusher::StartDataCollection(const StartMessage &message) {
throw JFJochException(JFJochExceptionCategory::ZeroMQ, "Timeout on pushing start message on addr " + GetAddress());
}
bool ZMQStream2Pusher::SendImage(const DataMessage &message) {
size_t approx_size = message.image.size + 2*1024*1024;
std::vector<uint8_t> serialization_buffer(approx_size);
bool ZMQStream2Pusher::SendImage(const uint8_t *image_data, size_t image_size, int64_t image_number) {
return socket.Send(image_data, image_size, false);
}
CBORStream2Serializer serializer(serialization_buffer.data(), serialization_buffer.size());
serializer.SerializeImage(message);
return socket.Send(serialization_buffer.data(), serializer.GetBufferSize(), false); // Non-blocking
void ZMQStream2Pusher::SendImage(const uint8_t *image_data, size_t image_size, int64_t image_number,
ZeroCopyReturnValue *z) {
socket.SendZeroCopy(image_data,image_size, z);
}
bool ZMQStream2Pusher::EndDataCollection(const EndMessage &message) {
@@ -58,6 +54,13 @@ bool ZMQStream2Pusher::EndDataCollection(const EndMessage &message) {
return socket.Send(serialization_buffer.data(), serializer.GetBufferSize(), true); // Blocking
}
bool ZMQStream2Pusher::SendCalibration(const CompressedImage &message) {
std::vector<uint8_t> serialization_buffer(80 * 1024 * 1024);
CBORStream2Serializer serializer(serialization_buffer.data(), serialization_buffer.size());
serializer.SerializeCalibration(message);
return socket.Send(serialization_buffer.data(), serializer.GetBufferSize(), true); // Blocking
}
std::string ZMQStream2Pusher::GetAddress() {
return socket.GetEndpointName();
}
}

View File

@@ -10,14 +10,20 @@ class ZMQStream2Pusher : public ImagePusher {
std::unique_ptr<ZMQContext> context;
ZMQSocket socket;
public:
ZMQStream2Pusher(ZMQContext& context, const std::string& addr,
int32_t send_buffer_high_watermark = -1, int32_t send_buffer_size = -1);
ZMQStream2Pusher(ZMQContext& context,
const std::string& addr,
int32_t send_buffer_high_watermark = -1,
int32_t send_buffer_size = -1);
ZMQStream2Pusher(const std::string& addr, int32_t send_buffer_high_watermark = -1, int32_t send_buffer_size = -1);
explicit ZMQStream2Pusher(const std::string& addr,
int32_t send_buffer_high_watermark = -1,
int32_t send_buffer_size = -1);
void Bind(const std::string& addr, int32_t send_buffer_high_watermark, int32_t send_buffer_size);
void StartDataCollection(const StartMessage& message) override;
bool SendImage(const DataMessage& message) override;
void StartDataCollection(StartMessage& message) override;
bool SendImage(const uint8_t *image_data, size_t image_size, int64_t image_number) override;
void SendImage(const uint8_t *image_data, size_t image_size, int64_t image_number, ZeroCopyReturnValue *z) override;
bool EndDataCollection(const EndMessage &message) override;
bool SendCalibration(const CompressedImage& message) override;
std::string GetAddress();
};

View File

@@ -24,31 +24,45 @@ ZMQStream2PusherGroup::ZMQStream2PusherGroup(const std::vector<std::string> &add
(a, send_buffer_high_watermark, send_buffer_size));
}
bool ZMQStream2PusherGroup::SendImage(const DataMessage& message) {
if (pusher.empty())
bool ZMQStream2PusherGroup::SendImage(const uint8_t *image_data, size_t image_size, int64_t image_number) {
if (!pusher.empty()) {
auto socket_number = (image_number / images_per_file) % pusher.size();
return pusher[socket_number]->SendImage(image_data, image_size, image_number);
} else
return false;
auto socket_number = (message.number / images_per_file) % pusher.size();
return pusher[socket_number]->SendImage(message);
}
void ZMQStream2PusherGroup::StartDataCollection(const StartMessage& message) {
void ZMQStream2PusherGroup::SendImage(const uint8_t *image_data, size_t image_size, int64_t image_number,
ZeroCopyReturnValue *z) {
if (!pusher.empty()) {
auto socket_number = (image_number / images_per_file) % pusher.size();
pusher[socket_number]->SendImage(image_data, image_size, image_number, z);
}
}
void ZMQStream2PusherGroup::StartDataCollection(StartMessage& message) {
if (message.images_per_file < 1)
throw JFJochException(JFJochExceptionCategory::InputParameterInvalid,
"Images per file cannot be zero or negative");
images_per_file = message.images_per_file;
for (auto &p: pusher)
for (auto &p: pusher) {
p->StartDataCollection(message);
message.write_master_file = false;
}
}
bool ZMQStream2PusherGroup::SendCalibration(const CompressedImage &message) {
if (pusher.empty())
return false;
return pusher[0]->SendCalibration(message);
}
bool ZMQStream2PusherGroup::EndDataCollection(const EndMessage& message) {
EndMessage end_message = message;
bool ret = true;
for (auto &p: pusher) {
ret = ret && p->EndDataCollection(end_message);
end_message.write_master_file = false;
if (!p->EndDataCollection(message))
ret = false;
}
return ret;
}
@@ -58,4 +72,4 @@ std::vector<std::string> ZMQStream2PusherGroup::GetAddress() {
for (auto &p: pusher)
ret.push_back(p->GetAddress());
return ret;
}
}

View File

@@ -17,9 +17,11 @@ public:
explicit ZMQStream2PusherGroup(const std::vector<std::string>& addr,
int32_t send_buffer_high_watermark = -1, int32_t send_buffer_size = -1);
void StartDataCollection(const StartMessage& message) override;
bool SendImage(const DataMessage& message) override;
void StartDataCollection(StartMessage& message) override;
void SendImage(const uint8_t *image_data, size_t image_size, int64_t image_number, ZeroCopyReturnValue *z) override;
bool SendImage(const uint8_t *image_data, size_t image_size, int64_t image_number) override;
bool EndDataCollection(const EndMessage& message) override;
bool SendCalibration(const CompressedImage& message) override;
std::vector<std::string> GetAddress();
};

File diff suppressed because it is too large Load Diff

View File

@@ -14,7 +14,7 @@
"@types/node": "^18.11.13",
"@types/react": "^18.0.26",
"@types/react-dom": "^18.0.9",
"plotly.js": "^2.16.4",
"plotly.js": "2.31.0",
"react": "^18.2.0",
"react-dom": "^18.2.0",
"react-plotly.js": "^2.6.0",
@@ -23,8 +23,8 @@
"typescript": "^4.9.4"
},
"scripts": {
"start": "PORT=8000 react-scripts start",
"build": "react-scripts build",
"start": "REACT_APP_VERSION=$(git rev-parse --short HEAD) PORT=8000 react-scripts start",
"build": "REACT_APP_VERSION=$(git rev-parse --short HEAD) react-scripts build",
"test": "react-scripts test",
"eject": "react-scripts eject",
"openapi": "openapi --input ../broker/jfjoch_api.yaml --output ./src/openapi"

View File

@@ -144,7 +144,8 @@ class App extends Component<MyProps, MyState> {
<center>Developed at Paul Scherrer Institute (2019-2024). Main author: <a
href="mailto:filip.leonarski@psi.ch">Filip Leonarski</a> <br/>
For more information see <a href="https://doi.org/10.1107/S1600577522010268"><i>J. Synchrotron
Rad.</i> (2023). <b>30</b>, 227234</a></center>
Rad.</i> (2023). <b>30</b>, 227234</a> <br/>
Build: {process.env.REACT_APP_VERSION}</center>
<br/>
</ThemeProvider>
}

View File

@@ -16,7 +16,8 @@ export enum PlotType {
STRONG_PIXELS,
ROI_SUM,
ROI_MAX_COUNT,
RES_ESTIMATION
RES_ESTIMATION,
RECEIVER_FREE_SEND_BUFS
}
type MyProps = {
@@ -122,6 +123,13 @@ class DataProcessingPlot extends Component<MyProps, MyState> {
this.setState({connection_error: true});
});
break;
case PlotType.RECEIVER_FREE_SEND_BUFS:
DefaultService.postPlotReceiverFreeSendBuffers({binning: this.props.binning})
.then(data => this.setState({plots: data, connection_error: false}))
.catch(error => {
this.setState({connection_error: true});
});
break;
case PlotType.ROI_SUM:
DefaultService.postPlotRoiSum({binning: this.props.binning})
.then(data => this.setState({plots: data, connection_error: false}))

View File

@@ -72,6 +72,9 @@ class DataProcessingPlots extends Component<MyProps, MyState> {
case "12":
this.setState({type: PlotType.RES_ESTIMATION, xlabel: "Resolution [&#8491;]", ylabel: "Number of crystals"});
break;
case "13":
this.setState({type: PlotType.RECEIVER_FREE_SEND_BUFS, xlabel: "Image number", ylabel: "Number of buffers"});
break;
}
};
@@ -104,6 +107,7 @@ class DataProcessingPlots extends Component<MyProps, MyState> {
<MenuItem value={10}>Strong pixels (for spot finding)</MenuItem>
<MenuItem value={7}>Image collection efficiency</MenuItem>
<MenuItem value={9}>Receiver delay (internal debugging)</MenuItem>
<MenuItem value={13}>Receiver free send buffers (internal debugging)</MenuItem>
</Select>
</FormControl>

View File

@@ -8,7 +8,7 @@ import type { rotation_axis } from './rotation_axis';
export type dataset_settings = {
/**
* For standard synchrotron data collection - this is number of images collected per one TTL trigger
* For XFEL mode - this number is ignored and set to 1
* For XFEL (pulsed source) - this number is ignored and set to 1
* For storage cell mode - this number is ignored and set to number of storage cells
*
*/
@@ -20,6 +20,10 @@ export type dataset_settings = {
ntrigger?: number;
/**
* FPGA frame summation. For summation above two 32-bit pixel format will be used, unless explicitly specified.
* Frame summation factor applies only to conversion mode (assumed as 1 for raw data).
* In XFEL mode: summation happens for frames collected with multiple triggers.
* Ignored for storage cells (assumed as 1).
*
*/
summation?: number;
/**

View File

@@ -659,6 +659,27 @@ export class DefaultService {
});
}
/**
* Generate receiver free send buffer plot
* Amount of send buffers available during frame processing - used for internal debugging; binning is configurable
* @param requestBody
* @returns plots Everything OK
* @throws ApiError
*/
public static postPlotReceiverFreeSendBuffers(
requestBody?: plot_request,
): CancelablePromise<plots> {
return __request(OpenAPI, {
method: 'POST',
url: '/plot/receiver_free_send_buffers',
body: requestBody,
mediaType: 'application/json',
errors: {
400: `Input parsing or validation error`,
},
});
}
/**
* Generate image collection efficiency plot
* Ratio of collected and expected packets per image; binning is configurable

View File

@@ -1 +0,0 @@

View File

@@ -1,173 +0,0 @@
# Copyright (2019-2023) Paul Scherrer Institute
#
import requests
class DetectorClient:
def __init__(self, hostname: str, port: int):
self.__addr = "http://{}:{}/".format(hostname, port)
def __post_request(self, prefix: str, content: dict) -> None:
api_url = self.__addr + "/" + prefix
response = requests.post(api_url, json=content)
response.raise_for_status()
def __put_request(self, prefix: str, content: dict) -> None:
api_url = self.__addr + "/" + prefix
response = requests.put(api_url, json=content)
response.raise_for_status()
def __get_request(self, prefix: str) -> dict:
api_url = self.__addr + "/" + prefix
response = requests.get(api_url)
response.raise_for_status()
return response.json()
def initialize(self) -> None:
"""
Need to be performed before starting measurement.
It is also used to recover detector from error condition.
Initialize will execute calibration procedure.
"""
self.__post_request("/detector/initialize", dict())
def deactivate(self) -> None:
"""
Prepare the detector to be turned off (turn off high voltage and ASIC).
"""
self.__post_request("/detector/deactivate", dict())
def pedestal(self) -> None:
"""
Calibration procedure
"""
self.__post_request("/detector/pedestal", dict())
def start(self, dataset_settings: dict) -> None:
"""
Start measurement
:param dataset_settings: Dictionary containing dataset specific parameters:
["images_per_trigger"] - number of images collected per trigger;
["ntrigger"] - number of triggers detector should expect;
["summation"] or ["image_time_us"] - relation between internal frame time and image time
(use only one of the two options; if image time is used, it has to be multiple of frame time);
["beam_x_pxl"] - beam center location on X-axis in pixels;
["beam_y_pxl"] - beam center location on Y-axis in pixels;
["detector_distance_mm"] - detector distance from the sample in mm;
["photon_energy_keV"] - photon energy;
["file_prefix"] - prefix for HDF5 files (empty == no files are written, e.g. for preview);
["data_file_count"] - split data frames into N-files on a round-robin basis;
["compression"] - algorithm for compression (NO_COMPRESSION, BSHUF_LZ4, BSHUF_ZSTD, and BSHUF_ZSTD_RLE);
["sample_name"] - name of sample, can work as merge ID;
["unit_cell"] - unit cell (dictionary with a, b, c, alpha, beta, gamma);
["scattering vector"] - optional (default is [0, 0, 1], array of 3 floats
"""
self.__post_request("/detector/start", dataset_settings)
def trigger(self) -> None:
"""
Send soft trigger to the detector
"""
self.__post_request("/detector/trigger", dict())
def stop(self) -> None:
"""
Block till measurement is finished (it is not necessary to call stop to start next measurement).
"""
self.__post_request("/detector/stop", dict())
def cancel(self) -> None:
"""
Cancel running data collection.
"""
self.__post_request("/detector/cancel", dict())
@property
def detector_settings(self) -> dict:
"""
Get settings of the detector that require to redo the calibration
:return Dictionary with detector settings (see setter for more information)
"""
return self.__get_request("/detector/settings")
@detector_settings.setter
def detector_settings(self, settings: dict):
"""
Put settings of the detector that require to redo the calibration.
Can only be done when detector is idle and change will trigger pedestal calibration.
:param settings: Dictionary with the following entries:
["frame_time_us"] - internal frame time (recommended is using only 1000 and 500 us values);
["count_time_us"] - count time (must be less by at least 20 us than frame time, zero means the longest possible
for the given frame time);
["use_storage_cells"] - when True enables usage of all 16 storage cells;
["use_internal_packet_generator"] - take frames from the internal packet generator of the FPGA
(to test FPGA if detector is not present);
["collect_raw_data"] - when True JUNGFAU images won't be converted to photon counts;
["pedestal_g0_frames"] - number of frames used to calculate G0 pedestal
(optional, if not provided current value won't be changed);
["pedestal_g1_frames"] - number of frames used to calculate G1 pedestal
(optional, if not provided current value won't be changed);
["pedestal_g2_frames"] - number of frames used to calculate G2 pedestal
(optional, if not provided current value won't be changed);
"""
self.__put_request("/detector/settings", settings)
@property
def data_processing_settings(self) -> dict:
"""
Get settings of the data processing
:return Dictionary with data processing settings (see setter for more information)
"""
return self.__get_request("/data_processing/settings")
@data_processing_settings.setter
def data_processing_settings(self, settings: dict):
"""
Configure real-time data processing settings, safe to do anytime, including during measurement
:param settings: Dictionary with the following entries:
"""
self.__put_request("/data_processing/settings", settings)
@property
def last_measurement_stats(self):
"""
Get statistics for the last measurement
:return Dictionary with the following entries:
["file_prefix"] - file prefix for data collection;
["images_collected"] - number of images successfully collected;
["max_image_number_sent"] - the highest image number
(can differ from image number, if images were lost during collection);
["collection_efficiency"] - fraction of expected packets collected (1.0 is all good);
["compression_ratio"] - compression ratio of the dataset;
["cancelled"] - if cancellation happened during data collection;
["max_receive_delay"] - number of frames receiver was late vs. detector - large number means there were problems;
["writer_performance_MBs"] - total writing speed (only present if files were written);
["images_written"] - number of images written (should be the same as images_collected,
only present if files were written);
["indexing_rate"] - indexing rate from real-time indexing;
["indexing_performance_ms"] - average time to index a single image;
"""
return self.__get_request("/detector/last_measurement")
@property
def status(self):
"""
Get detector status:
:return Dictionary with the following entries:
["broker_state"] - current state: NOT_INITIALIZED, IDLE, BUSY, PEDESTAL, DATA_COLLECTION, ERROR;
["progress'] - data collection progress in percent (only if measurement running);
["indexing_rate"] - instantaneous indexing rate from real-time indexing (only if measurement running);
"""
return self.__get_request("/detector/status")

View File

@@ -1,121 +0,0 @@
# Copyright (2019-2023) Paul Scherrer Institute
#
import h5py
import sys
import gemmi
def wvl_a_to_ev(val: float) -> float:
"""
Convert wavelength in A to photon energy in eV
"""
return 12398.4 / val
def extract_metadata_nxmx(nxmx_file: str) -> dict:
"""
Create a dictionary of metadata based on HDF5 master file
"""
output = {}
with h5py.File(nxmx_file, "r") as f:
output["filename"] = nxmx_file
output["photon_energy_eV"] = wvl_a_to_ev(
f["/entry/instrument/beam/incident_wavelength"][()]
)
output["pixel_size_m"] = f["/entry/instrument/detector/x_pixel_size"][()]
if "/entry/instrument/detector/underload_value" in f:
output["underload"] = f["/entry/instrument/detector/underload_value"][()]
output["saturation"] = f["/entry/instrument/detector/saturation_value"][()]
output["detector_distance_m"] = f[
"/entry/instrument/detector/detector_distance"
][()]
output["beam_center_x"] = f["/entry/instrument/detector/beam_center_x"][()]
output["beam_center_y"] = f["/entry/instrument/detector/beam_center_y"][()]
output["width"] = f["/entry/data/data"].shape[2]
output["height"] = f["/entry/data/data"].shape[1]
output["image_time_s"] = f["/entry/instrument/detector/frame_time"][()]
output["sample_name"] = f["/entry/sample/name"][()]
if "/entry/sample/unit_cell" in f:
output["unit_cell"] = f["/entry/sample/unit_cell"][:6]
if "/entry/sample/space_group" in f:
output["space_group"] = f["/entry/sample/space_group"]
return output
def nxmx_to_geom(metadata_nxmx: dict, output_file: str) -> None:
"""
Write dictionary generated by extract_metadata_nxmx to CrystFEL geometry file
"""
output = {}
output["photon_energy"] = "{:f}".format(metadata_nxmx["photon_energy_eV"])
output["adu_per_eV"] = "{:.10f}".format(1 / metadata_nxmx["photon_energy_eV"])
output["clen"] = "{:f}".format(metadata_nxmx["detector_distance_m"])
output["res"] = "{:f}".format(1.0 / metadata_nxmx["pixel_size_m"])
if "underload" in metadata_nxmx:
output["flag_lessthen"] = metadata_nxmx["underload"] + 1
output["rigid_group_0"] = 0
output["rigid_group_collection_0"] = 0
output["data"] = "/entry/data/data"
output["dim0"] = "%"
output["dim1"] = "ss"
output["dim2"] = "fs"
output["mask_file"] = metadata_nxmx["filename"]
output["mask"] = "/entry/instrument/detector/pixel_mask"
output["mask_good"] = "0x0"
output["mask_bad"] = "0xffffffff"
output["0/min_fs"] = 0
output["0/min_ss"] = 0
output["0/max_fs"] = metadata_nxmx["width"]
output["0/max_ss"] = metadata_nxmx["height"]
output["0/corner_x"] = "{:f}".format(-1.0 * metadata_nxmx["beam_center_x"])
output["0/corner_y"] = "{:f}".format(-1.0 * metadata_nxmx["beam_center_y"])
output["0/fs"] = "x"
output["0/ss"] = "y"
with open(output_file, "w") as f:
for i in output:
f.write("{} = {}\n".format(i, output[i]))
def nxmx_to_cell(metadata_nxmx: dict, output_file: str):
"""
Write dictionary generated by extract_metadata_nxmx to CrystFEL cell file (PDB)
"""
unit_cell_str = "{:9.3f}{:9.3f}{:9.3f}{:7.2f}{:7.2f}{:7.2f}".format(
*metadata_nxmx["unit_cell"]
)
if "space_group" in metadata_nxmx:
space_group_str = gemmi.find_spacegroup_by_number(
metadata_nxmx["space_group"]
).hm
else:
space_group_str = gemmi.find_spacegroup_by_number(1).hm
with open(output_file, "w") as f:
f.write("CRYST1{:s} {:11s}{:4d}\n".format(unit_cell_str, space_group_str, 1))
if __name__ == "__main__":
if (len(sys.argv) != 4) and (len(sys.argv) != 3):
print(
"Usage ./jungfraujoch_metadata.py <JUNGFRAU/EIGER master file> <CrystFEL output file> {<cell file>}"
)
sys.exit(1)
else:
metadata = extract_metadata_nxmx(sys.argv[1])
nxmx_to_geom(metadata, sys.argv[2])
if len(sys.argv) == 4:
nxmx_to_cell(metadata, sys.argv[3])

View File

@@ -1,99 +0,0 @@
# Uses DECTRIS Stream2 example
import zmq
import numpy as np
import sys
import signal
import cbor2
import time
from dectris.compression import decompress
from tifffile import imwrite
TERMINATE = False
def decode_multi_dim_array(tag, column_major):
dimensions, contents = tag.value
if isinstance(contents, list):
array = np.empty((len(contents),), dtype=object)
array[:] = contents
elif isinstance(contents, (np.ndarray, np.generic)):
array = contents
else:
raise cbor2.CBORDecodeValueError("expected array or typed array")
return array.reshape(dimensions, order="F" if column_major else "C")
def decode_typed_array(tag, dtype):
if not isinstance(tag.value, bytes):
raise cbor2.CBORDecodeValueError("expected byte string in typed array")
return np.frombuffer(tag.value, dtype=dtype)
def decode_dectris_compression(tag):
algorithm, elem_size, encoded = tag.value
return decompress(encoded, algorithm, elem_size=elem_size)
tag_decoders = {
40: lambda tag: decode_multi_dim_array(tag, column_major=False),
64: lambda tag: decode_typed_array(tag, dtype="u1"),
65: lambda tag: decode_typed_array(tag, dtype=">u2"),
66: lambda tag: decode_typed_array(tag, dtype=">u4"),
67: lambda tag: decode_typed_array(tag, dtype=">u8"),
68: lambda tag: decode_typed_array(tag, dtype="u1"),
69: lambda tag: decode_typed_array(tag, dtype="<u2"),
70: lambda tag: decode_typed_array(tag, dtype="<u4"),
71: lambda tag: decode_typed_array(tag, dtype="<u8"),
72: lambda tag: decode_typed_array(tag, dtype="i1"),
73: lambda tag: decode_typed_array(tag, dtype=">i2"),
74: lambda tag: decode_typed_array(tag, dtype=">i4"),
75: lambda tag: decode_typed_array(tag, dtype=">i8"),
77: lambda tag: decode_typed_array(tag, dtype="<i2"),
78: lambda tag: decode_typed_array(tag, dtype="<i4"),
79: lambda tag: decode_typed_array(tag, dtype="<i8"),
80: lambda tag: decode_typed_array(tag, dtype=">f2"),
81: lambda tag: decode_typed_array(tag, dtype=">f4"),
82: lambda tag: decode_typed_array(tag, dtype=">f8"),
83: lambda tag: decode_typed_array(tag, dtype=">f16"),
84: lambda tag: decode_typed_array(tag, dtype="<f2"),
85: lambda tag: decode_typed_array(tag, dtype="<f4"),
86: lambda tag: decode_typed_array(tag, dtype="<f8"),
87: lambda tag: decode_typed_array(tag, dtype="<f16"),
1040: lambda tag: decode_multi_dim_array(tag, column_major=True),
56500: lambda tag: decode_dectris_compression(tag),
}
def tag_hook(decoder, tag):
tag_decoder = tag_decoders.get(tag.tag)
return tag_decoder(tag) if tag_decoder else tag
def signal_handler(signal, frame):
    """SIGINT handler: flag the main receive loop to exit cleanly."""
    global TERMINATE
    TERMINATE = True
# Subscribe to every message on the local stream; recv() times out after 20 s
# so the loop can notice TERMINATE set by the SIGINT handler.
signal.signal(signal.SIGINT, signal_handler)
context = zmq.Context()
socket = context.socket(zmq.SUB)
socket.RCVTIMEO = 20000  # in milliseconds
socket.connect("tcp://127.0.0.1:5400")
socket.setsockopt(zmq.SUBSCRIBE, b"")
while not TERMINATE:
    try:
        msg = socket.recv()
        msg = cbor2.loads(msg, tag_hook=tag_hook)
        # Dump the default-channel image for external viewers.
        imwrite('test.tif', msg['data']['default'])
        title = msg['series_unique_id']
        beam_center_x = msg['beam_center_x']
        beam_center_y = msg['beam_center_y']
        detector_distance = msg['detector_distance']
        energy = msg['incident_energy']
        print(msg["spots"])
    except:
        # NOTE(review): bare except silently swallows everything (including
        # decode errors); presumably meant to ignore only recv timeouts.
        pass

View File

@@ -1,123 +0,0 @@
# Uses DECTRIS Stream2 example
import zmq
import numpy as np
import sys
sys.path.insert(0, "/opt/dectris/albula/4.1/bin")
sys.path.insert(0, "/opt/dectris/albula/4.1/python")
import dectris.albula as albula
from dectris.albula import DNoObject, DDrawingEllipse, DDrawingString, DDrawingLine
import signal
import cbor2
import time
from dectris.compression import decompress
from tifffile import imwrite
TERMINATE = False
def decode_multi_dim_array(tag, column_major):
    """Decode an RFC 8746 multi-dimensional array tag (40 row-major, 1040 column-major)."""
    dimensions, contents = tag.value
    if isinstance(contents, (np.ndarray, np.generic)):
        # Payload was already turned into an array by a nested typed-array tag.
        flat = contents
    elif isinstance(contents, list):
        # Plain CBOR array payload: keep the elements as python objects.
        flat = np.empty((len(contents),), dtype=object)
        flat[:] = contents
    else:
        raise cbor2.CBORDecodeValueError("expected array or typed array")
    order = "F" if column_major else "C"
    return flat.reshape(dimensions, order=order)
def decode_typed_array(tag, dtype):
    """Decode an RFC 8746 typed-array tag: the byte-string payload is
    reinterpreted as a 1-D numpy array of `dtype`."""
    if not isinstance(tag.value, bytes):
        raise cbor2.CBORDecodeValueError("expected byte string in typed array")
    return np.frombuffer(tag.value, dtype=dtype)
def decode_dectris_compression(tag):
    """Decode CBOR tag 56500: (algorithm, elem_size, encoded) triple handled
    by dectris.compression.decompress."""
    algorithm, elem_size, encoded = tag.value
    return decompress(encoded, algorithm, elem_size=elem_size)
# CBOR tag -> decoder mapping: RFC 8746 multi-dimensional arrays (40/1040),
# RFC 8746 typed arrays (64-87, mixed endianness), DECTRIS compression (56500).
tag_decoders = {
    40: lambda tag: decode_multi_dim_array(tag, column_major=False),
    64: lambda tag: decode_typed_array(tag, dtype="u1"),
    65: lambda tag: decode_typed_array(tag, dtype=">u2"),
    66: lambda tag: decode_typed_array(tag, dtype=">u4"),
    67: lambda tag: decode_typed_array(tag, dtype=">u8"),
    68: lambda tag: decode_typed_array(tag, dtype="u1"),
    69: lambda tag: decode_typed_array(tag, dtype="<u2"),
    70: lambda tag: decode_typed_array(tag, dtype="<u4"),
    71: lambda tag: decode_typed_array(tag, dtype="<u8"),
    72: lambda tag: decode_typed_array(tag, dtype="i1"),
    73: lambda tag: decode_typed_array(tag, dtype=">i2"),
    74: lambda tag: decode_typed_array(tag, dtype=">i4"),
    75: lambda tag: decode_typed_array(tag, dtype=">i8"),
    77: lambda tag: decode_typed_array(tag, dtype="<i2"),
    78: lambda tag: decode_typed_array(tag, dtype="<i4"),
    79: lambda tag: decode_typed_array(tag, dtype="<i8"),
    80: lambda tag: decode_typed_array(tag, dtype=">f2"),
    81: lambda tag: decode_typed_array(tag, dtype=">f4"),
    82: lambda tag: decode_typed_array(tag, dtype=">f8"),
    83: lambda tag: decode_typed_array(tag, dtype=">f16"),
    84: lambda tag: decode_typed_array(tag, dtype="<f2"),
    85: lambda tag: decode_typed_array(tag, dtype="<f4"),
    86: lambda tag: decode_typed_array(tag, dtype="<f8"),
    87: lambda tag: decode_typed_array(tag, dtype="<f16"),
    1040: lambda tag: decode_multi_dim_array(tag, column_major=True),
    56500: lambda tag: decode_dectris_compression(tag),
}
def tag_hook(decoder, tag):
    """cbor2 tag hook: apply a registered decoder, else return the tag unchanged."""
    tag_decoder = tag_decoders.get(tag.tag)
    return tag_decoder(tag) if tag_decoder else tag
def signal_handler(signal, frame):
    """SIGINT handler: flag the main receive loop to exit cleanly."""
    global TERMINATE
    TERMINATE = True
# Live viewer: receive CBOR frames over ZMQ and display them in ALBULA.
signal.signal(signal.SIGINT, signal_handler)
context = zmq.Context()
socket = context.socket(zmq.SUB)
socket.RCVTIMEO = 20000 # in milliseconds
socket.connect("tcp://127.0.0.1:5400")
socket.setsockopt(zmq.SUBSCRIBE, b"")
albulaMain = albula.openMainFrame()
albulaSubFrame = albulaMain.openSubFrame()
startTime = time.time()
# 75 um pixels; other optional metadata setters are commented out below.
OPTIONAL_DATA = albula.DImageOptionalData()
OPTIONAL_DATA.set_x_pixel_size(0.000075)
OPTIONAL_DATA.set_y_pixel_size(0.000075)
while not TERMINATE:
    try:
        print("a")
        msg = socket.recv()
        # Recycle the sub-frame once per hour (presumably to limit ALBULA
        # resource growth -- TODO confirm).
        if round(time.time() - startTime) >= 60 * 60:
            albulaSubFrame.close()
            albulaSubFrame = albulaMain.openSubFrame()
            startTime = time.time()
        print("b")
        msg = cbor2.loads(msg, tag_hook=tag_hook)
        print(msg)
        #OPTIONAL_DATA.set_wavelength(12398/msg["incident_energy"])
        #OPTIONAL_DATA.set_beam_center_x(msg['beam_center_x'])
        #OPTIONAL_DATA.set_beam_center_y(msg['beam_center_y'])
        #OPTIONAL_DATA.set_detector_distance(msg["detector_distance"])
        dimage = albula.DImage(msg['data']['default'])
        dimage.setOptionalData(OPTIONAL_DATA)
        albulaSubFrame.loadImage(dimage)
        #albulaSubFrame.setTitle("JUNGFRAU PREVIEW Dataset: %s Image: %d" % (msg["unique_series_id"], msg['number']))
    except:
        # NOTE(review): bare except swallows all errors, not just recv timeouts.
        pass

View File

@@ -1,28 +0,0 @@
# To use the preview
```bash
source /opt/conda/etc/profile.d/conda.sh
conda activate /mxn/groups/sw/mxsw/jfjoch/envs/cbor-py37-24a
./start_adxv.sh -p 8100
<ctrl+z>
bg
python preview.py "tcp://172.16.230.69:2345"
# port 2345: testing data
# port 5400: jf detector data
```
spot_type.txt can be edited to change spot color and spot size
# Attention
This can be run using `duo_user_name@clu0-fe-1` and `duo_user_name@b-micromax-cc-4`.
The paths hard-coded in the scripts may need to be adjusted for your environment.
# Credits
start_adxv.sh and adxvSocketh5.py are contributed by Jie Nan (Max IV)
# To-do
- more documentation
- more error feedback
- automate starting the adxv background process
- make the spot_type.txt file path more robust in the preview.py code

View File

@@ -1,192 +0,0 @@
"adxvSocketh5.py" 190L, 7371C
#!/usr/bin/env python
"""
Library for adxv-socketh5
"""
__author__ = "JieNan"
__date__ = "2022/05/12"
__version__ = "0.1"
import argparse
import socket
import os
#import h5py
import time
import numpy as np
class adxvSocketh5:
    """Thin client for driving an adxv viewer through its TCP socket protocol.

    Commands are plain text lines sent over a TCP connection. The object keeps
    just enough state to map a global image number onto the right HDF5 data
    container of an Eiger-style *_master.h5 / *_data_NNNNNN.h5 dataset.
    """

    def __init__(self, host="localhost", port=8100, spot_type_file=None):
        #def __init__(self, host="localhost", spot_type_file = None):
        self.adxv_sock = None          # created lazily in init()
        self.host = host
        # Fix: was hard-coded to 8100, silently ignoring the `port` argument.
        self.port = port
        self.cont_size = None          # images per data container (from master file)
        self.master_file = None        # path of the currently loaded *_master.h5
        self.img_num = None            # global image number within the dataset
        self.num_spots_groups = 0      # number of sets of spots
        self.spot_type_file = spot_type_file

    def __del__(self):
        # Best effort: never raise from a destructor.
        try:
            if self.adxv_sock is not None:
                self.adxv_sock.close()
        except:
            pass

    def init(self):
        """Connect to adxv and apply the initial display settings.

        Raises a generic Exception when the connection or initial commands fail.
        """
        try:
            self.adxv_sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
            print("init adxv socket...... {} {}".format(self.host, self.port))
            self.adxv_sock.connect((self.host, self.port))
            self.run_adxv_cmd("rings on")
            self.run_adxv_cmd("colors Heat")
            self.run_adxv_cmd("raise_window Magnify")
            self.clear_spot()
            self.set_spot_type()
        except:
            # Fix: close the half-open socket before reporting failure; the
            # close() used to sit after the raise and never executed.
            if self.adxv_sock is not None:
                self.adxv_sock.close()
                self.adxv_sock = None
            raise Exception("Init Error: Could not connect to adxv host")

    def set_spot_type(self):
        """Define spot display types from spot_type_file, if one was given.

        Each line of the file is: <type_id> <color> <radius>.
        """
        if self.spot_type_file:
            with open(self.spot_type_file) as fp:
                lines = fp.read().splitlines()
            for line in lines:
                tmp = line.split()
                color = tmp[1]
                radius = tmp[2]
                print("define_type {} color {} radius {}\n".format(tmp[0], color, radius))
                self.run_adxv_cmd("define_type {} color {} radius {}\n".format(tmp[0], color, radius))

    def run_adxv_cmd(self, cmd=""):
        """Send one newline-terminated command string to adxv."""
        self.adxv_sock.sendall("{}\n".format(cmd).encode())

    def get_h5_num(self, img_num):
        """Map a 1-based global image number to the data container holding it.

        Returns (container path, 1-based image number inside that container).
        """
        h5_img_num = (img_num - 1) % self.cont_size + 1
        h5_cont_num = int((img_num - 1) / self.cont_size) + 1
        h5_cont = self.master_file.replace("_master.h5", "_data_%06d.h5" % h5_cont_num)
        return h5_cont, h5_img_num

    def load_tiff(self, tiff_path):
        """Display a TIFF image in adxv."""
        weak_data = 1
        self.run_adxv_cmd("weak_data {}".format(weak_data))
        self.run_adxv_cmd("load_image {}".format(tiff_path))

    def load_image(self, slabs=1):
        """Display image self.img_num from the currently loaded dataset."""
        h5_cont, h5_img_num = self.get_h5_num(self.img_num)
        print("will try to load file {}".format(h5_cont))
        if not os.path.exists(h5_cont):
            raise Exception("Error: data file {} doesn't exist".format(h5_cont))
        weak_data = 1
        self.run_adxv_cmd("set_slab {}".format(h5_img_num))
        self.run_adxv_cmd("weak_data {}".format(weak_data))
        self.run_adxv_cmd("set_slabs {}".format(slabs))
        # Fix: this command was missing its closing parenthesis (SyntaxError).
        self.run_adxv_cmd("load_image {}".format(h5_cont))

    def add_spots(self, spots_sets, total_num_spots):
        """Overlay spots on the current image.

        spots_sets is a list of numpy arrays, each array is a set/group of
        spots; spots_sets[i][j][0] is pos_x, spots_sets[i][j][1] is pos_y.
        Each array is assigned its own spot group number, starting from 1.
        """
        self.num_spots_groups = len(spots_sets)
        self.run_adxv_cmd("load_spots {}\n".format(total_num_spots))
        spots_group = 1
        for spots in spots_sets:
            for spot in spots:
                self.run_adxv_cmd("{} {} {}\n".format(spot[0], spot[1], spots_group))
            spots_group += 1
        self.run_adxv_cmd("end_of_pack\n")

    def clear_spot(self):
        """Remove all spot overlays."""
        self.num_spots_groups = 0
        self.run_adxv_cmd("load_spots 0\n")
        self.run_adxv_cmd("end_of_pack\n")

    def save_img(self, img_file="", overwrite=False):
        """Ask adxv to save the current display to img_file."""
        if overwrite and os.path.exists(img_file):
            try:
                os.remove(img_file)
            except:
                raise Exception("Error: File {} already exists and cannot overwrite it, please check the permission".format(img_file))
        self.run_adxv_cmd("save_image {}".format(img_file))

    def set_color(self, color="Grey"):
        """Switch the adxv color map."""
        self.run_adxv_cmd("colors {}".format(color))

    def update_slabs(self, slabs=1):
        """Set the number of slabs shown by adxv."""
        self.run_adxv_cmd("slabs {}".format(slabs))

    def update_meta(self, master_file=""):
        """Read the container size from the master file and point adxv at it.

        Make sure the first data container is available before calling.
        NOTE(review): uses h5py, whose import is commented out at the top of
        this script -- confirm h5py is importable where this runs.
        """
        data_h5 = master_file.replace("_master.h5", "_data_000001.h5")
        if not os.path.exists(data_h5):
            raise Exception("Error: {} doesn't exist...".format(data_h5))
        self.master_file = master_file
        with h5py.File(master_file, 'r') as f:
            self.cont_size = f['/entry/data/data_000001'].shape[0]
        # update the meta info in adxv
        self.run_adxv_cmd("load_image {}".format(self.master_file))

    def load_image_from_h5(self, h5_cont, img_num):
        """Display image img_num from a specific data container."""
        self.run_adxv_cmd("set_slab {}".format(img_num))
        self.run_adxv_cmd("load_image {}".format(h5_cont))

    def load_image_from_master(self, master_file, img_num=1):
        """Display image img_num from the dataset described by master_file."""
        if not os.access(master_file, os.R_OK):
            raise Exception("Error: No access to master file {}".format(master_file))
        self.update_meta(master_file=master_file)
        self.img_num = img_num
        self.load_image()

    def read_spots_from_file(self, spots_file):
        """Load an .adx spot list into an integer numpy array."""
        try:
            with open(spots_file) as fp:
                lines = fp.read().splitlines()
            spots = np.loadtxt(lines)
        except Exception as ex:
            raise Exception("Error when reading spots from file {}".format(spots_file))
        return spots.astype(int)
def save_sptxt(tiff_path):
    """Placeholder for writing a spot-list sidecar for `tiff_path` (not implemented)."""
    return None
def parseArgs():
    """
    parse user input and return arguments
    """
    # All options are optional; defaults leave the file/number arguments unset
    # and the adxv socket on port 8100.
    parser = argparse.ArgumentParser(description = "test adxv lib")
    parser.add_argument("-m", "--master_file", help="master file", type=str, default=None)
    parser.add_argument("-n", "--img_num", help="image number", type=int, default=None)
    parser.add_argument("-sp1", "--spot_file1", help="spot file1", type=str, default=None)
    parser.add_argument("-sp2", "--spot_file2", help="spot file2", type=str, default=None)
    parser.add_argument("-s", "--socket", help="socket number", type=int, default=8100)
    return parser.parse_args()
if __name__ == "__main__":
    # Smoke test: connect to adxv, show a TIFF and overlay two spot lists.
    args = parseArgs()
    adxv = adxvSocketh5(port = args.socket, spot_type_file = "spot_type.txt")
    # adxv = adxvSocketh5(spot_type_file = "spot_type.txt")
    adxv.init()
    # spots_file = "t1.adx"
    # spots_file2 = "t3.adx"
    # h5_cont = "/data/visitors/micromax/20231830/tests/ref_data/j4m_lys_2k/Lyso-2kHz_1_data_000001.h5"
    spots_file = "/data/visitors/micromax/20231830/tests/dawn/preview/noindexed.adx"
    spots_file2 = "/data/visitors/micromax/20231830/tests/dawn/preview/indexed.adx"
    # adxv.load_image_from_h5(h5_cont,1)
    tiff_path = "/data/visitors/micromax/20231830/tests/dawn/preview/test.tif"
    adxv.load_tiff(tiff_path)
    # Collect both spot files into one pack; each file becomes its own group.
    spots_sets_list=[]
    total_spots_num =0
    spots = adxv.read_spots_from_file(spots_file)
    spots_sets_list.append(spots)
    total_spots_num += spots.shape[0]
    spots = adxv.read_spots_from_file(spots_file2)
    spots_sets_list.append(spots)
    total_spots_num += spots.shape[0]
    adxv.add_spots(spots_sets_list, total_spots_num)
    print(len(spots_sets_list), total_spots_num)

View File

@@ -1,168 +0,0 @@
# Uses DECTRIS Stream2 example
import zmq
import numpy as np
import sys
import subprocess
import os
import signal
import cbor2
import time
from dectris.compression import decompress
from tifffile import imwrite
import argparse
import socket
sys.path.append('/data/visitors/micromax/20231830/tests/sw/adxv_live_view')
import adxvSocketh5 #as jna5
from adxvSocketh5 import adxvSocketh5
TERMINATE = False
def decode_multi_dim_array(tag, column_major):
    """Decode an RFC 8746 multi-dimensional array tag (40 = row-major,
    1040 = column-major) into a numpy array with the tagged shape."""
    dimensions, contents = tag.value
    if isinstance(contents, list):
        # Plain CBOR array payload: keep the elements as python objects.
        array = np.empty((len(contents),), dtype=object)
        array[:] = contents
    elif isinstance(contents, (np.ndarray, np.generic)):
        # Payload was already decoded by a nested typed-array tag.
        array = contents
    else:
        raise cbor2.CBORDecodeValueError("expected array or typed array")
    return array.reshape(dimensions, order="F" if column_major else "C")
def decode_typed_array(tag, dtype):
    """Decode an RFC 8746 typed-array tag: the byte-string payload is
    reinterpreted as a 1-D numpy array of `dtype`."""
    if not isinstance(tag.value, bytes):
        raise cbor2.CBORDecodeValueError("expected byte string in typed array")
    return np.frombuffer(tag.value, dtype=dtype)
def decode_dectris_compression(tag):
    """Decode CBOR tag 56500: (algorithm, elem_size, encoded) triple handled
    by dectris.compression.decompress."""
    algorithm, elem_size, encoded = tag.value
    return decompress(encoded, algorithm, elem_size=elem_size)
def tag_hook(decoder, tag):
    """cbor2 tag hook: apply a registered decoder, else return the tag unchanged.

    `tag_decoders` is defined below this function; the name is resolved at
    call time, so the ordering is fine.
    """
    tag_decoder = tag_decoders.get(tag.tag)
    return tag_decoder(tag) if tag_decoder else tag
# CBOR tag -> decoder mapping: RFC 8746 multi-dimensional arrays (40/1040),
# RFC 8746 typed arrays (64-87, mixed endianness), DECTRIS compression (56500).
tag_decoders = {
    40: lambda tag: decode_multi_dim_array(tag, column_major=False),
    64: lambda tag: decode_typed_array(tag, dtype="u1"),
    65: lambda tag: decode_typed_array(tag, dtype=">u2"),
    66: lambda tag: decode_typed_array(tag, dtype=">u4"),
    67: lambda tag: decode_typed_array(tag, dtype=">u8"),
    68: lambda tag: decode_typed_array(tag, dtype="u1"),
    69: lambda tag: decode_typed_array(tag, dtype="<u2"),
    70: lambda tag: decode_typed_array(tag, dtype="<u4"),
    71: lambda tag: decode_typed_array(tag, dtype="<u8"),
    72: lambda tag: decode_typed_array(tag, dtype="i1"),
    73: lambda tag: decode_typed_array(tag, dtype=">i2"),
    74: lambda tag: decode_typed_array(tag, dtype=">i4"),
    75: lambda tag: decode_typed_array(tag, dtype=">i8"),
    77: lambda tag: decode_typed_array(tag, dtype="<i2"),
    78: lambda tag: decode_typed_array(tag, dtype="<i4"),
    79: lambda tag: decode_typed_array(tag, dtype="<i8"),
    80: lambda tag: decode_typed_array(tag, dtype=">f2"),
    81: lambda tag: decode_typed_array(tag, dtype=">f4"),
    82: lambda tag: decode_typed_array(tag, dtype=">f8"),
    83: lambda tag: decode_typed_array(tag, dtype=">f16"),
    84: lambda tag: decode_typed_array(tag, dtype="<f2"),
    85: lambda tag: decode_typed_array(tag, dtype="<f4"),
    86: lambda tag: decode_typed_array(tag, dtype="<f8"),
    87: lambda tag: decode_typed_array(tag, dtype="<f16"),
    1040: lambda tag: decode_multi_dim_array(tag, column_major=True),
    56500: lambda tag: decode_dectris_compression(tag),
}
# Connect to the stream address given on the command line and prepare adxv.
if len(sys.argv) != 2:
    print("Please provide address: python preview.py <address>")
    sys.exit(1)
address = sys.argv[1]
context = zmq.Context()
socket = context.socket(zmq.SUB)
socket.RCVTIMEO = 20000 # in milliseconds
socket.connect(address) # plug in the address here, taken from sys.argv[1]
#socket.connect("tcp://127.0.0.1:5400")
socket.setsockopt(zmq.SUBSCRIBE, b"")
#===========trying to start adxv in the background listening to images=======
#command = "/data/visitors/micromax/20231830/tests/sw/adxv_live_view/start_adxv.sh -p 8100"
#background_process = subprocess.Popen(f"{command} &", shell=True, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
#try:
# Simulate Ctrl + Z by sending SIGTSTP to the subprocess
# os.kill(background_process.pid, signal.SIGTSTP)
#except KeyboardInterrupt:
# Handle Ctrl + C if needed
# pass
# Optionally, capture the output and error (if needed)
#output, error = background_process.communicate()
# Check if there was an error
#if background_process.returncode != 0:
# print(f"Error occurred: {error.decode('utf-8')}")
#else:
# print("Command started in the background.")
#===========trying to start adxv in the background listening to images=======
# adxv must already be running with its socket server (see start_adxv.sh).
adxv = adxvSocketh5(port = 8100, spot_type_file = "/data/visitors/micromax/20231830/tests/sw/adxv_live_view/spot_type.txt")
adxv.init()
print("adxv init...")
spots_file = "noindexed.adx"
spots_file2 = "indexed.adx"
tiff_path = "/data/visitors/micromax/20231830/tests/dawn/preview/test.tif"
# Main loop: decode each frame, dump it as TIFF, split the spot list into
# indexed/non-indexed .adx files, then push image + spots to adxv.
while not TERMINATE:
    try:
        msg = socket.recv()
        msg = cbor2.loads(msg, tag_hook=tag_hook)
        imwrite('test.tif', msg['data']['default'])
        title = msg['series_unique_id']
        beam_center_x = msg['beam_center_x']
        beam_center_y = msg['beam_center_y']
        detector_distance = msg['detector_distance']
        energy = msg['incident_energy']
        # Sort spots by indexing status: file0 = indexed, file1 = not indexed.
        with open('indexed.adx', 'w') as file0, open('noindexed.adx', 'w') as file1:
            for i in range(len(msg["spots"])):
                index_status=msg["spots"][i]["indexed"]
                if index_status:
                    #print("indexed: ",msg["spots"][i])
                    # Write i string to the file
                    x0=int(msg["spots"][i]["x"])
                    y0=int(msg["spots"][i]["y"])
                    ind0=float(msg["spots"][i]["I"])
                    thing_to_write0 = "{} {} {} 1 1 \n".format(x0, y0, ind0)
                    file0.write(thing_to_write0)
                else:
                    # Write a string to the file
                    x1=str(msg["spots"][i]["x"])
                    y1=str(msg["spots"][i]["y"])
                    ind1=str(msg["spots"][i]["I"])
                    thing_to_write1 = "{} {} {} 1 1 \n".format(x1, y1, ind1)
                    file1.write(thing_to_write1)
        print('writes')
        #print(msg["spots"][1])
        #print(msg["spots"][3])
        #print(len(msg["spots"]))
        # Display the freshly written TIFF, then overlay both spot groups.
        adxv.load_tiff(tiff_path)
        spots_sets_list=[]
        total_spots_num =0
        spots = adxv.read_spots_from_file(spots_file)
        spots_sets_list.append(spots)
        total_spots_num += spots.shape[0]
        spots = adxv.read_spots_from_file(spots_file2)
        spots_sets_list.append(spots)
        total_spots_num += spots.shape[0]
        adxv.add_spots(spots_sets_list, total_spots_num)
        #print(len(spots_sets_list), total_spots_num)
        print("Running try")
    except:
        # NOTE(review): bare except hides all failures, not only recv timeouts.
        print("Running except")
        pass

View File

@@ -1,2 +0,0 @@
1 white 10
2 green 20

View File

@@ -1,29 +0,0 @@
# Start adxv with its socket server, retrying on successive ports when the
# requested one is busy, then enlarge the adxv window.
pkill -9 -f "adxv -socket"
source /mxn/groups/sw/mxsw/env_setup/adxv_env.sh
trial=1
port=8100
pids=""
# Try up to 10 consecutive ports starting at 8100.
while [ $trial -le 10 ]
do
adxv -socket $port tau1_000001.cbf 2> ~/.adxv_err &
sleep 1
# adxv reports a busy port on stderr; detect it there.
err=`grep "cannot bind socket" ~/.adxv_err`
pids="$pids $!"
if [ -z "$err" ]
then
echo "start adxv with port $port"
break
fi
echo "$port is used already, will try another port"
port=`expr $port + 1`
trial=`expr $trial + 1`
done
#echo $pids
# NOTE(review): $! was already appended inside the loop; this records the
# same PID again -- confirm whether the duplicate is intended.
pids="$pids $!"
#trap "kill -15 $pids" 2 15
#change adxv window size
echo "will change adxv window size"
sleep 5
xdotool search --onlyvisible --name "Adxv -" |awk '{system("xdotool windowsize "$1 " 1000 1000")}'
wait

View File

@@ -1,4 +1,4 @@
// Copyright (2019-2023) Paul Scherrer Institute
// Copyright (2019-2024) Paul Scherrer Institute
#include "JFJochReceiver.h"
@@ -8,32 +8,23 @@
#include "../common/DiffractionGeometry.h"
#include "ImageMetadata.h"
#include "../resonet/NeuralNetResPredictor.h"
// Format a time point as ISO-8601 UTC with millisecond precision,
// e.g. "2024-04-14T19:47:06.123Z".
inline std::string time_UTC(const std::chrono::time_point<std::chrono::system_clock> &input) {
    const auto time_ms = std::chrono::duration_cast<std::chrono::milliseconds>(input.time_since_epoch()).count();
    char date_part[255], frac_part[255];
    time_t seconds = time_ms / 1000;
    struct tm tm_utc{};
    gmtime_r(&seconds, &tm_utc);   // thread-safe variant of gmtime()
    strftime(date_part, sizeof(date_part), "%FT%T", &tm_utc);
    // time_ms % 1000 is milliseconds, so pad to exactly 3 digits; the original
    // ".%06ld" rendered 42 ms as ".000042", which reads as microseconds.
    snprintf(frac_part, sizeof(frac_part), ".%03ld", static_cast<long>(time_ms % 1000));
    return std::string(date_part) + std::string(frac_part) + "Z";
}
#include "../common/time_utc.h"
JFJochReceiver::JFJochReceiver(const DiffractionExperiment& in_experiment,
const JFCalibration *in_calibration,
AcquisitionDeviceGroup &in_aq_device,
ImagePusher &in_image_sender,
Logger &in_logger, int64_t in_forward_and_sum_nthreads,
ImagePusher* in_preview_publisher,
const NUMAHWPolicy &in_numa_policy,
const SpotFindingSettings &in_spot_finding_settings) :
const SpotFindingSettings &in_spot_finding_settings,
SendBuffer &buf) :
experiment(in_experiment),
calibration(in_calibration),
send_buf_ctrl(experiment, buf),
acquisition_device(in_aq_device),
logger(in_logger),
image_pusher(in_image_sender),
frame_transformation_nthreads(in_forward_and_sum_nthreads),
preview_publisher(in_preview_publisher),
ndatastreams(experiment.GetDataStreamsNum()),
data_acquisition_ready(ndatastreams),
frame_transformation_ready((experiment.GetImageNum() > 0) ? frame_transformation_nthreads : 0),
@@ -64,26 +55,11 @@ JFJochReceiver::JFJochReceiver(const DiffractionExperiment& in_experiment,
logger.Debug("Acquisition device {} prepared", d);
}
for (int d = 0; d < ndatastreams; d++)
data_acquisition_futures.emplace_back(std::async(std::launch::async, &JFJochReceiver::AcquireThread,
this, d));
logger.Info("Data acquisition devices ready");
if (experiment.GetImageNum() > 0) {
StartMessage message{};
experiment.FillMessage(message);
message.arm_date = time_UTC(std::chrono::system_clock::now());
message.az_int_bin_number = az_int_mapping.GetBinNumber();
message.az_int_bin_to_q = az_int_mapping.GetBinToQ();
FillStartMessageMask(message);
if (preview_publisher != nullptr)
preview_publisher->StartDataCollection(message);
FillStartMessageCalibration(message);
if (push_images_to_writer)
image_pusher.StartDataCollection(message);
SendStartMessage();
SendCalibration();
for (int i = 0; i < experiment.GetImageNum(); i++)
images_to_go.Put(i);
@@ -99,20 +75,58 @@ JFJochReceiver::JFJochReceiver(const DiffractionExperiment& in_experiment,
logger.Info("Image compression/forwarding threads ready");
}
for (int d = 0; d < ndatastreams; d++)
data_acquisition_futures.emplace_back(std::async(std::launch::async, &JFJochReceiver::AcquireThread,
this, d));
data_acquisition_ready.wait();
start_time = std::chrono::system_clock::now();
measurement = std::async(std::launch::async, &JFJochReceiver::FinalizeMeasurement, this);
logger.Info("Receiving data started");
}
void JFJochReceiver::FillStartMessageCalibration(StartMessage &message) {
if ((calibration == nullptr) || !experiment.GetSaveCalibration())
void JFJochReceiver::SendStartMessage() {
if (!push_images_to_writer)
return;
StartMessage message{};
experiment.FillMessage(message);
message.arm_date = time_UTC(std::chrono::system_clock::now());
message.az_int_bin_number = az_int_mapping.GetBinNumber();
message.az_int_bin_to_q = az_int_mapping.GetBinToQ();
message.write_master_file = true;
std::vector<uint32_t> nexus_mask;
if (calibration != nullptr) {
nexus_mask = calibration->CalculateNexusMask(experiment, 0);
size_t xpixel = experiment.GetXPixelsNum();
size_t ypixel = experiment.GetYPixelsNum();
CompressedImage image{
.data = (uint8_t *) nexus_mask.data(),
.size = nexus_mask.size() * sizeof(nexus_mask[0]),
.xpixel = xpixel,
.ypixel = ypixel,
.pixel_depth_bytes = sizeof(nexus_mask[0]),
.pixel_is_signed = false,
.pixel_is_float = false,
.algorithm = CompressionAlgorithm::NO_COMPRESSION,
.channel = "default"
};
message.AddPixelMask(image);
}
image_pusher.StartDataCollection(message);
}
void JFJochReceiver::SendCalibration() {
if ((calibration == nullptr) || !experiment.GetSaveCalibration() || !push_images_to_writer)
return;
JFJochBitShuffleCompressor compressor(CompressionAlgorithm::BSHUF_LZ4);
size_t xpixel = experiment.GetXPixelsNum();
size_t ypixel = experiment.GetYPixelsNum();
size_t xpixel = RAW_MODULE_COLS;
size_t ypixel = experiment.GetModulesNum() * RAW_MODULE_LINES;
for (int sc = 0; sc < experiment.GetStorageCellNumber(); sc++) {
for (int gain = 0; gain < 3; gain++) {
@@ -133,36 +147,11 @@ void JFJochReceiver::FillStartMessageCalibration(StartMessage &message) {
.algorithm = CompressionAlgorithm::BSHUF_LZ4,
.channel = channel
};
message.AddCalibration(image);
image_pusher.SendCalibration(image);
}
}
}
void JFJochReceiver::FillStartMessageMask(StartMessage &message) {
if (calibration == nullptr)
return;
auto nexus_mask = calibration->CalculateNexusMask(experiment, 0);
size_t xpixel = experiment.GetXPixelsNum();
size_t ypixel = experiment.GetYPixelsNum();
CompressedImage image{
.data = (uint8_t *) nexus_mask.data(),
.size = nexus_mask.size() * sizeof(nexus_mask[0]),
.xpixel = xpixel,
.ypixel = ypixel,
.pixel_depth_bytes = sizeof(nexus_mask[0]),
.pixel_is_signed = false,
.pixel_is_float = false,
.algorithm = CompressionAlgorithm::NO_COMPRESSION,
.channel = "default"
};
image.Save();
message.AddPixelMask(image);
}
void JFJochReceiver::AcquireThread(uint16_t data_stream) {
try {
NUMAHWPolicy::RunOnNode(acquisition_device[data_stream].GetNUMANode());
@@ -282,7 +271,6 @@ void JFJochReceiver::FrameTransformationThread() {
#endif
FrameTransformation transformation(experiment);
std::vector<char> writer_buffer(experiment.GetMaxCompressedSize());
MXAnalyzer analyzer(experiment);
@@ -350,6 +338,7 @@ void JFJochReceiver::FrameTransformationThread() {
bool indexed = analyzer.Process(message, local_spot_finding_settings);
message.receiver_free_send_buf = send_buf_ctrl.GetAvailBufLocations();
message.az_int_profile = az_int_profile_image.GetResult();
message.bkg_estimate = az_int_profile_image.GetMeanValueOfBins(az_int_min_bin, az_int_max_bin);
@@ -363,28 +352,34 @@ void JFJochReceiver::FrameTransformationThread() {
}
plots.Add(message, image_number % experiment.GetTimePointNumber(), az_int_profile_image);
message.image = transformation.GetCompressedImage();
compressed_size += message.image.size;
if (preview_counter.GeneratePreview()) {
if (preview_counter.GeneratePreview())
preview_image.UpdateImage(transformation.GetImage(), message.spots);
if (preview_publisher)
preview_publisher->SendImage(message);
}
if (indexed) {
if (preview_counter_indexed.GeneratePreview())
if (indexed && preview_counter_indexed.GeneratePreview())
preview_image_indexed.UpdateImage(transformation.GetImage(), message.spots);
}
if (push_images_to_writer) {
if (image_pusher.SendImage(message)) {
auto loc = send_buf_ctrl.GetBufLocation();
if (loc != nullptr) {
auto writer_buffer = (uint8_t *)loc->get_ptr();
CBORStream2Serializer serializer(writer_buffer, experiment.GetSendBufferLocationSize());
PrepareCBORImage(message, experiment, nullptr, 0);
serializer.SerializeImage(message);
if (experiment.GetSendBufferLocationSize() - serializer.GetImageAppendOffset()
< experiment.GetMaxCompressedSize())
throw JFJochException(JFJochExceptionCategory::ArrayOutOfBounds,
"Not enough memory to save image");
size_t image_size = transformation.CompressImage(writer_buffer + serializer.GetImageAppendOffset());
serializer.AppendImage(image_size);
compressed_size += image_size;
image_pusher.SendImage(writer_buffer, serializer.GetBufferSize(), image_number, loc);
images_sent++; // Handle case when image not sent properly
UpdateMaxImage(image_number);
}
}
if (!metadata.IsBunchIDConsistent())
logger.Warning("Bunch ID inconsistent for image {}", image_number);
images_collected++;
logger.Debug("Frame transformation thread - done sending image {}", image_number);
} catch (const JFJochException &e) {
@@ -458,7 +453,6 @@ void JFJochReceiver::FinalizeMeasurement() {
message.max_receiver_delay = max_delay;
message.efficiency = GetEfficiency();
message.end_date = time_UTC(std::chrono::system_clock::now());
message.write_master_file = true;
message.series_id = experiment.GetSeriesID();
message.series_unique_id = experiment.GetSeriesIDString();
@@ -488,6 +482,11 @@ void JFJochReceiver::FinalizeMeasurement() {
RetrievePedestal();
logger.Info("Devices stopped");
if (!send_buf_ctrl.CheckIfBufferReturned(std::chrono::seconds(10))) {
logger.Error("Send commands not finalized in 10 seconds");
throw JFJochException(JFJochExceptionCategory::ZeroMQ, "Send commands not finalized in 10 seconds");
}
logger.Info("Receiving data done");
}
@@ -563,7 +562,7 @@ JFJochReceiverStatus JFJochReceiver::GetStatus() const {
bkg_estimate = tmp;
if ((experiment.GetImageNum() > 0) && (compressed_size > 0)) {
compressed_ratio = static_cast<double> (images_collected
compressed_ratio = static_cast<double> (images_sent
* experiment.GetPixelDepth()
* experiment.GetModulesNum()
* RAW_MODULE_SIZE)

View File

@@ -18,8 +18,8 @@
#include "../frame_serialize/ImagePusher.h"
#include "../common/Logger.h"
#include "../common/ThreadSafeFIFO.h"
#include "../frame_serialize/ZMQStream2PreviewPublisher.h"
#include "../common/NUMAHWPolicy.h"
#include "../common/SendBufferControl.h"
#include "../image_analysis/StrongPixelSet.h"
#include "../image_analysis/AzimuthalIntegrationMapping.h"
@@ -60,6 +60,7 @@ class JFJochReceiver {
const DiffractionExperiment &experiment;
const JFCalibration *calibration;
std::vector<JFModulePedestal> pedestal_result;
SendBufferControl send_buf_ctrl;
Logger &logger;
@@ -75,8 +76,6 @@ class JFJochReceiver {
ThreadSafeFIFO<uint64_t> images_to_go;
ImagePusher *preview_publisher = nullptr;
ImagePusher &image_pusher;
bool push_images_to_writer;
@@ -122,18 +121,17 @@ class JFJochReceiver {
SpotFindingSettings GetSpotFindingSettings();
void UpdateMaxImage(uint64_t image_number);
void UpdateMaxDelay(uint64_t delay);
void FillStartMessageCalibration(StartMessage &message);
void FillStartMessageMask(StartMessage &message);
void SendStartMessage();
void SendCalibration();
public:
JFJochReceiver(const DiffractionExperiment& experiment,
const JFCalibration *calibration,
AcquisitionDeviceGroup &acquisition_devices,
ImagePusher &image_pusher,
Logger &logger, int64_t forward_and_sum_nthreads,
ImagePusher* preview_publisher,
const NUMAHWPolicy &numa_policy,
const SpotFindingSettings &spot_finding_settings);
const SpotFindingSettings &spot_finding_settings,
SendBuffer &buffer);
~JFJochReceiver();
JFJochReceiver(const JFJochReceiver &other) = delete;
JFJochReceiver& operator=(const JFJochReceiver &other) = delete;

View File

@@ -28,12 +28,15 @@ void JFJochReceiverPlots::Add(const DataMessage &msg, uint64_t file_number, cons
strong_pixels.AddElement(msg.number, msg.strong_pixel_count);
image_collection_efficiency.AddElement(msg.number, msg.image_collection_efficiency);
receiver_delay.AddElement(msg.number, msg.receiver_aq_dev_delay.value());
if (msg.receiver_aq_dev_delay)
receiver_delay.AddElement(msg.number, msg.receiver_aq_dev_delay.value());
if (msg.receiver_free_send_buf)
receiver_free_send_buf.AddElement(msg.number, msg.receiver_free_send_buf.value());
indexing_solution.AddElement(msg.number, msg.indexing_result);
indexing_solution_per_file.Add(file_number, msg.indexing_result);
if (msg.resolution_estimation)
if (msg.resolution_estimation)
resolution_estimation.Add(msg.resolution_estimation.value());
az_int_profile += profile;
@@ -93,6 +96,8 @@ MultiLinePlot JFJochReceiverPlots::GetPlots(const PlotRequest &request) {
return image_collection_efficiency.GetMeanPlot(nbins);
case PlotType::ReceiverDelay:
return receiver_delay.GetMeanPlot(nbins);
case PlotType::ReceiverFreeSendBuf:
return receiver_free_send_buf.GetMeanPlot(nbins);
case PlotType::StrongPixels:
return strong_pixels.GetMeanPlot(nbins);
case PlotType::ROISum:

View File

@@ -23,6 +23,7 @@ class JFJochReceiverPlots {
StatusVector<uint64_t> error_pixels;
StatusVector<uint64_t> strong_pixels;
StatusVector<uint64_t> receiver_delay;
StatusVector<uint64_t> receiver_free_send_buf;
StatusVector<float> image_collection_efficiency;
StatusVector<float> unit_cell[6];
SetAverage<uint64_t> indexing_solution_per_file;

View File

@@ -3,10 +3,11 @@
#include "JFJochReceiverService.h"
JFJochReceiverService::JFJochReceiverService(AcquisitionDeviceGroup &in_aq_devices,
Logger &in_logger, ImagePusher &pusher) :
Logger &in_logger, ImagePusher &pusher,
size_t send_buffer_size_MiB) :
logger(in_logger), aq_devices(in_aq_devices),
image_pusher(pusher), spot_finding_settings(DiffractionExperiment::DefaultDataProcessingSettings()) {
}
image_pusher(pusher), spot_finding_settings(DiffractionExperiment::DefaultDataProcessingSettings()),
buffer(send_buffer_size_MiB * 1024 * 1024) {}
JFJochReceiverService& JFJochReceiverService::NumThreads(int64_t input) {
if (input <= 0)
@@ -15,11 +16,6 @@ JFJochReceiverService& JFJochReceiverService::NumThreads(int64_t input) {
return *this;
}
JFJochReceiverService& JFJochReceiverService::PreviewPublisher(ImagePusher *in_preview_writer) {
preview_publisher = in_preview_writer;
return *this;
}
JFJochReceiverService &JFJochReceiverService::NUMAPolicy(const NUMAHWPolicy &policy) {
numa_policy = policy;
return *this;
@@ -59,8 +55,9 @@ void JFJochReceiverService::Start(const DiffractionExperiment &experiment, const
// ensure that everything was rolled back
receiver = std::make_unique<JFJochReceiver>(experiment, calibration,
aq_devices, image_pusher,
logger, nthreads, preview_publisher, numa_policy,
spot_finding_settings);
logger, nthreads, numa_policy,
spot_finding_settings,
buffer);
try {
// Don't want to stop
receiver->SetSpotFindingSettings(spot_finding_settings);

View File

@@ -13,9 +13,9 @@ class JFJochReceiverService {
std::unique_ptr<JFJochReceiver> receiver;
AcquisitionDeviceGroup &aq_devices;
Logger &logger;
SendBuffer buffer;
ImagePusher &image_pusher;
ImagePusher *preview_publisher = nullptr;
int64_t nthreads = 8;
enum class ReceiverState {Idle, Running};
@@ -27,8 +27,9 @@ class JFJochReceiverService {
SpotFindingSettings spot_finding_settings;
public:
JFJochReceiverService(AcquisitionDeviceGroup &aq_devices,
Logger &logger, ImagePusher &pusher);
JFJochReceiverService& PreviewPublisher(ImagePusher *in_preview_writer);
Logger &logger,
ImagePusher &pusher,
size_t send_buffer_size_MiB = 512);
JFJochReceiverService& NumThreads(int64_t input);
JFJochReceiverService& NUMAPolicy(const NUMAHWPolicy& policy);
JFJochReceiverService& NUMAPolicy(const std::string& policy);

View File

@@ -35,23 +35,23 @@ static JFCalibration GeneratePedestalCalibration(const DiffractionExperiment &x)
bool JFJochReceiverTest(JFJochReceiverOutput &output, Logger &logger,
AcquisitionDeviceGroup &aq_devices, const DiffractionExperiment &x,
uint16_t nthreads,
ZMQStream2PreviewPublisher *in_preview_writer,
const std::string &numa_policy) {
const std::string &numa_policy,
size_t send_buf_size_MiB) {
std::vector<uint16_t> raw_expected_image(RAW_MODULE_SIZE * x.GetModulesNum());
for (int i = 0; i < raw_expected_image.size(); i++)
raw_expected_image[i] = i % 65536;
return JFJochReceiverTest(output, logger, aq_devices, x, raw_expected_image, nthreads, in_preview_writer, numa_policy);
return JFJochReceiverTest(output, logger, aq_devices, x, raw_expected_image, nthreads, numa_policy, send_buf_size_MiB);
}
bool JFJochReceiverTest(JFJochReceiverOutput &output, Logger &logger,
AcquisitionDeviceGroup &aq_devices, const DiffractionExperiment &x,
const std::vector<uint16_t> &raw_expected_image,
uint16_t nthreads,
ZMQStream2PreviewPublisher *in_preview_writer,
const std::string &numa_policy) {
const std::string &numa_policy,
size_t send_buf_size_MiB) {
JFCalibration calib = GeneratePedestalCalibration(x);
@@ -64,8 +64,8 @@ bool JFJochReceiverTest(JFJochReceiverOutput &output, Logger &logger,
TestImagePusher pusher(image_number);
JFJochReceiverService service(aq_devices, logger, pusher);
service.PreviewPublisher(in_preview_writer).NumThreads(nthreads).NUMAPolicy(numa_policy);
JFJochReceiverService service(aq_devices, logger, pusher, send_buf_size_MiB);
service.NumThreads(nthreads).NUMAPolicy(numa_policy);
service.LoadInternalGeneratorImage(x, raw_expected_image, 0);
SpotFindingSettings settings = DiffractionExperiment::DefaultDataProcessingSettings();

View File

@@ -8,14 +8,14 @@
bool JFJochReceiverTest(JFJochReceiverOutput &output, Logger &logger,
AcquisitionDeviceGroup &aq_devices, const DiffractionExperiment &x,
uint16_t nthreads,
ZMQStream2PreviewPublisher *in_preview_writer = nullptr,
const std::string &numa_policy = "");
const std::string &numa_policy = "",
size_t send_buf_size_MiB = 1024);
bool JFJochReceiverTest(JFJochReceiverOutput &output, Logger &logger,
AcquisitionDeviceGroup &aq_devices, const DiffractionExperiment &x,
const std::vector<uint16_t> &data,
uint16_t nthreads,
ZMQStream2PreviewPublisher *in_preview_writer = nullptr,
const std::string &numa_policy = "");
const std::string &numa_policy = "",
size_t send_buf_size_MiB = 1024);
#endif //JUNGFRAUJOCH_JFJOCHRECEIVERTEST_H

View File

@@ -10,16 +10,17 @@
void print_usage(Logger &logger) {
logger.Info("Usage ./jfjoch_action_test {<options>} <path to repository>");
logger.Info("Options:");
logger.Info(" -R raw");
logger.Info(" -v verbose");
logger.Info(" -S<num> number of summed frames");
logger.Info(" -I use 32-bit integer");
logger.Info(" -s<num> number of data streams (acquisition devices)");
logger.Info(" -m<num> number of modules");
logger.Info(" -i<num> number of images");
logger.Info(" -N<num> number of image processing threads");
logger.Info(" -P<txt> NUMA Policy (none|n2g2|n8g4|n8g4_hbm), none is default");
logger.Info(" -R raw");
logger.Info(" -v verbose");
logger.Info(" -S<num> number of summed frames");
logger.Info(" -I use 32-bit integer");
logger.Info(" -s<num> number of data streams (acquisition devices)");
logger.Info(" -m<num> number of modules");
logger.Info(" -i<num> number of images");
logger.Info(" -N<num> number of image processing threads");
logger.Info(" -P<txt> NUMA Policy (none|n2g2|n8g4|n8g4_hbm), none is default");
logger.Info(" -D<path> use resonet deep learning model for resolution estimation - path to TorchScript");
logger.Info(" -B<num> size of send buffer in MiB (default 2048)");
}
int main(int argc, char **argv) {
@@ -39,6 +40,7 @@ int main(int argc, char **argv) {
std::string resonet_path;
DetectorType detector_type = DetectorType::JUNGFRAU;
bool hls_simulation = false;
size_t send_buffer_size_MiB = 2048;
if (argc == 1) {
print_usage(logger);
@@ -46,7 +48,7 @@ int main(int argc, char **argv) {
}
int opt;
while ((opt = getopt(argc, argv, "s:i:m:N:P:vRIS:D:EH")) != -1) {
while ((opt = getopt(argc, argv, "s:i:m:N:P:vRIS:D:EHB:")) != -1) {
switch (opt) {
case 'i':
nimages = atol(optarg);
@@ -84,6 +86,9 @@ int main(int argc, char **argv) {
case 'H':
hls_simulation = true;
break;
case 'B':
send_buffer_size_MiB = atol(optarg);
break;
default: /* '?' */
print_usage(logger);
exit(EXIT_FAILURE);
@@ -161,7 +166,7 @@ int main(int argc, char **argv) {
bool ret;
std::thread run_thread([&] {
try {
ret = JFJochReceiverTest(output, logger, aq_devices, x, input, nthreads, nullptr, numa_policy_name);
ret = JFJochReceiverTest(output, logger, aq_devices, x, input, nthreads, numa_policy_name, send_buffer_size_MiB);
} catch (std::exception &e) {
logger.Error(e.what());
ret = false;

View File

@@ -5,7 +5,6 @@
#include "../frame_serialize/CBORStream2Serializer.h"
#include "../frame_serialize/CBORStream2Deserializer.h"
#include "../compression/JFJochCompressor.h"
#include "stream2.h"
#include "../frame_serialize/CborUtil.h"
TEST_CASE("CBORSerialize_Start", "[CBOR]") {
@@ -64,6 +63,7 @@ TEST_CASE("CBORSerialize_Start", "[CBOR]") {
.az_int_bin_to_q = {0.1, 0.2, 0.3, 0.5},
.total_flux = 123,
.attenuator_transmission = 0.345,
.write_master_file = true,
.user_data = "Some random string 12345"
};
@@ -140,6 +140,7 @@ TEST_CASE("CBORSerialize_Start", "[CBOR]") {
CHECK(output_message.roi_names == message.roi_names);
CHECK(output_message.countrate_correction_enabled == message.countrate_correction_enabled);
CHECK(output_message.flatfield_enabled == message.flatfield_enabled);
CHECK(output_message.write_master_file == message.write_master_file);
}
TEST_CASE("CBORSerialize_Start_PixelMask", "[CBOR]") {
@@ -180,16 +181,14 @@ TEST_CASE("CBORSerialize_Start_PixelMask", "[CBOR]") {
CHECK(memcmp(output_message.pixel_mask[0].data, mask.data(), mask.size() * sizeof(float)) == 0);
}
TEST_CASE("CBORSerialize_Start_Calibration", "[CBOR]") {
TEST_CASE("CBORSerialize_Calibration", "[CBOR]") {
std::vector<uint8_t> buffer(8*1024*1024);
CBORStream2Serializer serializer(buffer.data(), buffer.size());
std::vector<float> calib1(256);
std::vector<float> calib2(256);
for (int i = 0; i < 256; i++) {
calib1[i] = i * 34.567;
calib2[i] = i * 76.33456;
}
CompressedImage image1 {
@@ -204,41 +203,19 @@ TEST_CASE("CBORSerialize_Start_Calibration", "[CBOR]") {
.channel = "calib1"
};
CompressedImage image2 {
.data = reinterpret_cast<uint8_t *>(calib2.data()),
.size = 16 * 16 * sizeof(float),
.xpixel = 16,
.ypixel = 16,
.pixel_depth_bytes = 4,
.pixel_is_signed = true,
.pixel_is_float = true,
.algorithm = CompressionAlgorithm::NO_COMPRESSION,
.channel = "calib2"
};
StartMessage message {};
message.AddCalibration(image2);
message.AddCalibration(image1);
REQUIRE_NOTHROW(serializer.SerializeSequenceStart(message));
REQUIRE_NOTHROW(serializer.SerializeCalibration(image1));
CBORStream2Deserializer deserializer;
REQUIRE_NOTHROW(deserializer.Process(buffer.data(), serializer.GetBufferSize()));
REQUIRE(deserializer.GetType() == CBORStream2Deserializer::Type::START);
REQUIRE(deserializer.GetType() == CBORStream2Deserializer::Type::CALIBRATION);
StartMessage output_message;
REQUIRE_NOTHROW(output_message = deserializer.GetStartMessage());
REQUIRE(output_message.calibration.size() == 2);
CHECK(output_message.calibration[0].pixel_is_float);
CHECK(output_message.calibration[1].pixel_is_float);
CHECK(output_message.calibration[0].xpixel == 16);
CHECK(output_message.calibration[0].ypixel == 16);
CHECK(output_message.calibration[1].xpixel == 16);
CHECK(output_message.calibration[1].ypixel == 16);
CHECK(output_message.calibration[0].channel == "calib2");
CHECK(output_message.calibration[1].channel == "calib1");
CHECK(memcmp(output_message.calibration[0].data, calib2.data(), 256 * sizeof(float)) == 0);
CHECK(memcmp(output_message.calibration[1].data, calib1.data(), 256 * sizeof(float)) == 0);
CompressedImage output_message;
REQUIRE_NOTHROW(output_message = deserializer.GetCalibrationImage());
CHECK(output_message.pixel_is_float);
CHECK(output_message.xpixel == 16);
CHECK(output_message.ypixel == 16);
CHECK(output_message.channel == "calib1");
CHECK(memcmp(output_message.data, calib1.data(), 256 * sizeof(float)) == 0);
}
TEST_CASE("CBORSerialize_End", "[CBOR]") {
@@ -251,7 +228,6 @@ TEST_CASE("CBORSerialize_End", "[CBOR]") {
.images_sent_to_write_count = 40000,
.max_receiver_delay = 3456,
.efficiency = 0.99,
.write_master_file = true,
.end_date = "ccc",
.series_unique_id = "bla5",
.series_id = 45676782
@@ -272,7 +248,6 @@ TEST_CASE("CBORSerialize_End", "[CBOR]") {
REQUIRE(output_message.efficiency);
REQUIRE(output_message.efficiency == Approx(message.efficiency.value()));
REQUIRE(output_message.end_date == message.end_date);
REQUIRE(output_message.write_master_file.value() == message.write_master_file.value());
REQUIRE(output_message.series_id == message.series_id);
REQUIRE(output_message.series_unique_id == message.series_unique_id);
REQUIRE(output_message.az_int_result.empty());
@@ -286,7 +261,6 @@ CBORStream2Serializer serializer(buffer.data(), buffer.size());
.max_image_number = 57789,
.max_receiver_delay = 3456,
.efficiency = 0.99,
.write_master_file = true,
.end_date = "ccc",
.series_unique_id = "bla5",
.series_id = 45676782
@@ -318,7 +292,6 @@ TEST_CASE("CBORSerialize_End_ADUHistogram", "[CBOR]") {
.max_image_number = 57789,
.max_receiver_delay = 3456,
.efficiency = 0.99,
.write_master_file = true,
.end_date = "ccc",
.series_unique_id = "bla5",
.series_id = 45676782,
@@ -711,239 +684,48 @@ TEST_CASE("CBORSerialize_Image_ROI", "[CBOR]") {
CHECK(image_array.roi["roi1"].pixels == message.roi["roi1"].pixels);
}
inline bool CmpString(const char *str1, const std::string& str2) {
return (strncmp(str1, str2.c_str(), str2.size()) == 0);
}
TEST_CASE("CBORSerialize_Start_stream2", "[CBOR]") {
TEST_CASE("CBORSerialize_Image_Append", "[CBOR]") {
std::vector<uint8_t> buffer(8*1024*1024);
CBORStream2Serializer serializer(buffer.data(), buffer.size());
std::vector<uint32_t> mask(456*457, 15);
CompressedImage image_mask {
.data = reinterpret_cast<uint8_t *>(mask.data()),
.size = 456 * 457 * sizeof(uint32_t),
.xpixel = 456,
.ypixel = 457,
.pixel_depth_bytes = 4,
.pixel_is_signed = false,
.pixel_is_float = false,
.algorithm = CompressionAlgorithm::NO_COMPRESSION,
.channel = "sc0"
};
StartMessage message {
.detector_distance = 0.0005,
.beam_center_x = 456.6,
.beam_center_y = 124.3,
.number_of_images = 34567,
.image_size_x = 456,
.image_size_y = 457,
.pixel_bit_depth = 32,
.pixel_signed = true,
.incident_energy = 12400,
.incident_wavelength = 0.988,
.frame_time = 0.0001,
.count_time = 0.000098,
.saturation_value = 65535,
.pixel_size_x = 0.000075,
.pixel_size_y = 0.000075,
.sensor_thickness = 0.0005,
.sensor_material = "Si",
.unit_cell = UnitCell{.a = 45,.b = 37, .c = 45, .alpha = 90, .beta = 108,. gamma = 120},
.space_group_number = 154,
.max_spot_count = 250,
.storage_cell_number = 16,
.pixel_mask_enabled = true,
.arm_date = "abc",
.sample_name = "lyso",
.file_prefix = "lyso1/dir/file",
.images_per_file = 1,
.channels = {"default", "sc2"},
.detector_description = "EIGER 16M",
.detector_serial_number = "123",
.series_unique_id = "bla",
.series_id = 4567,
.omega = {
.increment = 0.1f,
.start = 10.0f
},
.detector_translation = {0.5f, 0.0f, 0.5f},
.source_name = "Swiss Light Source",
.source_name_short = "SLS",
.instrument_name = "X06SA",
.instrument_name_short = "PXIII",
.az_int_bin_number = 35,
.summation = 567
};
message.AddPixelMask(image_mask);
REQUIRE_NOTHROW(serializer.SerializeSequenceStart(message));
stream2_msg *msg;
auto ret = stream2_parse_msg(buffer.data(), serializer.GetBufferSize(), &msg);
REQUIRE(ret == STREAM2_OK);
CHECK(msg->type == STREAM2_MSG_START);
auto msg2 = (stream2_start_msg *) msg;
CHECK(msg2->beam_center_x == message.beam_center_x);
CHECK(msg2->beam_center_y == message.beam_center_y);
CHECK(msg2->image_size_x == message.image_size_x);
CHECK(msg2->image_size_y == message.image_size_y);
CHECK(msg2->series_id == message.series_id);
CHECK(CmpString(msg2->series_unique_id, message.series_unique_id));
CHECK(CmpString(msg2->detector_description, message.detector_description));
CHECK(CmpString(msg2->arm_date, message.arm_date));
CHECK(msg2->number_of_images == message.number_of_images);
CHECK(msg2->frame_time == message.frame_time);
CHECK(msg2->count_time == message.count_time);
CHECK(msg2->pixel_mask_enabled == message.pixel_mask_enabled);
CHECK(msg2->pixel_mask.len == 1);
CHECK(CmpString(msg2->pixel_mask.ptr[0].channel, "sc0"));
CHECK(msg2->pixel_mask.ptr[0].pixel_mask.dim[0] == 457);
CHECK(msg2->pixel_mask.ptr[0].pixel_mask.dim[1] == 456);
CHECK(msg2->pixel_mask.ptr[0].pixel_mask.array.data.len == 456 * 457 * sizeof(uint32_t));
CHECK(memcmp(msg2->pixel_mask.ptr[0].pixel_mask.array.data.ptr, mask.data(), 456 * 457 * sizeof(uint32_t)) == 0);
CHECK(msg2->pixel_mask.ptr[0].pixel_mask.array.tag == TagUnsignedInt32BitLE);
stream2_free_msg(msg);
}
TEST_CASE("CBORSerialize_End_stream2", "[CBOR]") {
std::vector<uint8_t> buffer(8*1024*1024);
CBORStream2Serializer serializer(buffer.data(), buffer.size());
EndMessage message {
.max_image_number = 57789,
.max_receiver_delay = 3456,
.efficiency = 0.99,
.write_master_file = true,
.end_date = "ccc",
.series_unique_id = "bla5",
.series_id = 45676782
};
REQUIRE_NOTHROW(serializer.SerializeSequenceEnd(message));
stream2_msg *msg;
auto ret = stream2_parse_msg(buffer.data(), serializer.GetBufferSize(), &msg);
REQUIRE(ret == STREAM2_OK);
CHECK(msg->type == STREAM2_MSG_END);
auto msg2 = (stream2_end_msg *) msg;
CHECK(msg2->series_id == message.series_id);
CHECK(CmpString(msg2->series_unique_id, message.series_unique_id));
stream2_free_msg(msg);
}
TEST_CASE("CBORSerialize_Image_compressed_stream2", "[CBOR]") {
std::vector<uint8_t> buffer(8*1024*1024);
CBORStream2Serializer serializer(buffer.data(), buffer.size());
std::vector<SpotToSave> spots;
std::vector<uint16_t> test(512*1024);
for (int i = 0; i < test.size(); i++)
test[i] = (i * 253 + 56) % 256;
std::vector<uint8_t> compressed_test(512*1024*4);
JFJochBitShuffleCompressor compressor(CompressionAlgorithm::BSHUF_LZ4);
size_t compressed_size = compressor.Compress(compressed_test.data(), test);
CompressedImage image {
.data = compressed_test.data(),
.size = compressed_size,
.xpixel = 1024,
.ypixel = 512,
.pixel_depth_bytes = 2,
.pixel_is_signed = false,
.pixel_is_float = false,
.algorithm = CompressionAlgorithm::BSHUF_LZ4,
.channel = "default"
};
DataMessage message {
.number = 456,
.image = image,
.spots = spots
};
REQUIRE_NOTHROW(serializer.SerializeImage(message));
stream2_msg *msg;
auto ret = stream2_parse_msg(buffer.data(), serializer.GetBufferSize(), &msg);
REQUIRE(ret == STREAM2_OK);
CHECK(msg->type == STREAM2_MSG_IMAGE);
auto msg2 = (stream2_image_msg *) msg;
CHECK(msg2->image_id == message.number);
CHECK(msg2->data.len == 1);
CHECK(CmpString(msg2->data.ptr[0].channel, "default"));
CHECK(msg2->data.ptr[0].data.dim[0] == 512);
CHECK(msg2->data.ptr[0].data.dim[1] == 1024);
CHECK(CmpString(msg2->data.ptr[0].data.array.data.compression.algorithm, "bslz4"));
CHECK(msg2->data.ptr[0].data.array.data.compression.elem_size == 2);
CHECK(msg2->data.ptr[0].data.array.data.compression.orig_size == 1024 * 512 * 2);
REQUIRE(msg2->data.ptr[0].data.array.data.len == compressed_size);
CHECK(memcmp(msg2->data.ptr[0].data.array.data.ptr, compressed_test.data(), compressed_size) == 0);
CHECK(msg2->data.ptr[0].data.array.tag == TagUnsignedInt16BitLE);
CHECK(msg2->series_id == message.series_id);
CHECK(CmpString(msg2->series_unique_id, message.series_unique_id));
stream2_free_msg(msg);
}
TEST_CASE("CBORSerialize_Image_uncompressed_stream2", "[CBOR]") {
std::vector<uint8_t> buffer(8*1024*1024);
CBORStream2Serializer serializer(buffer.data(), buffer.size());
std::vector<SpotToSave> spots;
std::vector<uint8_t> test(512*1024);
std::vector<uint8_t> test(512 * 1024);
for (int i = 0; i < test.size(); i++)
test[i] = (i * 253 + 56) % 256;
CompressedImage image {
.data = test.data(),
.size = 1024 * 512,
CompressedImage image{
.data = nullptr,
.size = 0,
.xpixel = 1024,
.ypixel = 512,
.pixel_depth_bytes = 1,
.pixel_is_signed = false,
.pixel_is_float = false,
.algorithm = CompressionAlgorithm::NO_COMPRESSION,
.channel = "default"
};
DataMessage message {
.number = 456,
.image = image,
.spots = spots
DataMessage message{
.number = 480,
.image = image
};
REQUIRE_NOTHROW(serializer.SerializeImage(message));
memcpy(buffer.data() + serializer.GetImageAppendOffset(), test.data(), 512 * 1024);
//REQUIRE_THROWS(serializer.AppendImage(16*1024*1024));
REQUIRE_NOTHROW(serializer.AppendImage(512 * 1024));
CBORStream2Deserializer deserializer;
stream2_msg *msg;
REQUIRE_NOTHROW(deserializer.Process(buffer.data(), serializer.GetBufferSize()));
REQUIRE(deserializer.GetType() == CBORStream2Deserializer::Type::IMAGE);
auto ret = stream2_parse_msg(buffer.data(), serializer.GetBufferSize(), &msg);
REQUIRE(ret == STREAM2_OK);
CHECK(msg->type == STREAM2_MSG_IMAGE);
auto msg2 = (stream2_image_msg *) msg;
CHECK(msg2->image_id == message.number);
CHECK(msg2->data.len == 1);
CHECK(CmpString(msg2->data.ptr->channel, "default"));
CHECK(msg2->data.ptr[0].data.array.data.compression.algorithm == nullptr);
REQUIRE(msg2->data.ptr[0].data.array.data.len == 512*1024);
CHECK(memcmp(msg2->data.ptr[0].data.array.data.ptr, test.data(), 512*1024) == 0);
CHECK(msg2->data.ptr[0].data.array.tag == TagUnsignedInt8Bit);
CHECK(msg2->series_id == message.series_id);
CHECK(CmpString(msg2->series_unique_id, message.series_unique_id));
stream2_free_msg(msg);
auto image_array = deserializer.GetDataMessage();
REQUIRE(image_array.image.algorithm == CompressionAlgorithm::NO_COMPRESSION);
REQUIRE(image_array.image.xpixel == 1024);
REQUIRE(image_array.image.ypixel == 512);
REQUIRE(image_array.image.pixel_depth_bytes == 1);
REQUIRE(!image_array.image.pixel_is_signed);
REQUIRE(image_array.image.channel == "default");
REQUIRE(image_array.image.size == test.size());
REQUIRE(image_array.indexing_result == message.indexing_result);
REQUIRE(image_array.number == 480);
REQUIRE(memcmp(image_array.image.data, test.data(), test.size()) == 0);
}

View File

@@ -20,7 +20,7 @@ ADD_EXECUTABLE(CatchTest
JFCalibrationTest.cpp
AzimuthalIntegrationTest.cpp
StatusVectorTest.cpp
CBORTest.cpp ../tests/stream2.h ../tests/stream2.c
CBORTest.cpp
DetectorGeometryTest.cpp JFJochBrokerParserTest.cpp DetectorSetupTest.cpp DiffractionGeometryTest.cpp
FPGASpotFindingUnitTest.cpp
PreviewCounterTest.cpp
@@ -35,6 +35,7 @@ ADD_EXECUTABLE(CatchTest
FPGAEigerReorderTest.cpp
ROIMaskTest.cpp
LossyFilterTest.cpp
SendBufferTest.cpp
)
target_link_libraries(CatchTest JFJochBroker JFJochReceiver JFJochWriter JFJochImageAnalysis JFJochCommon JFJochHLSSimulation JFJochImageExport JFJochResonet)

View File

@@ -126,11 +126,15 @@ TEST_CASE("HDF5MasterFile", "[HDF5][Full]") {
x.FillMessage(start_message);
EndMessage end_message;
end_message.max_image_number = x.GetImageNum();
REQUIRE_NOTHROW(HDF5Metadata::NXmx(start_message, end_message));
std::unique_ptr<NXmx> master = std::make_unique<NXmx>(start_message);
master->Finalize(end_message);
master.reset();
x.FilePrefix("test02");
x.FillMessage(start_message);
REQUIRE_NOTHROW(HDF5Metadata::NXmx(start_message, end_message));
master = std::make_unique<NXmx>(start_message);
master->Finalize(end_message);
master.reset();
}
remove("test01_master.h5");
remove("test02_master.h5");
@@ -159,7 +163,9 @@ TEST_CASE("HDF5MasterFile_RadInt", "[HDF5][Full]") {
end_message.az_int_result["avg1"] = profile.GetResult();
end_message.az_int_result["avg2"] = profile.GetResult();
REQUIRE_NOTHROW(HDF5Metadata::NXmx(start_message, end_message));
std::unique_ptr<NXmx> master = std::make_unique<NXmx>(start_message);
master->Finalize(end_message);
master.reset();
}
remove("test01_rad_int_master.h5");
// No leftover HDF5 objects
@@ -337,7 +343,9 @@ TEST_CASE("HDF5Writer_Link", "[HDF5][Full]") {
REQUIRE_NOTHROW(writer.Write(message));
}
REQUIRE_NOTHROW(HDF5Metadata::NXmx(start_message, end_message));
std::unique_ptr<NXmx> master = std::make_unique<NXmx>(start_message);
master->Finalize(end_message);
master.reset();
}
{
HDF5ReadOnlyFile file("link_master.h5");
@@ -393,7 +401,9 @@ TEST_CASE("HDF5Writer_Link_zero_images", "[HDF5][Full]") {
EndMessage end_message;
end_message.max_image_number = 0;
REQUIRE_NOTHROW(HDF5Metadata::NXmx(start_message, end_message));
std::unique_ptr<NXmx> master = std::make_unique<NXmx>(start_message);
master->Finalize(end_message);
master.reset();
}
{
std::unique_ptr<HDF5ReadOnlyFile> file;

View File

@@ -2,14 +2,17 @@
#include <catch2/catch.hpp>
#include <fstream>
#include "../receiver/JFJochReceiverTest.h"
#include "../acquisition_device/HLSSimulatedDevice.h"
#include "../writer/HDF5Objects.h"
#include "../writer/HDF5ImagePusher.h"
#include "../receiver/JFJochReceiverService.h"
#include "../common/DiffractionGeometry.h"
#include "../export_images/WriteTIFF.h"
#include <fstream>
#include "../common/ZMQWrappers.h"
#include "../frame_serialize/ZMQStream2Pusher.h"
#include "../writer/StreamWriter.h"
TEST_CASE("JFJochIntegrationTest_ZMQ_lysozyme_spot_and_index", "[JFJochReceiver]") {
Logger logger("JFJochIntegrationTest_ZMQ_lysozyme_spot_and_index");
@@ -49,10 +52,11 @@ TEST_CASE("JFJochIntegrationTest_ZMQ_lysozyme_spot_and_index", "[JFJochReceiver]
aq_devices.Add(std::move(test));
JFCalibration calib(experiment);
HDF5ImagePusher pusher;
ZMQContext context;
ZMQStream2Pusher pusher(context, "ipc://*");
StreamWriter writer(context, logger, pusher.GetAddress());
auto writer_future = std::async(std::launch::async, &StreamWriter::Run, &writer);
context.NumThreads(4);
JFJochReceiverService service(aq_devices, logger, pusher);
@@ -82,6 +86,8 @@ TEST_CASE("JFJochIntegrationTest_ZMQ_lysozyme_spot_and_index", "[JFJochReceiver]
CHECK(receiver_out.status.indexing_rate == 1.0);
CHECK(receiver_out.status.images_sent == experiment.GetImageNum());
CHECK(!receiver_out.status.cancelled);
REQUIRE_NOTHROW(writer_future.get());
}
TEST_CASE("JFJochIntegrationTest_ZMQ_lysozyme_spot_and_index_min_pix_2", "[JFJochReceiver]") {
@@ -121,12 +127,13 @@ TEST_CASE("JFJochIntegrationTest_ZMQ_lysozyme_spot_and_index_min_pix_2", "[JFJoc
test->SetInternalGeneratorFrame((uint16_t *) image_raw_geom.data() + m * RAW_MODULE_SIZE, m);
aq_devices.Add(std::move(test));
JFCalibration calib(experiment);
HDF5ImagePusher pusher;
ZMQContext context;
context.NumThreads(4);
ZMQStream2Pusher pusher(context, "ipc://*");
StreamWriter writer(context, logger, pusher.GetAddress());
auto writer_future = std::async(std::launch::async, &StreamWriter::Run, &writer);
JFJochReceiverService service(aq_devices, logger, pusher);
service.NumThreads(nthreads);
@@ -154,6 +161,8 @@ TEST_CASE("JFJochIntegrationTest_ZMQ_lysozyme_spot_and_index_min_pix_2", "[JFJoc
CHECK(receiver_out.status.indexing_rate == 1.0);
CHECK(receiver_out.status.images_sent == experiment.GetImageNum());
CHECK(!receiver_out.status.cancelled);
REQUIRE_NOTHROW(writer_future.get());
}
TEST_CASE("GenerateResolutionMap") {
@@ -217,10 +226,11 @@ TEST_CASE("JFJochIntegrationTest_ZMQ_lysozyme_resolution", "[JFJochReceiver]") {
test->SetInternalGeneratorFrame((uint16_t *) image_raw_geom.data() + m * RAW_MODULE_SIZE, m);
aq_devices.Add(std::move(test));
JFCalibration calib(experiment);
HDF5ImagePusher pusher;
ZMQContext context;
ZMQStream2Pusher pusher(context, "ipc://*");
StreamWriter writer(context, logger, pusher.GetAddress());
auto writer_future = std::async(std::launch::async, &StreamWriter::Run, &writer);
context.NumThreads(4);
JFJochReceiverService service(aq_devices, logger, pusher);
@@ -240,6 +250,8 @@ TEST_CASE("JFJochIntegrationTest_ZMQ_lysozyme_resolution", "[JFJochReceiver]") {
CHECK(receiver_out.status.indexing_rate == 1.0);
CHECK(receiver_out.status.images_sent == experiment.GetImageNum());
CHECK(!receiver_out.status.cancelled);
REQUIRE_NOTHROW(writer_future.get());
}
@@ -252,7 +264,7 @@ TEST_CASE("JFJochIntegrationTest_ZMQ_ROI", "[JFJochReceiver]") {
DiffractionExperiment experiment(DetectorGeometry(8,2,8,36));
experiment.ImagesPerTrigger(5).NumTriggers(1).UseInternalPacketGenerator(true)
.FilePrefix("lyso_test").ConversionOnFPGA(false)
.FilePrefix("lyso_test_roi").ConversionOnFPGA(false)
.DetectorDistance_mm(75).BeamY_pxl(1136).BeamX_pxl(1090).PhotonEnergy_keV(12.4)
.SetUnitCell(UnitCell{.a = 36.9, .b = 78.95, .c = 78.95, .alpha =90, .beta = 90, .gamma = 90});
@@ -296,11 +308,12 @@ TEST_CASE("JFJochIntegrationTest_ZMQ_ROI", "[JFJochReceiver]") {
aq_devices.Add(std::move(test));
JFCalibration calib(experiment);
HDF5ImagePusher pusher;
ZMQContext context;
context.NumThreads(4);
ZMQStream2Pusher pusher(context, "ipc://*");
StreamWriter writer(context, logger, pusher.GetAddress());
auto writer_future = std::async(std::launch::async, &StreamWriter::Run, &writer);
JFJochReceiverService service(aq_devices, logger, pusher);
service.NumThreads(nthreads);
@@ -330,4 +343,66 @@ TEST_CASE("JFJochIntegrationTest_ZMQ_ROI", "[JFJochReceiver]") {
CHECK(receiver_out.efficiency == 1.0);
CHECK(receiver_out.status.images_sent == experiment.GetImageNum());
CHECK(!receiver_out.status.cancelled);
}
REQUIRE_NOTHROW(writer_future.get());
}
TEST_CASE("JFJochIntegrationTest_ZMQ_SaveCalibration", "[JFJochReceiver]") {
Logger logger("JFJochIntegrationTest_ZMQ_SaveCalibration");
RegisterHDF5Filter();
const uint16_t nthreads = 4;
DiffractionExperiment experiment(DetectorGeometry(8,2,8,36));
experiment.ImagesPerTrigger(5).NumTriggers(1).UseInternalPacketGenerator(true)
.FilePrefix("lyso_save_calibration").ConversionOnFPGA(false)
.DetectorDistance_mm(75).BeamY_pxl(1136).BeamX_pxl(1090).PhotonEnergy_keV(12.4)
.SaveCalibration(true);
// Load example image
HDF5ReadOnlyFile data("../../tests/test_data/compression_benchmark.h5");
HDF5DataSet dataset(data, "/entry/data/data");
HDF5DataSpace file_space(dataset);
REQUIRE(file_space.GetDimensions()[2] == experiment.GetXPixelsNum());
REQUIRE(file_space.GetDimensions()[1] == experiment.GetYPixelsNum());
std::vector<int16_t> image_conv (file_space.GetDimensions()[1] * file_space.GetDimensions()[2]);
std::vector<hsize_t> start = {5,0,0};
std::vector<hsize_t> file_size = {1, file_space.GetDimensions()[1], file_space.GetDimensions()[2]};
dataset.ReadVector(image_conv, start, file_size);
std::vector<int16_t> image_raw_geom(experiment.GetModulesNum() * RAW_MODULE_SIZE);
ConvertedToRawGeometry(experiment, image_raw_geom.data(), image_conv.data());
logger.Info("Loaded image");
// Setup acquisition device
AcquisitionDeviceGroup aq_devices;
std::unique_ptr<HLSSimulatedDevice> test = std::make_unique<HLSSimulatedDevice>(0, 64);
for (int m = 0; m < experiment.GetModulesNum(); m++)
test->SetInternalGeneratorFrame((uint16_t *) image_raw_geom.data() + m * RAW_MODULE_SIZE, m);
aq_devices.Add(std::move(test));
JFCalibration calib(experiment);
ZMQContext context;
context.NumThreads(4);
ZMQStream2Pusher pusher(context, "ipc://*");
StreamWriter writer(context, logger, pusher.GetAddress());
auto writer_future = std::async(std::launch::async, &StreamWriter::Run, &writer);
JFJochReceiverService service(aq_devices, logger, pusher);
service.NumThreads(nthreads);
service.Start(experiment, &calib);
auto receiver_out = service.Stop();
CHECK(receiver_out.efficiency == 1.0);
CHECK(receiver_out.status.images_sent == experiment.GetImageNum());
CHECK(!receiver_out.status.cancelled);
REQUIRE_NOTHROW(writer_future.get());
}

70
tests/SendBufferTest.cpp Normal file
View File

@@ -0,0 +1,70 @@
// Copyright (2019-2024) Paul Scherrer Institute
#include <catch2/catch.hpp>
#include "../common/SendBufferControl.h"
TEST_CASE("SendBuffer") {
SendBuffer buf(2*1024*1024);
REQUIRE_THROWS(buf.SetBufferLocationSize(4 * 1024 * 1024));
REQUIRE_NOTHROW(buf.SetBufferLocationSize(1024 * 1024));
REQUIRE(buf.GetNumOfLocations() == 2);
REQUIRE(buf.GetBufferLocationSize() == 1024*1024);
REQUIRE(buf.GetBufferLocationID(buf.GetBufferLocation(0)) == 0);
REQUIRE(buf.GetBufferLocationID(buf.GetBufferLocation(1)) == 1);
REQUIRE_THROWS(buf.GetBufferLocation(2));
REQUIRE_THROWS(buf.GetBufferLocationID(nullptr));
}
TEST_CASE("SendBufferControl") {
DiffractionExperiment experiment(DetectorGeometry(18,3,8,36));
SendBuffer buf(4*experiment.GetSendBufferLocationSize());
SendBufferControl buf_ctrl(experiment, buf);
REQUIRE(buf_ctrl.GetAvailBufLocations() == 4);
ZeroCopyReturnValue *ret0 = buf_ctrl.GetBufLocation();
REQUIRE(ret0 != nullptr);
REQUIRE(buf_ctrl.GetAvailBufLocations() == 3);
ZeroCopyReturnValue *ret1 = buf_ctrl.GetBufLocation();
REQUIRE(ret1 != nullptr);
ZeroCopyReturnValue *ret2 = buf_ctrl.GetBufLocation();
REQUIRE(ret2 != nullptr);
ZeroCopyReturnValue *ret3 = buf_ctrl.GetBufLocation();
REQUIRE(ret3 != nullptr);
REQUIRE(buf_ctrl.GetAvailBufLocations() == 0);
ZeroCopyReturnValue *ret4 = buf_ctrl.GetBufLocation();
REQUIRE(ret4 == nullptr);
ret0->release();
ret1->release();
ret2->release();
ret3->release();
REQUIRE(buf_ctrl.GetAvailBufLocations() == 4);
REQUIRE(buf_ctrl.CheckIfBufferReturned(std::chrono::microseconds(1)));
}
TEST_CASE("SendBufferControl_BuffersNotReturned") {
DiffractionExperiment experiment(DetectorGeometry(18,3,8,36));
SendBuffer buf(4*experiment.GetSendBufferLocationSize());
SendBufferControl buf_ctrl(experiment, buf);
REQUIRE(buf_ctrl.GetAvailBufLocations() == 4);
ZeroCopyReturnValue *ret0 = buf_ctrl.GetBufLocation();
REQUIRE(ret0 != nullptr);
REQUIRE(buf_ctrl.GetAvailBufLocations() == 3);
REQUIRE(!buf_ctrl.CheckIfBufferReturned(std::chrono::microseconds(1)));
}

View File

@@ -3,6 +3,8 @@
#include <catch2/catch.hpp>
#include <../common/ThreadSafeFIFO.h>
using namespace std::chrono_literals;
TEST_CASE("ThreadSafeFIFO","[ThreadSafeFIFO]") {
ThreadSafeFIFO<uint32_t> fifo;
uint32_t tmp;
@@ -49,6 +51,27 @@ TEST_CASE("ThreadSafeFIFO_LimitedSize","[ThreadSafeFIFO]") {
fifo.GetBlocking();
}
TEST_CASE("ThreadSafeFIFO_GetTimeout","[ThreadSafeFIFO]") {
ThreadSafeFIFO<uint32_t> fifo;
uint32_t tmp;
fifo.Put(0);
fifo.Put(1);
REQUIRE(fifo.GetTimeout(tmp, 1ms) == 1);
CHECK (tmp == 0);
fifo.Put(0);
REQUIRE(fifo.GetTimeout(tmp, 1ms)== 1);
CHECK (tmp == 1);
REQUIRE(fifo.GetTimeout(tmp, 1ms) == 1);
CHECK (tmp == 0);
REQUIRE(fifo.GetTimeout(tmp, 1ms) == 0);
REQUIRE(fifo.GetTimeout(tmp, 1ms) == 0);
}
TEST_CASE("ThreadSafeSet","[ThreadSafeFIFO]") {
ThreadSafeSet<uint32_t> set;
uint32_t tmp;

View File

@@ -44,9 +44,9 @@ void test_puller(ZMQImagePuller *puller,
}
puller->WaitForImage();
}
// frame is END
auto end = puller->GetEndMessage();
if ((!end.write_master_file) || (end.write_master_file.value() != (writer_id == 0)))
auto start = puller->GetStartMessage();
if ((!start.write_master_file) || (start.write_master_file.value() != (writer_id == 0)))
diff_content[writer_id]++;
}
@@ -58,7 +58,7 @@ TEST_CASE("ZMQImageCommTest_1Writer","[ZeroMQ]") {
DiffractionExperiment x(DetectorGeometry(1));
x.Mode(DetectorMode::Raw);
x.PedestalG0Frames(0).NumTriggers(1).UseInternalPacketGenerator(false).PhotonEnergy_keV(12.4)
.ImagesPerTrigger(nframes);
.ImagesPerTrigger(nframes);
std::vector<DiffractionSpot> empty_spot_vector;
std::vector<float> empty_rad_int_profile;
@@ -84,19 +84,22 @@ TEST_CASE("ZMQImageCommTest_1Writer","[ZeroMQ]") {
puller.Connect(pusher_addr[0]);
std::thread sender_thread = std::thread([&] {
std::vector<uint8_t> serialization_buffer(16*1024*1024);
CBORStream2Serializer serializer(serialization_buffer.data(), serialization_buffer.size());
StartMessage message {
.images_per_file = 16
};
EndMessage end_message{
.write_master_file = true
.images_per_file = 16,
.write_master_file = true
};
EndMessage end_message{};
pusher.StartDataCollection(message);
for (int i = 0; i < nframes; i++) {
DataMessage data_message;
data_message.number = i;
PrepareCBORImage(data_message, x, image1.data() + i * x.GetPixelsNum(), x.GetPixelsNum() * sizeof(uint16_t));
pusher.SendImage(data_message);
serializer.SerializeImage(data_message);
pusher.SendImage(serialization_buffer.data(), serializer.GetBufferSize(), i);
}
pusher.EndDataCollection(end_message);
@@ -160,19 +163,22 @@ TEST_CASE("ZMQImageCommTest_2Writers","[ZeroMQ]") {
std::vector<size_t> diff_size(npullers), diff_content(npullers), diff_split(npullers), nimages(npullers);
std::thread sender_thread = std::thread([&] {
StartMessage message {
.images_per_file = 16
};
EndMessage end_message{
.write_master_file = true
};
pusher.StartDataCollection(message);
std::vector<uint8_t> serialization_buffer(16*1024*1024);
CBORStream2Serializer serializer(serialization_buffer.data(), serialization_buffer.size());
StartMessage message {
.images_per_file = 16,
.write_master_file = true
};
EndMessage end_message{};
pusher.StartDataCollection(message);
for (int i = 0; i < nframes; i++) {
DataMessage data_message;
data_message.number = i;
PrepareCBORImage(data_message, x, image1.data() + i * x.GetPixelsNum(), x.GetPixelsNum() * sizeof(uint16_t));
pusher.SendImage(data_message);
serializer.SerializeImage(data_message);
pusher.SendImage(serialization_buffer.data(), serializer.GetBufferSize(), i);
}
pusher.EndDataCollection(end_message);
@@ -246,19 +252,23 @@ TEST_CASE("ZMQImageCommTest_4Writers","[ZeroMQ]") {
std::vector<size_t> diff_size(npullers), diff_content(npullers), diff_split(npullers), nimages(npullers);
std::thread sender_thread = std::thread([&] {
StartMessage message {
.images_per_file = 16
};
EndMessage end_message{
.write_master_file = true
};
pusher.StartDataCollection(message);
std::vector<uint8_t> serialization_buffer(16*1024*1024);
CBORStream2Serializer serializer(serialization_buffer.data(), serialization_buffer.size());
StartMessage message {
.images_per_file = 16,
.write_master_file = true
};
EndMessage end_message{};
pusher.StartDataCollection(message);
for (int i = 0; i < nframes; i++) {
DataMessage data_message;
data_message.number = i;
PrepareCBORImage(data_message, x, image1.data() + i * x.GetPixelsNum(), x.GetPixelsNum() * sizeof(uint16_t));
pusher.SendImage(data_message);
serializer.SerializeImage(data_message);
pusher.SendImage(serialization_buffer.data(), serializer.GetBufferSize(), i);
}
pusher.EndDataCollection(end_message);
@@ -294,7 +304,7 @@ TEST_CASE("ZMQImageCommTest_4Writers","[ZeroMQ]") {
TEST_CASE("ZMQImagePuller_abort","[ZeroMQ]") {
const size_t nframes = 256;
DiffractionExperiment x;
x.Mode(DetectorMode::Raw);
x.PedestalG0Frames(0).NumTriggers(1).UseInternalPacketGenerator(false).PhotonEnergy_keV(12.4)
@@ -336,9 +346,11 @@ TEST_CASE("ZMQImageCommTest_NoWriter","[ZeroMQ]") {
DataMessage data_message;
data_message.number = 1;
data_message.image = image;
REQUIRE(!pusher.SendImage(data_message));
std::vector<uint8_t> v(16*1024*1024);
CBORStream2Serializer serializer(v.data(), v.size());
serializer.SerializeImage(data_message);
REQUIRE(!pusher.SendImage(v.data(), serializer.GetBufferSize(), 1));
EndMessage end_message;
REQUIRE(!pusher.EndDataCollection(end_message));
}
}

View File

@@ -1,956 +0,0 @@
// Copyright (2019-2023) Paul Scherrer Institute
// DECTRIS proprietary license
#define __STDC_WANT_IEC_60559_TYPES_EXT__
#include "stream2.h"
#include <assert.h>
#include <float.h>
#if FLT16_MANT_DIG > 0 || __FLT16_MANT_DIG__ > 0
#else
#include <immintrin.h>
#if defined(_MSC_VER)
#include <intrin.h>
#endif
#endif
#include <stdint.h>
#include <stdlib.h>
#include <string.h>
#include "../frame_serialize/tinycbor/src/cbor.h"
// Buffer size used for CBOR map keys; keys that do not fit (>= 64 bytes
// including the terminating NUL) are treated as unknown by parse_key().
enum { MAX_KEY_LEN = 64 };

// CBOR tag for a row-major multi-dimensional array (RFC 8746, section 3.1.1).
static const CborTag MULTI_DIMENSIONAL_ARRAY_ROW_MAJOR = 40;
// DECTRIS vendor tag marking a compressed byte string.
static const CborTag DECTRIS_COMPRESSION = 56500;
// Reads a 64-bit unsigned integer stored big-endian in the first 8 bytes
// of buf.
static uint64_t read_u64_be(const uint8_t* buf) {
    uint64_t value = 0;
    for (int i = 0; i < 8; i++)
        value = (value << 8) | (uint64_t)buf[i];
    return value;
}
// Maps a tinycbor error code onto the coarser stream2 result enum.
// Malformed-but-recognized input maps to PARSE, allocation failure to
// OUT_OF_MEMORY, anything else to DECODE.
static enum stream2_result CBOR_RESULT(CborError e) {
    switch (e) {
        case CborNoError:
            return STREAM2_OK;
        case CborErrorUnknownLength:
        case CborErrorDataTooLarge:
            return STREAM2_ERROR_PARSE;
        case CborErrorOutOfMemory:
            return STREAM2_ERROR_OUT_OF_MEMORY;
        default:
            return STREAM2_ERROR_DECODE;
    }
}
// Returns a pointer directly into the encoded CBOR buffer for the payload of
// the byte string at `it`, avoiding a copy. On success *bstr/*bstr_len
// describe the payload; if `next` is non-NULL it is advanced past the string.
// Precondition: `it` is positioned on a definite-length byte string.
static enum stream2_result consume_byte_string_nocopy(const CborValue* it,
                                                      const uint8_t** bstr,
                                                      size_t* bstr_len,
                                                      CborValue* next) {
    enum stream2_result r;
    assert(cbor_value_is_byte_string(it));
    if ((r = CBOR_RESULT(cbor_value_get_string_length(it, bstr_len))))
        return r;
    const uint8_t* ptr = cbor_value_get_next_byte(it);
    // Initial byte of a definite-length byte string (major type 2):
    // 0x40-0x57 embed the length, 0x58/0x59/0x5a/0x5b are followed by a
    // 1/2/4/8-byte length field which must be skipped to reach the payload.
    assert(*ptr >= 0x40 && *ptr <= 0x5b);
    switch (*ptr++) {
        case 0x58:
            ptr += 1;
            break;
        case 0x59:
            ptr += 2;
            break;
        case 0x5a:
            ptr += 4;
            break;
        case 0x5b:
            ptr += 8;
            break;
    }
    *bstr = ptr;
    if (next) {
        *next = *it;
        if ((r = CBOR_RESULT(cbor_value_advance(next))))
            return r;
    }
    return STREAM2_OK;
}
// Copies the text-string map key at `it` into `key` (NUL-terminated) and
// advances the iterator. Over-long keys are mapped to the empty string so the
// caller treats them like any other unknown key.
static enum stream2_result parse_key(CborValue* it, char key[MAX_KEY_LEN]) {
    if (!cbor_value_is_text_string(it))
        return STREAM2_ERROR_PARSE;
    size_t key_len = MAX_KEY_LEN;
    CborError e = cbor_value_copy_text_string(it, key, &key_len, it);
    if (e == CborErrorOutOfMemory || key_len == MAX_KEY_LEN) {
        // The key is longer than any we support. Return the empty key which
        // should be handled by the caller like other unknown keys.
        key[0] = '\0';
        return STREAM2_OK;
    }
    return CBOR_RESULT(e);
}
// Reads the CBOR tag at `it` into *value and advances past it.
static enum stream2_result parse_tag(CborValue* it, CborTag* value) {
    enum stream2_result r;
    if (!cbor_value_is_tag(it))
        return STREAM2_ERROR_PARSE;
    if ((r = CBOR_RESULT(cbor_value_get_tag(it, value))))
        return r;
    return CBOR_RESULT(cbor_value_advance_fixed(it));
}

// Reads the CBOR boolean at `it` into *value and advances past it.
static enum stream2_result parse_bool(CborValue* it, bool* value) {
    enum stream2_result r;
    if (!cbor_value_is_boolean(it))
        return STREAM2_ERROR_PARSE;
    if ((r = CBOR_RESULT(cbor_value_get_boolean(it, value))))
        return r;
    return CBOR_RESULT(cbor_value_advance_fixed(it));
}
// Converts an IEEE 754 binary16 (half-precision) bit pattern to float.
//
// Portable bit-level conversion: the previous fallback path (used whenever
// _Float16 was unavailable) had its intrinsic commented out and silently
// returned 0 for every input. This version needs no compiler extension and
// handles normals, subnormals, signed zero, infinities and NaN.
static float half_to_float(uint16_t x) {
    const uint32_t sign = (uint32_t)(x & 0x8000u) << 16;
    uint32_t exp = (x >> 10) & 0x1fu;
    uint32_t mant = x & 0x3ffu;
    uint32_t bits;
    if (exp == 0x1fu) {
        // Inf/NaN: all-ones exponent; keep the mantissa (NaN payload).
        bits = sign | 0x7f800000u | (mant << 13);
    } else if (exp != 0) {
        // Normal number: rebias exponent from 15 to 127 (127 - 15 = 112).
        bits = sign | ((exp + 112u) << 23) | (mant << 13);
    } else if (mant != 0) {
        // Subnormal half: normalize into a float, which has enough exponent
        // range to represent it as a normal number.
        exp = 113u;
        while ((mant & 0x400u) == 0) {
            mant <<= 1;
            exp--;
        }
        mant &= 0x3ffu;
        bits = sign | (exp << 23) | (mant << 13);
    } else {
        // Signed zero.
        bits = sign;
    }
    float f;
    memcpy(&f, &bits, sizeof(f));
    return f;
}
// Reads the floating-point value at `it` into *value, accepting half, single
// or double precision encodings, and advances past it.
static enum stream2_result parse_double(CborValue* it, double* value) {
    enum stream2_result r;
    if (cbor_value_is_half_float(it)) {
        uint16_t h;
        if ((r = CBOR_RESULT(cbor_value_get_half_float(it, &h))))
            return r;
        *value = (double)half_to_float(h);
    } else if (cbor_value_is_float(it)) {
        float f;
        if ((r = CBOR_RESULT(cbor_value_get_float(it, &f))))
            return r;
        *value = (double)f;
    } else if (cbor_value_is_double(it)) {
        if ((r = CBOR_RESULT(cbor_value_get_double(it, value))))
            return r;
    } else {
        return STREAM2_ERROR_PARSE;
    }
    return CBOR_RESULT(cbor_value_advance_fixed(it));
}
// Reads the unsigned integer at `it` into *value and advances past it.
static enum stream2_result parse_uint64(CborValue* it, uint64_t* value) {
    enum stream2_result r;
    if (!cbor_value_is_unsigned_integer(it))
        return STREAM2_ERROR_PARSE;
    if ((r = CBOR_RESULT(cbor_value_get_uint64(it, value))))
        return r;
    return CBOR_RESULT(cbor_value_advance_fixed(it));
}

// Duplicates the text string at `it` into a freshly malloc'ed, NUL-terminated
// *tstr (caller owns it) and advances past the string.
static enum stream2_result parse_text_string(CborValue* it, char** tstr) {
    if (!cbor_value_is_text_string(it))
        return STREAM2_ERROR_PARSE;
    size_t len;
    return CBOR_RESULT(cbor_value_dup_text_string(it, tstr, &len, it));
}
// Parses a CBOR array of exactly two unsigned integers into `array`
// (used for [value, divisor] time representations) and advances past it.
static enum stream2_result parse_array_2_uint64(CborValue* it,
                                                uint64_t array[2]) {
    enum stream2_result r;
    if (!cbor_value_is_array(it))
        return STREAM2_ERROR_PARSE;
    size_t len;
    if ((r = CBOR_RESULT(cbor_value_get_array_length(it, &len))))
        return r;
    if (len != 2)
        return STREAM2_ERROR_PARSE;
    CborValue elt;
    if ((r = CBOR_RESULT(cbor_value_enter_container(it, &elt))))
        return r;
    for (size_t i = 0; i < len; i++) {
        if ((r = parse_uint64(&elt, &array[i])))
            return r;
    }
    return CBOR_RESULT(cbor_value_leave_container(it, &elt));
}
// Parses a DECTRIS-compression-tagged value: a 3-element array of
// [algorithm, elem_size, compressed-bytes]. The compressed payload is
// returned zero-copy via *bstr/*bstr_len; the uncompressed size is read
// from the payload's leading big-endian header for known algorithms.
static enum stream2_result parse_dectris_compression(
    CborValue* it,
    struct stream2_compression* compression,
    const uint8_t** bstr,
    size_t* bstr_len) {
    enum stream2_result r;
    CborTag tag;
    if ((r = parse_tag(it, &tag)))
        return r;
    if (tag != DECTRIS_COMPRESSION)
        return STREAM2_ERROR_PARSE;
    if (!cbor_value_is_array(it))
        return STREAM2_ERROR_PARSE;
    size_t len;
    if ((r = CBOR_RESULT(cbor_value_get_array_length(it, &len))))
        return r;
    if (len != 3)
        return STREAM2_ERROR_PARSE;
    CborValue elt;
    if ((r = CBOR_RESULT(cbor_value_enter_container(it, &elt))))
        return r;
    if ((r = parse_text_string(&elt, &compression->algorithm)))
        return r;
    if ((r = parse_uint64(&elt, &compression->elem_size)))
        return r;
    if (!cbor_value_is_byte_string(&elt))
        return STREAM2_ERROR_PARSE;
    if ((r = consume_byte_string_nocopy(&elt, bstr, bstr_len, &elt)))
        return r;
    // Payload layout of the supported algorithms: a 12-byte header whose
    // first 8 bytes are the big-endian uncompressed size, see
    // https://github.com/dectris/compression/blob/v0.2.3/src/compression.c#L42
    if (strcmp(compression->algorithm, "bslz4") == 0 ||
        strcmp(compression->algorithm, "lz4") == 0)
    {
        if (*bstr_len < 12)
            return STREAM2_ERROR_DECODE;
        compression->orig_size = read_u64_be(*bstr);
    } else {
        return STREAM2_ERROR_NOT_IMPLEMENTED;
    }
    return CBOR_RESULT(cbor_value_leave_container(it, &elt));
}
// Parses a byte string that may optionally be wrapped in the DECTRIS
// compression tag. For plain byte strings the compression fields are zeroed
// out; in both cases the payload is referenced zero-copy.
static enum stream2_result parse_bytes(CborValue* it,
                                       struct stream2_bytes* bytes) {
    enum stream2_result r;
    if (cbor_value_is_tag(it)) {
        CborTag tag;
        if ((r = CBOR_RESULT(cbor_value_get_tag(it, &tag))))
            return r;
        if (tag == DECTRIS_COMPRESSION) {
            return parse_dectris_compression(it, &bytes->compression,
                                             &bytes->ptr, &bytes->len);
        } else {
            return STREAM2_ERROR_PARSE;
        }
    }
    if (!cbor_value_is_byte_string(it))
        return STREAM2_ERROR_PARSE;
    // Uncompressed: mark the compression descriptor as "none".
    bytes->compression.algorithm = NULL;
    bytes->compression.elem_size = 0;
    bytes->compression.orig_size = 0;
    return consume_byte_string_nocopy(it, &bytes->ptr, &bytes->len, it);
}
// Parses a typed array from [RFC 8746 section 2].
//
// On success *len is the number of elements, derived from the (uncompressed)
// byte size divided by the element size encoded in the tag.
//
// [RFC 8746 section 2]:
// https://www.rfc-editor.org/rfc/rfc8746.html#name-typed-arrays
static enum stream2_result parse_typed_array(CborValue* it,
                                             struct stream2_typed_array* array,
                                             uint64_t* len) {
    enum stream2_result r;
    if ((r = parse_tag(it, &array->tag)))
        return r;
    if ((r = parse_bytes(it, &array->data)))
        return r;
    uint64_t elem_size;
    if ((r = stream2_typed_array_elem_size(array, &elem_size)))
        return r;
    uint64_t size;
    if (array->data.compression.algorithm == NULL)
        size = array->data.len;
    else
        size = array->data.compression.orig_size;
    // The byte size must be an exact multiple of the element size.
    if (size % elem_size != 0)
        return STREAM2_ERROR_PARSE;
    *len = size / elem_size;
    return STREAM2_OK;
}
// Parses a multi-dimensional array from [RFC 8746 section 3.1.1].
//
// Expects tag 40 wrapping a 2-element array: [dimensions, typed array].
// Verifies that the product of the two dimensions equals the element count.
//
// [RFC 8746 section 3.1.1]:
// https://www.rfc-editor.org/rfc/rfc8746.html#name-row-major-order
static enum stream2_result parse_multidim_array(
    CborValue* it,
    struct stream2_multidim_array* multidim) {
    enum stream2_result r;
    CborTag tag;
    if ((r = parse_tag(it, &tag)))
        return r;
    if (tag != MULTI_DIMENSIONAL_ARRAY_ROW_MAJOR)
        return STREAM2_ERROR_PARSE;
    if (!cbor_value_is_array(it))
        return STREAM2_ERROR_PARSE;
    size_t len;
    if ((r = CBOR_RESULT(cbor_value_get_array_length(it, &len))))
        return r;
    if (len != 2)
        return STREAM2_ERROR_PARSE;
    CborValue elt;
    if ((r = CBOR_RESULT(cbor_value_enter_container(it, &elt))))
        return r;
    if ((r = parse_array_2_uint64(&elt, multidim->dim)))
        return r;
    uint64_t array_len;
    if ((r = parse_typed_array(&elt, &multidim->array, &array_len)))
        return r;
    // NOTE(review): dim[0] * dim[1] could in principle wrap for adversarial
    // 64-bit dimensions; in practice image dimensions are far smaller.
    if (multidim->dim[0] * multidim->dim[1] != array_len)
        return STREAM2_ERROR_PARSE;
    return CBOR_RESULT(cbor_value_leave_container(it, &elt));
}
// Parses one goniometer axis map, reading the "increment" and "start" angles;
// unknown keys are skipped.
static enum stream2_result parse_goniometer_axis(
    CborValue* it,
    struct stream2_goniometer_axis* axis) {
    enum stream2_result r;
    if (!cbor_value_is_map(it))
        return STREAM2_ERROR_PARSE;
    CborValue field;
    if ((r = CBOR_RESULT(cbor_value_enter_container(it, &field))))
        return r;
    while (cbor_value_is_valid(&field)) {
        char key[MAX_KEY_LEN];
        if ((r = parse_key(&field, key)))
            return r;
        if ((r = CBOR_RESULT(cbor_value_skip_tag(&field))))
            return r;
        if (strcmp(key, "increment") == 0) {
            if ((r = parse_double(&field, &axis->increment)))
                return r;
        } else if (strcmp(key, "start") == 0) {
            if ((r = parse_double(&field, &axis->start)))
                return r;
        } else {
            if ((r = CBOR_RESULT(cbor_value_advance(&field))))
                return r;
        }
    }
    return CBOR_RESULT(cbor_value_leave_container(it, &field));
}
// Parses the goniometer map: one axis sub-map per known axis name
// (chi, kappa, omega, phi, two_theta); unknown keys are skipped.
static enum stream2_result parse_goniometer(
    CborValue* it,
    struct stream2_goniometer* goniometer) {
    enum stream2_result r;
    if (!cbor_value_is_map(it))
        return STREAM2_ERROR_PARSE;
    CborValue field;
    if ((r = CBOR_RESULT(cbor_value_enter_container(it, &field))))
        return r;
    while (cbor_value_is_valid(&field)) {
        char key[MAX_KEY_LEN];
        if ((r = parse_key(&field, key)))
            return r;
        if ((r = CBOR_RESULT(cbor_value_skip_tag(&field))))
            return r;
        if (strcmp(key, "chi") == 0) {
            if ((r = parse_goniometer_axis(&field, &goniometer->chi)))
                return r;
        } else if (strcmp(key, "kappa") == 0) {
            if ((r = parse_goniometer_axis(&field, &goniometer->kappa)))
                return r;
        } else if (strcmp(key, "omega") == 0) {
            if ((r = parse_goniometer_axis(&field, &goniometer->omega)))
                return r;
        } else if (strcmp(key, "phi") == 0) {
            if ((r = parse_goniometer_axis(&field, &goniometer->phi)))
                return r;
        } else if (strcmp(key, "two_theta") == 0) {
            if ((r = parse_goniometer_axis(&field, &goniometer->two_theta)))
                return r;
        } else {
            if ((r = CBOR_RESULT(cbor_value_advance(&field))))
                return r;
        }
    }
    return CBOR_RESULT(cbor_value_leave_container(it, &field));
}
// Captures the raw encoded bytes of the (arbitrary) user_data value without
// decoding it: the span between the iterator position before and after
// advancing over the value.
static enum stream2_result parse_user_data(
    CborValue* it,
    struct stream2_user_data* user_data) {
    enum stream2_result r;
    const uint8_t* ptr = cbor_value_get_next_byte(it);
    if ((r = CBOR_RESULT(cbor_value_advance(it))))
        return r;
    user_data->ptr = ptr;
    user_data->len = cbor_value_get_next_byte(it) - ptr;
    return STREAM2_OK;
}
// Parses the body of a "start" message into a freshly allocated
// stream2_start_msg. *msg_out is set as soon as the allocation succeeds so
// the caller can free a partially-parsed message on error. Unknown keys are
// skipped; map/array-valued fields allocate owned sub-structures that
// free_start_msg() releases.
static enum stream2_result parse_start_msg(CborValue* it,
                                           struct stream2_msg** msg_out) {
    enum stream2_result r;
    struct stream2_start_msg* msg = calloc(1, sizeof(struct stream2_start_msg));
    *msg_out = (struct stream2_msg*)msg;
    if (msg == NULL)
        return STREAM2_ERROR_OUT_OF_MEMORY;
    msg->type = STREAM2_MSG_START;
    // Sentinel: lets consumers detect that no lookup table was present.
    msg->countrate_correction_lookup_table.tag = UINT64_MAX;
    while (cbor_value_is_valid(it)) {
        char key[MAX_KEY_LEN];
        if ((r = parse_key(it, key)))
            return r;
        // skip any tag for a value, except where verified
        if (strcmp(key, "countrate_correction_lookup_table") != 0) {
            if ((r = CBOR_RESULT(cbor_value_skip_tag(it))))
                return r;
        }
        if (strcmp(key, "series_id") == 0) {
            if ((r = parse_uint64(it, &msg->series_id)))
                return r;
        } else if (strcmp(key, "series_unique_id") == 0) {
            if ((r = parse_text_string(it, &msg->series_unique_id)))
                return r;
        } else if (strcmp(key, "arm_date") == 0) {
            if ((r = parse_text_string(it, &msg->arm_date)))
                return r;
        } else if (strcmp(key, "beam_center_x") == 0) {
            if ((r = parse_double(it, &msg->beam_center_x)))
                return r;
        } else if (strcmp(key, "beam_center_y") == 0) {
            if ((r = parse_double(it, &msg->beam_center_y)))
                return r;
        } else if (strcmp(key, "channels") == 0) {
            // Array of channel-name strings.
            if (!cbor_value_is_array(it))
                return STREAM2_ERROR_PARSE;
            size_t len;
            if ((r = CBOR_RESULT(cbor_value_get_array_length(it, &len))))
                return r;
            msg->channels.ptr = calloc(len, sizeof(char*));
            if (msg->channels.ptr == NULL)
                return STREAM2_ERROR_OUT_OF_MEMORY;
            msg->channels.len = len;
            CborValue elt;
            if ((r = CBOR_RESULT(cbor_value_enter_container(it, &elt))))
                return r;
            for (size_t i = 0; i < len; i++) {
                if ((r = parse_text_string(&elt, &msg->channels.ptr[i])))
                    return r;
            }
            if ((r = CBOR_RESULT(cbor_value_leave_container(it, &elt))))
                return r;
        } else if (strcmp(key, "count_time") == 0) {
            if ((r = parse_double(it, &msg->count_time)))
                return r;
        } else if (strcmp(key, "countrate_correction_enabled") == 0) {
            if ((r = parse_bool(it, &msg->countrate_correction_enabled)))
                return r;
        } else if (strcmp(key, "countrate_correction_lookup_table") == 0) {
            uint64_t len;
            if ((r = parse_typed_array(
                     it, &msg->countrate_correction_lookup_table, &len)))
                return r;
        } else if (strcmp(key, "detector_description") == 0) {
            if ((r = parse_text_string(it, &msg->detector_description)))
                return r;
        } else if (strcmp(key, "detector_serial_number") == 0) {
            if ((r = parse_text_string(it, &msg->detector_serial_number)))
                return r;
        } else if (strcmp(key, "detector_translation") == 0) {
            // Fixed 3-element translation vector.
            if (!cbor_value_is_array(it))
                return STREAM2_ERROR_PARSE;
            size_t len;
            if ((r = CBOR_RESULT(cbor_value_get_array_length(it, &len))))
                return r;
            if (len != 3)
                return STREAM2_ERROR_PARSE;
            CborValue elt;
            if ((r = CBOR_RESULT(cbor_value_enter_container(it, &elt))))
                return r;
            for (size_t i = 0; i < len; i++) {
                if ((r = parse_double(&elt, &msg->detector_translation[i])))
                    return r;
            }
            if ((r = CBOR_RESULT(cbor_value_leave_container(it, &elt))))
                return r;
        } else if (strcmp(key, "flatfield") == 0) {
            // Map: channel name -> 2-D flatfield array.
            if (!cbor_value_is_map(it))
                return STREAM2_ERROR_PARSE;
            size_t len;
            if ((r = CBOR_RESULT(cbor_value_get_map_length(it, &len))))
                return r;
            msg->flatfield.ptr = calloc(len, sizeof(struct stream2_flatfield));
            if (msg->flatfield.ptr == NULL)
                return STREAM2_ERROR_OUT_OF_MEMORY;
            msg->flatfield.len = len;
            CborValue field;
            if ((r = CBOR_RESULT(cbor_value_enter_container(it, &field))))
                return r;
            for (size_t i = 0; i < len; i++) {
                if ((r = parse_text_string(&field,
                                           &msg->flatfield.ptr[i].channel)))
                    return r;
                if ((r = parse_multidim_array(
                         &field, &msg->flatfield.ptr[i].flatfield)))
                    return r;
            }
            if ((r = CBOR_RESULT(cbor_value_leave_container(it, &field))))
                return r;
        } else if (strcmp(key, "flatfield_enabled") == 0) {
            if ((r = parse_bool(it, &msg->flatfield_enabled)))
                return r;
        } else if (strcmp(key, "frame_time") == 0) {
            if ((r = parse_double(it, &msg->frame_time)))
                return r;
        } else if (strcmp(key, "goniometer") == 0) {
            if ((r = parse_goniometer(it, &msg->goniometer)))
                return r;
        } else if (strcmp(key, "image_size_x") == 0) {
            if ((r = parse_uint64(it, &msg->image_size_x)))
                return r;
        } else if (strcmp(key, "image_size_y") == 0) {
            if ((r = parse_uint64(it, &msg->image_size_y)))
                return r;
        } else if (strcmp(key, "incident_energy") == 0) {
            if ((r = parse_double(it, &msg->incident_energy)))
                return r;
        } else if (strcmp(key, "incident_wavelength") == 0) {
            if ((r = parse_double(it, &msg->incident_wavelength)))
                return r;
        } else if (strcmp(key, "number_of_images") == 0) {
            if ((r = parse_uint64(it, &msg->number_of_images)))
                return r;
        } else if (strcmp(key, "pixel_mask") == 0) {
            // Map: channel name -> 2-D pixel-mask array.
            if (!cbor_value_is_map(it))
                return STREAM2_ERROR_PARSE;
            size_t len;
            if ((r = CBOR_RESULT(cbor_value_get_map_length(it, &len))))
                return r;
            msg->pixel_mask.ptr =
                calloc(len, sizeof(struct stream2_pixel_mask));
            if (msg->pixel_mask.ptr == NULL)
                return STREAM2_ERROR_OUT_OF_MEMORY;
            msg->pixel_mask.len = len;
            CborValue field;
            if ((r = CBOR_RESULT(cbor_value_enter_container(it, &field))))
                return r;
            for (size_t i = 0; i < len; i++) {
                if ((r = parse_text_string(&field,
                                           &msg->pixel_mask.ptr[i].channel)))
                    return r;
                if ((r = parse_multidim_array(
                         &field, &msg->pixel_mask.ptr[i].pixel_mask)))
                    return r;
            }
            if ((r = CBOR_RESULT(cbor_value_leave_container(it, &field))))
                return r;
        } else if (strcmp(key, "pixel_mask_enabled") == 0) {
            if ((r = parse_bool(it, &msg->pixel_mask_enabled)))
                return r;
        } else if (strcmp(key, "pixel_size_x") == 0) {
            if ((r = parse_double(it, &msg->pixel_size_x)))
                return r;
        } else if (strcmp(key, "pixel_size_y") == 0) {
            if ((r = parse_double(it, &msg->pixel_size_y)))
                return r;
        } else if (strcmp(key, "saturation_value") == 0) {
            if ((r = parse_uint64(it, &msg->saturation_value)))
                return r;
        } else if (strcmp(key, "sensor_material") == 0) {
            if ((r = parse_text_string(it, &msg->sensor_material)))
                return r;
        } else if (strcmp(key, "sensor_thickness") == 0) {
            if ((r = parse_double(it, &msg->sensor_thickness)))
                return r;
        } else if (strcmp(key, "threshold_energy") == 0) {
            // Map: channel name -> threshold energy (double).
            if (!cbor_value_is_map(it))
                return STREAM2_ERROR_PARSE;
            size_t len;
            if ((r = CBOR_RESULT(cbor_value_get_map_length(it, &len))))
                return r;
            msg->threshold_energy.ptr =
                calloc(len, sizeof(struct stream2_threshold_energy));
            if (msg->threshold_energy.ptr == NULL)
                return STREAM2_ERROR_OUT_OF_MEMORY;
            msg->threshold_energy.len = len;
            CborValue field;
            if ((r = CBOR_RESULT(cbor_value_enter_container(it, &field))))
                return r;
            for (size_t i = 0; i < len; i++) {
                if ((r = parse_text_string(
                         &field, &msg->threshold_energy.ptr[i].channel)))
                    return r;
                if ((r = parse_double(&field,
                                      &msg->threshold_energy.ptr[i].energy)))
                    return r;
            }
            if ((r = CBOR_RESULT(cbor_value_leave_container(it, &field))))
                return r;
        } else if (strcmp(key, "user_data") == 0) {
            if ((r = parse_user_data(it, &msg->user_data)))
                return r;
        } else if (strcmp(key, "virtual_pixel_interpolation_enabled") == 0) {
            if ((r = parse_bool(it, &msg->virtual_pixel_interpolation_enabled)))
                return r;
        } else {
            if ((r = CBOR_RESULT(cbor_value_advance(it))))
                return r;
        }
    }
    return STREAM2_OK;
}
// Parses the body of an "image" message into a freshly allocated
// stream2_image_msg. *msg_out is set as soon as allocation succeeds so the
// caller can free a partially-parsed message on error; unknown keys are
// skipped.
static enum stream2_result parse_image_msg(CborValue* it,
                                           struct stream2_msg** msg_out) {
    enum stream2_result r;
    struct stream2_image_msg* msg = calloc(1, sizeof(struct stream2_image_msg));
    *msg_out = (struct stream2_msg*)msg;
    if (msg == NULL)
        return STREAM2_ERROR_OUT_OF_MEMORY;
    msg->type = STREAM2_MSG_IMAGE;
    while (cbor_value_is_valid(it)) {
        char key[MAX_KEY_LEN];
        if ((r = parse_key(it, key)))
            return r;
        if ((r = CBOR_RESULT(cbor_value_skip_tag(it))))
            return r;
        if (strcmp(key, "series_id") == 0) {
            if ((r = parse_uint64(it, &msg->series_id)))
                return r;
        } else if (strcmp(key, "series_unique_id") == 0) {
            if ((r = parse_text_string(it, &msg->series_unique_id)))
                return r;
        } else if (strcmp(key, "image_id") == 0) {
            if ((r = parse_uint64(it, &msg->image_id)))
                return r;
        } else if (strcmp(key, "real_time") == 0) {
            if ((r = parse_array_2_uint64(it, msg->real_time)))
                return r;
        } else if (strcmp(key, "series_date") == 0) {
            if ((r = parse_text_string(it, &msg->series_date)))
                return r;
        } else if (strcmp(key, "start_time") == 0) {
            if ((r = parse_array_2_uint64(it, msg->start_time)))
                return r;
        } else if (strcmp(key, "stop_time") == 0) {
            if ((r = parse_array_2_uint64(it, msg->stop_time)))
                return r;
        } else if (strcmp(key, "user_data") == 0) {
            if ((r = parse_user_data(it, &msg->user_data)))
                return r;
        } else if (strcmp(key, "data") == 0) {
            // Map: channel name -> 2-D image data array.
            if (!cbor_value_is_map(it))
                return STREAM2_ERROR_PARSE;
            size_t len;
            if ((r = CBOR_RESULT(cbor_value_get_map_length(it, &len))))
                return r;
            msg->data.ptr = calloc(len, sizeof(struct stream2_image_data));
            if (msg->data.ptr == NULL)
                return STREAM2_ERROR_OUT_OF_MEMORY;
            msg->data.len = len;
            CborValue field;
            if ((r = CBOR_RESULT(cbor_value_enter_container(it, &field))))
                return r;
            for (size_t i = 0; i < len; i++) {
                if ((r = parse_text_string(&field, &msg->data.ptr[i].channel)))
                    return r;
                if ((r = parse_multidim_array(&field, &msg->data.ptr[i].data)))
                    return r;
            }
            if ((r = CBOR_RESULT(cbor_value_leave_container(it, &field))))
                return r;
        } else {
            if ((r = CBOR_RESULT(cbor_value_advance(it))))
                return r;
        }
    }
    return STREAM2_OK;
}
// Parses the body of an "end" message (only series_id and series_unique_id
// are read; everything else is skipped) into a freshly allocated
// stream2_end_msg.
static enum stream2_result parse_end_msg(CborValue* it,
                                         struct stream2_msg** msg_out) {
    enum stream2_result r;
    struct stream2_end_msg* msg = calloc(1, sizeof(struct stream2_end_msg));
    *msg_out = (struct stream2_msg*)msg;
    if (msg == NULL)
        return STREAM2_ERROR_OUT_OF_MEMORY;
    msg->type = STREAM2_MSG_END;
    while (cbor_value_is_valid(it)) {
        char key[MAX_KEY_LEN];
        if ((r = parse_key(it, key)))
            return r;
        if ((r = CBOR_RESULT(cbor_value_skip_tag(it))))
            return r;
        if (strcmp(key, "series_id") == 0) {
            if ((r = parse_uint64(it, &msg->series_id)))
                return r;
        } else if (strcmp(key, "series_unique_id") == 0) {
            if ((r = parse_text_string(it, &msg->series_unique_id)))
                return r;
        } else {
            if ((r = CBOR_RESULT(cbor_value_advance(it))))
                return r;
        }
    }
    return STREAM2_OK;
}
// Reads the first key/value pair of a message map, which must be
// "type": <string>; the type string is returned in `type`.
static enum stream2_result parse_msg_type(CborValue* it,
                                          char type[MAX_KEY_LEN]) {
    enum stream2_result r;
    char key[MAX_KEY_LEN];
    if ((r = parse_key(it, key)))
        return r;
    if (strcmp(key, "type") != 0)
        return STREAM2_ERROR_PARSE;
    if ((r = CBOR_RESULT(cbor_value_skip_tag(it))))
        return r;
    // The type value is itself a short text string; reuse the key parser.
    return parse_key(it, type);
}
// Parses one complete stream2 message: verifies the self-described-CBOR
// magic prefix, reads the "type" field and dispatches to the start/image/end
// parser. On error *msg_out may hold a partially-parsed message that the
// caller must free.
static enum stream2_result parse_msg(const uint8_t* buffer,
                                     size_t size,
                                     struct stream2_msg** msg_out) {
    enum stream2_result r;
    // https://www.rfc-editor.org/rfc/rfc8949.html#name-self-described-cbor
    const uint8_t MAGIC[3] = {0xd9, 0xd9, 0xf7};
    if (size < sizeof(MAGIC) || memcmp(buffer, MAGIC, sizeof(MAGIC)) != 0)
        return STREAM2_ERROR_SIGNATURE;
    buffer += sizeof(MAGIC);
    size -= sizeof(MAGIC);
    CborParser parser;
    CborValue it;
    if ((r = CBOR_RESULT(cbor_parser_init(buffer, size, 0, &parser, &it))))
        return r;
    if (!cbor_value_is_map(&it))
        return STREAM2_ERROR_PARSE;
    CborValue field;
    if ((r = CBOR_RESULT(cbor_value_enter_container(&it, &field))))
        return r;
    char type[MAX_KEY_LEN];
    if ((r = parse_msg_type(&field, type)))
        return r;
    if (strcmp(type, "start") == 0) {
        if ((r = parse_start_msg(&field, msg_out)))
            return r;
    } else if (strcmp(type, "image") == 0) {
        if ((r = parse_image_msg(&field, msg_out)))
            return r;
    } else if (strcmp(type, "end") == 0) {
        if ((r = parse_end_msg(&field, msg_out)))
            return r;
    } else {
        return STREAM2_ERROR_PARSE;
    }
    return CBOR_RESULT(cbor_value_leave_container(&it, &field));
}
// Public entry point: parses a message and guarantees that on failure
// *msg_out is NULL and any partially-parsed message has been freed.
enum stream2_result stream2_parse_msg(const uint8_t* buffer,
                                      size_t size,
                                      struct stream2_msg** msg_out) {
    enum stream2_result r;
    *msg_out = NULL;
    if ((r = parse_msg(buffer, size, msg_out))) {
        // parse_msg may have allocated a partial message before failing.
        if (*msg_out) {
            stream2_free_msg(*msg_out);
            *msg_out = NULL;
        }
        return r;
    }
    return STREAM2_OK;
}
// Releases everything a start message owns (strings and per-channel maps).
// Zero-copy byte-string pointers reference the caller's buffer and are not
// freed; the common series_unique_id is freed by stream2_free_msg.
static void free_start_msg(struct stream2_start_msg* msg) {
    free(msg->arm_date);
    for (size_t i = 0; i < msg->channels.len; i++)
        free(msg->channels.ptr[i]);
    free(msg->channels.ptr);
    free(msg->countrate_correction_lookup_table.data.compression.algorithm);
    free(msg->detector_description);
    free(msg->detector_serial_number);
    for (size_t i = 0; i < msg->flatfield.len; i++) {
        free(msg->flatfield.ptr[i].channel);
        free(msg->flatfield.ptr[i].flatfield.array.data.compression.algorithm);
    }
    free(msg->flatfield.ptr);
    for (size_t i = 0; i < msg->pixel_mask.len; i++) {
        free(msg->pixel_mask.ptr[i].channel);
        free(msg->pixel_mask.ptr[i]
                 .pixel_mask.array.data.compression.algorithm);
    }
    free(msg->pixel_mask.ptr);
    free(msg->sensor_material);
    for (size_t i = 0; i < msg->threshold_energy.len; i++)
        free(msg->threshold_energy.ptr[i].channel);
    free(msg->threshold_energy.ptr);
}

// Releases everything an image message owns (strings and the per-channel
// data map); the common series_unique_id is freed by stream2_free_msg.
static void free_image_msg(struct stream2_image_msg* msg) {
    free(msg->series_date);
    for (size_t i = 0; i < msg->data.len; i++) {
        free(msg->data.ptr[i].channel);
        free(msg->data.ptr[i].data.array.data.compression.algorithm);
    }
    free(msg->data.ptr);
}
// Frees a message returned by stream2_parse_msg, dispatching on its type for
// the type-specific members, then freeing the common fields and the message
// itself.
void stream2_free_msg(struct stream2_msg* msg) {
    switch (msg->type) {
        case STREAM2_MSG_START:
            free_start_msg((struct stream2_start_msg*)msg);
            break;
        case STREAM2_MSG_IMAGE:
            free_image_msg((struct stream2_image_msg*)msg);
            break;
        case STREAM2_MSG_END:
            break;
    }
    free(msg->series_unique_id);
    free(msg);
}
// Gets the element size in bytes of a typed array from its CBOR tag.
enum stream2_result stream2_typed_array_elem_size(
    const struct stream2_typed_array* array,
    uint64_t* elem_size) {
    // https://www.rfc-editor.org/rfc/rfc8746.html#name-types-of-numbers
    // Typed-array tags 64..87 encode the element length as 2^(f + ll),
    // where f is the float/int family bit and ll the length exponent.
    if (array->tag >= 64 && array->tag <= 87) {
        const uint64_t f = (array->tag >> 4) & 1;
        const uint64_t ll = array->tag & 3;
        *elem_size = 1 << (f + ll);
        return STREAM2_OK;
    }
    return STREAM2_ERROR_NOT_IMPLEMENTED;
}

View File

@@ -1,213 +0,0 @@
// DECTRIS proprietary license
#pragma once
#include <stdbool.h>
#include <stddef.h>
#include <stdint.h>
#if defined(__cplusplus)
extern "C" {
#endif
// Result codes returned by all stream2 parsing functions; STREAM2_OK is 0 so
// results can be tested directly in `if` conditions.
enum stream2_result {
    STREAM2_OK = 0,
    STREAM2_ERROR_OUT_OF_MEMORY,
    STREAM2_ERROR_SIGNATURE,
    STREAM2_ERROR_DECODE,
    STREAM2_ERROR_PARSE,
    STREAM2_ERROR_NOT_IMPLEMENTED,
};

// https://github.com/dectris/documentation/blob/main/cbor/dectris-compression-tag.md
struct stream2_compression {
    // Name of compression algorithm used.
    char* algorithm;

    // Element size if required for decompression, reserved otherwise.
    // Required by algorithm "bslz4".
    uint64_t elem_size;

    // Uncompressed size of the data.
    uint64_t orig_size;
};

// Represents a byte string, possibly compressed.
// `ptr` points zero-copy into the buffer passed to stream2_parse_msg and is
// only valid as long as that buffer lives.
struct stream2_bytes {
    const uint8_t* ptr;
    size_t len;
    struct stream2_compression compression;
};

// CBOR tags used with typed arrays.
//
// https://www.rfc-editor.org/rfc/rfc8746.html#tab-tag-values
enum stream2_typed_array_tag {
    STREAM2_TYPED_ARRAY_UINT8 = 64,
    STREAM2_TYPED_ARRAY_UINT16_LITTLE_ENDIAN = 69,
    STREAM2_TYPED_ARRAY_UINT32_LITTLE_ENDIAN = 70,
    STREAM2_TYPED_ARRAY_FLOAT32_LITTLE_ENDIAN = 85,
};

// A typed array defined in RFC 8746 section 2.
//
// https://www.rfc-editor.org/rfc/rfc8746.html#name-typed-arrays
struct stream2_typed_array {
    // CBOR tag of the typed array.
    uint64_t tag;

    // Byte representation of the array elements.
    struct stream2_bytes data;
};

// A multi-dimensional array defined in RFC 8746 section 3.1.
//
// The array is always a typed array of row-major order.
//
// https://www.rfc-editor.org/rfc/rfc8746.html#name-multi-dimensional-array
// https://www.rfc-editor.org/rfc/rfc8746.html#name-row-major-order
struct stream2_multidim_array {
    uint64_t dim[2];
    struct stream2_typed_array array;
};

// Owned array of malloc'ed strings.
struct stream2_array_text_string {
    char** ptr;
    size_t len;
};

struct stream2_flatfield {
    char* channel;
    struct stream2_multidim_array flatfield;
};

// Map of channel name -> flatfield array.
struct stream2_flatfield_map {
    struct stream2_flatfield* ptr;
    size_t len;
};

struct stream2_goniometer_axis {
    double increment;
    double start;
};

struct stream2_goniometer {
    struct stream2_goniometer_axis chi;
    struct stream2_goniometer_axis kappa;
    struct stream2_goniometer_axis omega;
    struct stream2_goniometer_axis phi;
    struct stream2_goniometer_axis two_theta;
};

struct stream2_image_data {
    char* channel;
    struct stream2_multidim_array data;
};

// Map of channel name -> image data array.
struct stream2_image_data_map {
    struct stream2_image_data* ptr;
    size_t len;
};

struct stream2_pixel_mask {
    char* channel;
    struct stream2_multidim_array pixel_mask;
};

// Map of channel name -> pixel mask array.
struct stream2_pixel_mask_map {
    struct stream2_pixel_mask* ptr;
    size_t len;
};

struct stream2_threshold_energy {
    char* channel;
    double energy;
};

// Map of channel name -> threshold energy.
struct stream2_threshold_energy_map {
    struct stream2_threshold_energy* ptr;
    size_t len;
};

// Raw, still-encoded CBOR bytes of the user_data value (zero-copy view into
// the parsed buffer).
struct stream2_user_data {
    const uint8_t* ptr;
    size_t len;
};

enum stream2_msg_type {
    STREAM2_MSG_START,
    STREAM2_MSG_IMAGE,
    STREAM2_MSG_END,
};

// Common header of every message; the concrete structs below extend it and
// can be cast to/from stream2_msg based on `type`.
struct stream2_msg {
    enum stream2_msg_type type;
    uint64_t series_id;
    char* series_unique_id;
};

struct stream2_start_msg {
    enum stream2_msg_type type;
    uint64_t series_id;
    char* series_unique_id;
    char* arm_date;
    double beam_center_x;
    double beam_center_y;
    struct stream2_array_text_string channels;
    double count_time;
    bool countrate_correction_enabled;
    struct stream2_typed_array countrate_correction_lookup_table;
    char* detector_description;
    char* detector_serial_number;
    double detector_translation[3];
    struct stream2_flatfield_map flatfield;
    bool flatfield_enabled;
    double frame_time;
    struct stream2_goniometer goniometer;
    uint64_t image_size_x;
    uint64_t image_size_y;
    double incident_energy;
    double incident_wavelength;
    uint64_t number_of_images;
    struct stream2_pixel_mask_map pixel_mask;
    bool pixel_mask_enabled;
    double pixel_size_x;
    double pixel_size_y;
    uint64_t saturation_value;
    char* sensor_material;
    double sensor_thickness;
    struct stream2_threshold_energy_map threshold_energy;
    struct stream2_user_data user_data;
    bool virtual_pixel_interpolation_enabled;
};

struct stream2_image_msg {
    enum stream2_msg_type type;
    uint64_t series_id;
    char* series_unique_id;
    uint64_t image_id;
    uint64_t real_time[2];
    char* series_date;
    uint64_t start_time[2];
    uint64_t stop_time[2];
    struct stream2_user_data user_data;
    struct stream2_image_data_map data;
};

struct stream2_end_msg {
    enum stream2_msg_type type;
    uint64_t series_id;
    char* series_unique_id;
};

// Parses one message from `buffer`. On success *msg_out is an owned message
// that must be released with stream2_free_msg(); on error *msg_out is NULL.
// Zero-copy fields inside the message reference `buffer`.
enum stream2_result stream2_parse_msg(const uint8_t* buffer,
                                      const size_t size,
                                      struct stream2_msg** msg_out);

// Frees a message returned by stream2_parse_msg.
void stream2_free_msg(struct stream2_msg* msg);

// Gets the element size of a typed array.
enum stream2_result stream2_typed_array_elem_size(
    const struct stream2_typed_array* array,
    uint64_t* elem_size);
#if defined(__cplusplus)
}
#endif

View File

@@ -8,9 +8,6 @@ target_link_libraries(CompressionBenchmark JFJochWriter JFJochReceiver JFJochCom
add_executable(HDF5DatasetWriteTest HDF5DatasetWriteTest.cpp)
target_link_libraries(HDF5DatasetWriteTest JFJochWriter JFJochReceiver JFJochCommon)
ADD_EXECUTABLE(jfjoch_preview_test jfjoch_preview_test.cpp)
TARGET_LINK_LIBRARIES(jfjoch_preview_test JFJochReceiver ImagePusher JFJochCommon)
ADD_EXECUTABLE(jfjoch_writer_test jfjoch_writer_test.cpp)
TARGET_LINK_LIBRARIES(jfjoch_writer_test JFJochReceiver JFJochWriter ImagePusher JFJochCommon)

View File

@@ -158,7 +158,10 @@ int main(int argc, char **argv) {
EndMessage end_message;
end_message.max_image_number = x.GetImageNum();
HDF5Metadata::NXmx(start_message, end_message);
std::unique_ptr<NXmx> master_file = std::make_unique<NXmx>(start_message);
master_file->Finalize(end_message);
master_file.reset();
logger.Info("Writing done");

View File

@@ -1,74 +0,0 @@
// Copyright (2019-2023) Paul Scherrer Institute
#include "../frame_serialize/ZMQStream2PreviewPublisher.h"
#include "../common/Logger.h"
#include "../compression/JFJochCompressor.h"
// Standalone manual test tool: publishes an endless stream of synthetic
// preview images over ZeroMQ (one per second) so a preview consumer can
// be exercised without detector hardware.
// Usage: ./jfjoch_preview_test <ZeroMQ address>
int main(int argc, char **argv) {
    Logger logger("jfjoch_preview_test");
    if (argc != 2) {
        logger.Error("Usage ./jfjoch_preview_test <ZeroMQ address>");
        exit(EXIT_FAILURE);
    }
    ZMQContext context;
    ZMQStream2PreviewPublisher publisher(context, argv[1]);
    logger.Info("Preview messages published on socket {}", publisher.GetAddress());
    // Fixed synthetic image geometry, int16 pixels.
    size_t xpixel = 2068;
    size_t ypixel = 2164;
    std::vector<int16_t> data(xpixel * ypixel);
    for (int i = 0; i < 100000; i++) {
        // Deterministic per-iteration ramp pattern so consecutive images differ.
        // NOTE(review): `int j` compared against a size_t product — fine for
        // this image size but a signed/unsigned mix.
        for (int j = 0; j < xpixel * ypixel; j++)
            data[j] = (i * 10 + j % 10) % INT16_MAX;
        JFJochBitShuffleCompressor compressor(CompressionAlgorithm::BSHUF_LZ4);
        auto compressed_data = compressor.Compress(data);
        // Hard-coded spots, both indexed and unindexed, so spot overlays can
        // be checked visually on the consumer side.
        std::vector<SpotToSave> spots;
        spots.push_back(SpotToSave{.x = 1000, .y = 1200, .intensity = 34, .indexed = false});
        spots.push_back(SpotToSave{.x = 1300, .y = 1150, .intensity = 678, .indexed = false});
        spots.push_back(SpotToSave{.x = 1600, .y = 1200, .intensity = 234, .indexed = false});
        spots.push_back(SpotToSave{.x = 1200, .y = 1000, .intensity = 123, .indexed = true});
        spots.push_back(SpotToSave{.x = 1500, .y = 1100, .intensity = 12, .indexed = true});
        spots.push_back(SpotToSave{.x = 1700, .y = 1300, .intensity = 345, .indexed = true});
        CompressedImage image {
            .data = compressed_data.data(),
            .size = compressed_data.size(),
            .xpixel = xpixel,
            .ypixel = ypixel,
            .pixel_depth_bytes = 2,
            .pixel_is_signed = true,
            .pixel_is_float = false,
            .algorithm = CompressionAlgorithm::BSHUF_LZ4,
            .channel = "default"
        };
        // Arbitrary but fixed metadata values exercise every DataMessage field.
        DataMessage message {
            .number = i,
            .image = image,
            .image_collection_efficiency = 1.0,
            .spots = spots,
            .bkg_estimate = 12.345f,
            .indexing_result = 1,
            .timestamp = 1ul<<27 | 1ul <<35,
            .exptime = 100,
            .series_unique_id = "111: image/test/lyso",
            .series_id = 111,
            .saturated_pixel_count = 378,
            .user_data = "Some random string ABCDEF",
            .jf_info = UINT32_MAX,
            .receiver_aq_dev_delay = 2323,
            .storage_cell = 0xF
        };
        publisher.SendImage(message);
        // Throttle to one image per second for human inspection.
        std::this_thread::sleep_for(std::chrono::seconds(1));
        logger.Info("Published image {}", i);
    }
}

View File

@@ -6,7 +6,7 @@
#include <getopt.h>
#include "../common/DiffractionExperiment.h"
#include "../writer/HDF5ImagePusher.h"
#include "../writer/HDF5Writer.h"
#include "../common/Logger.h"
#include "../common/RawToConvertedGeometry.h"
#include "../receiver/FrameTransformation.h"

View File

@@ -93,11 +93,15 @@ int main(int argc, char **argv) {
pusher.StartDataCollection(start_message);
std::vector<uint8_t> v(x.GetMaxCompressedSize() + 1024*1024);
for (int i = 0; i < nimages_out; i++) {
DataMessage data_message;
data_message.number = i;
PrepareCBORImage(data_message, x, output[i % nimages_in_file].data(), output_size[i % nimages_in_file]);
pusher.SendImage(data_message);
CBORStream2Serializer serializer(v.data(), v.size());
serializer.SerializeImage(data_message);
pusher.SendImage(v.data(), v.size(), i);
}
EndMessage end_message{};

View File

@@ -11,8 +11,6 @@ ADD_LIBRARY(JFJochWriter STATIC
ZMQImagePuller.cpp ZMQImagePuller.h
StreamWriter.cpp StreamWriter.h
HDF5DataFile.h HDF5DataFile.cpp
HDF5ImagePusher.cpp
HDF5ImagePusher.h
HDF5DataFilePlugin.cpp
HDF5DataFilePlugin.h
HDF5DataFilePluginXFEL.cpp

View File

@@ -1,46 +0,0 @@
// Copyright (2019-2024) Paul Scherrer Institute
#include "HDF5ImagePusher.h"
#include "MakeDirectory.h"
#include "HDF5NXmx.h"
// Begins a new data collection: snapshots the start message, saves the
// pixel-mask and calibration images, prepares the output directory and
// opens a fresh HDF5Writer. Serialized against End/SendImage via `m`.
void HDF5ImagePusher::StartDataCollection(const StartMessage &message) {
    std::unique_lock<std::mutex> ul(m);
    start_message = message;
    // Save() each mask/calibration image — presumably materializes the
    // data so it remains valid for later master-file writing;
    // TODO confirm semantics of Save().
    for (auto &i: start_message.pixel_mask)
        i.Save();
    for (auto &i: start_message.calibration)
        i.Save();
    CheckPath(start_message.file_prefix);
    MakeDirectory(start_message.file_prefix);
    writer = std::make_unique<HDF5Writer>(start_message);
}
// Ends the current data collection. Returns false if no collection is
// active. Closes the HDF5Writer, then writes the NXmx master file unless
// write_master_file is explicitly set to false. Any exception during
// master-file writing is swallowed and reported as `false`.
bool HDF5ImagePusher::EndDataCollection(const EndMessage &message) {
    std::unique_lock<std::mutex> ul(m);
    if (!writer)
        return false;
    else {
        writer.reset();  // flush/close data files before writing metadata
        try {
            // Unset optional => default to writing the master file.
            if (!message.write_master_file || message.write_master_file.value())
                HDF5Metadata::NXmx(start_message, message);
        } catch (...) {
            return false;
        }
        return true;
    }
}
// Writes one image to the open HDF5Writer. Returns false if no collection
// is active or the write throws; true on success. Thread-safe via `m`.
bool HDF5ImagePusher::SendImage(const DataMessage &message) {
    std::unique_lock<std::mutex> ul(m);
    if (writer) {
        try {
            writer->Write(message);
        } catch (...) {
            // Swallow write errors; caller only sees the boolean result.
            return false;
        }
        return true;
    } else
        return false;
}

View File

@@ -1,21 +0,0 @@
// Copyright (2019-2024) Paul Scherrer Institute
#ifndef JUNGFRAUJOCH_HDF5IMAGEPUSHER_H
#define JUNGFRAUJOCH_HDF5IMAGEPUSHER_H
#include "../frame_serialize/ImagePusher.h"
#include "HDF5Writer.h"
// ImagePusher implementation that writes incoming images to HDF5 files
// via HDF5Writer. All three operations are serialized on one mutex.
class HDF5ImagePusher : public ImagePusher {
    std::mutex m;                         // guards start_message and writer
    StartMessage start_message;           // snapshot taken at StartDataCollection
    std::unique_ptr<HDF5Writer> writer;   // non-null only while a collection is active
public:
    void StartDataCollection(const StartMessage &message) override;
    bool EndDataCollection(const EndMessage &message) override;
    bool SendImage(const DataMessage &message) override;
};
#endif //JUNGFRAUJOCH_HDF5IMAGEPUSHER_H

View File

@@ -7,56 +7,69 @@
#include "../common/GitInfo.h"
#include "../include/spdlog/fmt/fmt.h"
#include "MakeDirectory.h"
#include "../common/time_utc.h"
void HDF5Metadata::NXmx( const StartMessage &start, const EndMessage &end) {
const std::string& filename = start.file_prefix + "_master.h5";
NXmx::NXmx(const StartMessage &start)
: start_message(start),
filename(start.file_prefix + "_master.h5") {
MakeDirectory(filename);
HDF5File hdf5_file(filename);
hdf5_file = std::make_unique<HDF5File>(filename + ".tmp");
chmod(filename.c_str(), S_IRUSR | S_IWUSR | S_IRGRP | S_IWGRP); // default permissions
hdf5_file.Attr("file_name", filename);
hdf5_file->Attr("file_name", filename);
hdf5_file.Attr("HDF5_Version", hdf5_version());
HDF5Group(hdf5_file, "/entry").NXClass("NXentry").SaveScalar("definition", "NXmx");
HDF5Group(hdf5_file, "/entry/result").NXClass("NXcollection");
hdf5_file->Attr("HDF5_Version", hdf5_version());
HDF5Group(*hdf5_file, "/entry").NXClass("NXentry").SaveScalar("definition", "NXmx");
HDF5Group(*hdf5_file, "/entry/result").NXClass("NXcollection");
hdf5_file->SaveScalar("/entry/start_time", start.arm_date);
LinkToData(&hdf5_file, start, end);
Facility(&hdf5_file, start);
Time(&hdf5_file, start, end);
Detector(&hdf5_file, start, end);
Metrology(&hdf5_file, start);
Beam(&hdf5_file, start);
Sample(&hdf5_file, start, end);
Calibration(&hdf5_file, start);
AzimuthalIntegration(&hdf5_file, start, end);
ADUHistogram(&hdf5_file, end);
Attenuator(&hdf5_file, start);
Facility(start);
Detector(start);
Metrology(start);
Beam(start);
Attenuator(start);
}
void HDF5Metadata::Time(HDF5File *hdf5_file, const StartMessage &start, const EndMessage &end) {
if (end.end_date) {
hdf5_file->Attr("file_time", end.end_date.value());
hdf5_file->SaveScalar("/entry/end_time", end.end_date.value());
hdf5_file->SaveScalar("/entry/end_time_estimated", end.end_date.value());
NXmx::~NXmx() {
std::string old_filename = filename + ".tmp";
std::rename(old_filename.c_str(), filename.c_str());
}
// Builds the data-file name "<prefix>_data_NNNNNN.h5" for a zero-based
// file_number (the name uses a 1-based, six-digit index).
// Throws JFJochException for negative numbers or numbers >= 1,000,000
// (which would overflow the six-digit field).
std::string HDF5Metadata::DataFileName(const std::string &prefix, int64_t file_number) {
    if (file_number < 0)
        throw JFJochException(JFJochExceptionCategory::InputParameterInvalid,
                              "File number cannot be negative");
    else if (file_number >= 1000000)
        throw JFJochException(JFJochExceptionCategory::InputParameterInvalid,
                              "Format doesn't allow for 1 million or more files");
    else
        return fmt::format("{:s}_data_{:06d}.h5", prefix, file_number + 1);
}
void NXmx::LinkToData(const StartMessage &start, const EndMessage &end) {
hsize_t total_images = end.max_image_number;
hsize_t images_per_file = start.images_per_file;
hsize_t file_count = 0;
if (start.images_per_file > 0) {
file_count = total_images / images_per_file;
if (total_images % images_per_file > 0)
file_count++;
}
hdf5_file->SaveScalar("/entry/start_time", start.arm_date);
HDF5Group(*hdf5_file, "/entry/data").NXClass("NXdata");
for (uint32_t file_id = 0; file_id < file_count; file_id++) {
char buff[32];
snprintf(buff,32,"/entry/data/data_%06d", file_id+1);
hdf5_file->ExternalLink(HDF5Metadata::DataFileName(start.file_prefix, file_id),
"/entry/data/data",
std::string(buff));
}
}
// Writes NXsource/NXinstrument facility metadata: source and instrument
// names, each with a "short_name" attribute.
void HDF5Metadata::Facility(HDF5File *hdf5_file, const StartMessage &start) {
    HDF5Group(*hdf5_file, "/entry/source").NXClass("NXsource");
    SaveScalar(*hdf5_file, "/entry/source/name", start.source_name)
        ->Attr("short_name", start.source_name_short);
    HDF5Group(*hdf5_file, "/entry/instrument").NXClass("NXinstrument");
    SaveScalar(*hdf5_file, "/entry/instrument/name", start.instrument_name)
        ->Attr("short_name", start.instrument_name_short);
}
void HDF5Metadata::Detector(HDF5File *hdf5_file, const StartMessage &start, const EndMessage &end) {
void NXmx::Detector(const StartMessage &start) {
HDF5Group group(*hdf5_file, "/entry/instrument/detector");
group.NXClass("NXdetector");
SaveScalar(group, "beam_center_x", start.beam_center_x)->Units("pixel");
@@ -87,7 +100,6 @@ void HDF5Metadata::Detector(HDF5File *hdf5_file, const StartMessage &start, cons
HDF5Group det_specific(group, "detectorSpecific");
det_specific.NXClass("NXcollection");
SaveScalar(det_specific, "nimages", end.max_image_number);
SaveScalar(det_specific, "ntrigger", 1);
SaveScalar(det_specific, "x_pixels_in_detector", static_cast<uint32_t>(start.image_size_x));
@@ -101,17 +113,64 @@ void HDF5Metadata::Detector(HDF5File *hdf5_file, const StartMessage &start, cons
"ns");
}
if (end.images_collected_count)
SaveScalar(det_specific, "nimages_collected", end.images_collected_count.value());
if (end.images_sent_to_write_count)
SaveScalar(det_specific, "nimages_written", end.images_sent_to_write_count.value());
if (end.efficiency)
SaveScalar(det_specific, "data_collection_efficiency", end.efficiency.value());
if (end.max_receiver_delay)
SaveScalar(det_specific, "max_receiver_delay", end.max_receiver_delay.value());
if (!start.gain_file_names.empty())
det_specific.SaveVector("gain_file_names", start.gain_file_names);
if (!start.pixel_mask.empty()) {
SaveCBORImage("/entry/instrument/detector/detectorSpecific/pixel_mask", start.pixel_mask[0]);
hdf5_file->HardLink("/entry/instrument/detector/detectorSpecific/pixel_mask",
"/entry/instrument/detector/pixel_mask");
}
}
void HDF5Metadata::Beam(HDF5File *hdf5_file, const StartMessage &start) {
// Writes end-of-run detector statistics under detectorSpecific.
// Optional fields are only written when the EndMessage carries a value.
void NXmx::Detector(const EndMessage &end) {
    SaveScalar(*hdf5_file, "/entry/instrument/detector/detectorSpecific/nimages", end.max_image_number);
    if (end.images_collected_count)
        SaveScalar(*hdf5_file, "/entry/instrument/detector/detectorSpecific/nimages_collected", end.images_collected_count.value());
    if (end.images_sent_to_write_count)
        SaveScalar(*hdf5_file, "/entry/instrument/detector/detectorSpecific/nimages_written", end.images_sent_to_write_count.value());
    if (end.efficiency)
        SaveScalar(*hdf5_file, "/entry/instrument/detector/detectorSpecific/data_collection_efficiency", end.efficiency.value());
    if (end.max_receiver_delay)
        SaveScalar(*hdf5_file, "/entry/instrument/detector/detectorSpecific/max_receiver_delay", end.max_receiver_delay.value());
}
// Writes one NXdetector_module group: data origin/size plus fast/slow
// pixel-direction fields expressed as NeXus translation transformations
// depending on the given detector-arm axis.
// `pixel_size_mm` is saved as the magnitude of both pixel directions.
void NXmx::DetectorModule(const std::string &name, const std::vector<int32_t> &origin, const std::vector<int32_t> &size,
                          const std::vector<double> &fast_axis, const std::vector<double> &slow_axis,
                          const std::string &nx_axis, double pixel_size_mm) {
    HDF5Group module_group(*hdf5_file, "/entry/instrument/detector/" + name);
    module_group.NXClass("NXdetector_module");
    module_group.SaveVector("data_origin", origin);
    module_group.SaveVector("data_size", size);
    SaveScalar(module_group, "fast_pixel_direction", pixel_size_mm)->
            Transformation("m", "/entry/instrument/detector/transformations/" + nx_axis,
                           "", "", "translation", fast_axis,
                           {0,0,0}, "");
    SaveScalar(module_group, "slow_pixel_direction", pixel_size_mm)->
            Transformation("m", "/entry/instrument/detector/transformations/" + nx_axis,
                           "", "", "translation", slow_axis,
                           {0,0,0}, "");
    // Zero module offset: the module origin coincides with the arm axis.
    SaveScalar(module_group, "module_offset", 0)->
            Transformation("m", "/entry/instrument/detector/transformations/" + nx_axis,
                           "", "", "translation", {0,0,0});
}
// Writes NXsource/NXinstrument facility metadata: source and instrument
// names, each with a "short_name" attribute.
void NXmx::Facility(const StartMessage &start) {
    HDF5Group(*hdf5_file, "/entry/source").NXClass("NXsource");
    SaveScalar(*hdf5_file, "/entry/source/name", start.source_name)
        ->Attr("short_name", start.source_name_short);
    HDF5Group(*hdf5_file, "/entry/instrument").NXClass("NXinstrument");
    SaveScalar(*hdf5_file, "/entry/instrument/name", start.instrument_name)
        ->Attr("short_name", start.instrument_name_short);
}
void NXmx::Beam(const StartMessage &start) {
HDF5Group group(*hdf5_file, "/entry/instrument/beam");
group.NXClass("NXbeam");
SaveScalar(group, "incident_wavelength", start.incident_wavelength)->Units("angstrom");
@@ -119,15 +178,32 @@ void HDF5Metadata::Beam(HDF5File *hdf5_file, const StartMessage &start) {
SaveScalar(group, "total_flux", start.total_flux.value())->Units("Hz");
}
void HDF5Metadata::Attenuator(HDF5File *hdf5_file, const StartMessage &start) {
if (start.attenuator_transmission) {
HDF5Group group(*hdf5_file, "/entry/instrument/attenuator");
group.NXClass("NXattenuator");
SaveScalar(group, "attenuator_transmission", start.attenuator_transmission.value());
}
// Writes the NXtransformations detector-arm geometry: a translation whose
// direction is the normalized vector from the sample towards the beam
// center on the detector (beam center in length units plus detector
// distance), and a single NXdetector_module covering the whole image.
void NXmx::Metrology(const StartMessage &start) {
    HDF5Group transformations(*hdf5_file, "/entry/instrument/detector/transformations");
    transformations.NXClass("NXtransformations");
    std::vector<double> vector{start.beam_center_x * start.pixel_size_x,
                               start.beam_center_y * start.pixel_size_y,
                               start.detector_distance};
    double vector_length = sqrt(vector[0] * vector[0] + vector[1] * vector[1] + vector[2] * vector[2]);
    std::vector<double> vector_norm{vector[0] / vector_length, vector[1]/vector_length, vector[2]/vector_length};
    SaveScalar(transformations, "translation", vector_length)->
            Transformation("m", ".", "detector", "detector_arm", "translation", vector_norm);
    // https://manual.nexusformat.org/classes/base_classes/NXdetector_module.html?highlight=nxdetector_module
    // The order of indices (i, j or i, j, k) is slow to fast,
    // though EIGER has it the other way round.
    std::vector<int32_t> origin = {0, 0};
    std::vector<int32_t> size = {static_cast<int32_t>(start.image_size_y),
                                 static_cast<int32_t>(start.image_size_x)};
    DetectorModule("module", origin, size, {-1,0,0}, {0,-1,0}, "translation", start.pixel_size_x);
}
void HDF5Metadata::Sample(HDF5File *hdf5_file, const StartMessage &start, const EndMessage &end) {
void NXmx::Sample(const StartMessage &start, const EndMessage &end) {
HDF5Group group(*hdf5_file, "/entry/sample");
group.NXClass("NXsample");
group.SaveScalar("name", start.sample_name);
@@ -157,58 +233,19 @@ void HDF5Metadata::Sample(HDF5File *hdf5_file, const StartMessage &start, const
group.SaveScalar("depends_on", ".");
}
void HDF5Metadata::Metrology(HDF5File *hdf5_file, const StartMessage &start) {
HDF5Group transformations(*hdf5_file, "/entry/instrument/detector/transformations");
transformations.NXClass("NXtransformations");
std::vector<double> vector{start.beam_center_x * start.pixel_size_x,
start.beam_center_y * start.pixel_size_y,
start.detector_distance};
double vector_length = sqrt(vector[0] * vector[0] + vector[1] * vector[1] + vector[2] * vector[2]);
std::vector<double> vector_norm{vector[0] / vector_length, vector[1]/vector_length, vector[2]/vector_length};
SaveScalar(transformations, "translation", vector_length)->
Transformation("m", ".", "detector", "detector_arm", "translation", vector_norm);
// https://manual.nexusformat.org/classes/base_classes/NXdetector_module.html?highlight=nxdetector_module
// The order of indices (i, j or i, j, k) is slow to fast.
// though EIGER has is the other way round
// Confusing....
std::vector<int32_t> origin = {0, 0};
std::vector<int32_t> size = {static_cast<int32_t>(start.image_size_y),
static_cast<int32_t>(start.image_size_x)};
DetectorModule(hdf5_file, "module", origin, size, {-1,0,0}, {0,-1,0}, "translation", start.pixel_size_x);
// Writes the NXattenuator group with the transmission factor — only when
// the start message carries an attenuator_transmission value.
void NXmx::Attenuator(const StartMessage &start) {
    if (start.attenuator_transmission) {
        HDF5Group group(*hdf5_file, "/entry/instrument/attenuator");
        group.NXClass("NXattenuator");
        SaveScalar(group, "attenuator_transmission", start.attenuator_transmission.value());
    }
}
void HDF5Metadata::DetectorModule(HDF5File *hdf5_file, const std::string &name, const std::vector<int32_t> &origin,
const std::vector<int32_t> &size, const std::vector<double> &fast_axis,
const std::vector<double> &slow_axis,
const std::string &nx_axis, double pixel_size_mm) {
HDF5Group module_group(*hdf5_file, "/entry/instrument/detector/" + name);
module_group.NXClass("NXdetector_module");
module_group.SaveVector("data_origin", origin);
module_group.SaveVector("data_size", size);
SaveScalar(module_group, "fast_pixel_direction", pixel_size_mm)->
Transformation("m", "/entry/instrument/detector/transformations/" + nx_axis,
"", "", "translation", fast_axis,
{0,0,0}, "");
SaveScalar(module_group, "slow_pixel_direction", pixel_size_mm)->
Transformation("m", "/entry/instrument/detector/transformations/" + nx_axis,
"", "", "translation", slow_axis,
{0,0,0}, "");
SaveScalar(module_group, "module_offset", 0)->
Transformation("m", "/entry/instrument/detector/transformations/" + nx_axis,
"", "", "translation", {0,0,0});
// Stores one calibration image in the master file under
// detectorSpecific/<channel>, using the image's own channel name as the
// dataset name.
void NXmx::WriteCalibration(const CompressedImage &image) {
    SaveCBORImage("/entry/instrument/detector/detectorSpecific/" + image.channel, image);
}
void HDF5Metadata::SaveCBORImage(HDF5File *hdf5_file, const std::string &hdf5_path, const CompressedImage &image) {
void NXmx::SaveCBORImage(const std::string &hdf5_path, const CompressedImage &image) {
std::vector<hsize_t> dims = {image.ypixel, image.xpixel};
HDF5DataType data_type(image.pixel_depth_bytes, image.pixel_is_signed);
@@ -228,49 +265,7 @@ void HDF5Metadata::SaveCBORImage(HDF5File *hdf5_file, const std::string &hdf5_pa
dataset->WriteDirectChunk(image.data, image.size, {0,0});
}
// Writes calibration-related datasets under detectorSpecific: the first
// pixel mask (if any), the list of gain file names (if any), and every
// calibration image keyed by its channel name.
void HDF5Metadata::Calibration(HDF5File *hdf5_file, const StartMessage &start) {
    if (!start.pixel_mask.empty()) {
        // Only the first mask is saved here.
        SaveCBORImage(hdf5_file, "/entry/instrument/detector/detectorSpecific/pixel_mask", start.pixel_mask[0]);
    }
    if (!start.gain_file_names.empty())
        hdf5_file->SaveVector("/entry/instrument/detector/detectorSpecific/gain_file_names", start.gain_file_names);
    for (const auto &i: start.calibration)
        SaveCBORImage(hdf5_file, "/entry/instrument/detector/detectorSpecific/" + i.channel, i);
}
// Creates the /entry/data NXdata group and one external link per data
// file ("data_000001", ...), computed as ceil(total_images /
// images_per_file). With images_per_file == 0 no links are created.
void HDF5Metadata::LinkToData(HDF5File *hdf5_file, const StartMessage &start, const EndMessage &end) {
    hsize_t total_images = end.max_image_number;
    hsize_t images_per_file = start.images_per_file;
    hsize_t file_count = 0;
    if (start.images_per_file > 0) {
        // Round up: a partially filled last file still needs a link.
        file_count = total_images / images_per_file;
        if (total_images % images_per_file > 0)
            file_count++;
    }
    HDF5Group(*hdf5_file, "/entry/data").NXClass("NXdata");
    for (uint32_t file_id = 0; file_id < file_count; file_id++) {
        char buff[32];
        snprintf(buff,32,"/entry/data/data_%06d", file_id+1);
        hdf5_file->ExternalLink(DataFileName(start.file_prefix, file_id), "/entry/data/data", std::string(buff));
    }
}
// Builds the data-file name "<prefix>_data_NNNNNN.h5" for a zero-based
// file_number (the name uses a 1-based, six-digit index). Throws
// JFJochException for negative numbers or numbers >= 1,000,000.
std::string HDF5Metadata::DataFileName(const std::string &prefix, int64_t file_number) {
    if (file_number < 0)
        throw JFJochException(JFJochExceptionCategory::InputParameterInvalid,
                              "File number cannot be negative");
    else if (file_number >= 1000000)
        throw JFJochException(JFJochExceptionCategory::InputParameterInvalid,
                              "Format doesn't allow for 1 million or more files");
    else
        return fmt::format("{:s}_data_{:06d}.h5", prefix, file_number + 1);
}
void HDF5Metadata::AzimuthalIntegration(HDF5File *hdf5_file, const StartMessage &start, const EndMessage &end) {
void NXmx::AzimuthalIntegration(const StartMessage &start, const EndMessage &end) {
if (!start.az_int_bin_to_q.empty()) {
HDF5Group rad_int_group(*hdf5_file, "/entry/result/azimIntegration");
rad_int_group.SaveVector("bin_to_q", start.az_int_bin_to_q);
@@ -280,11 +275,30 @@ void HDF5Metadata::AzimuthalIntegration(HDF5File *hdf5_file, const StartMessage
}
}
void HDF5Metadata::ADUHistogram(HDF5File *hdf5_file, const EndMessage &end) {
// Writes the ADU histogram results (bin width plus one vector per named
// histogram) under /entry/result/adu_histogram — only when the end
// message carries histogram data.
void NXmx::ADUHistogram(const EndMessage &end) {
    if (!end.adu_histogram.empty()) {
        HDF5Group adu_histo_group(*hdf5_file, "/entry/result/adu_histogram");
        adu_histo_group.SaveScalar("bin_width", end.adu_histogram_bin_width);
        for (const auto &[x, y]: end.adu_histogram)
            adu_histo_group.SaveVector(x, y);
    }
}
}
// Completes the master file with end-of-run information: end time (the
// reported end_date, or the current UTC time when none is available),
// detector statistics, external links to the data files, sample
// metadata, azimuthal-integration results, and the ADU histogram.
void NXmx::Finalize(const EndMessage &end) {
    if (end.end_date) {
        hdf5_file->Attr("file_time", end.end_date.value());
        hdf5_file->SaveScalar("/entry/end_time", end.end_date.value());
        hdf5_file->SaveScalar("/entry/end_time_estimated", end.end_date.value());
    } else {
        // No end date reported — fall back to "now" in UTC.
        std::string time_now = time_UTC(std::chrono::system_clock::now());
        hdf5_file->Attr("file_time", time_now);
        hdf5_file->SaveScalar("/entry/end_time", time_now);
        hdf5_file->SaveScalar("/entry/end_time_estimated", time_now);
    }
    Detector(end);
    LinkToData(start_message, end);
    Sample(start_message, end);
    AzimuthalIntegration(start_message, end);
    ADUHistogram(end);
}

View File

@@ -8,28 +8,39 @@
#include "HDF5Objects.h"
namespace HDF5Metadata {
void NXmx(const StartMessage &start, const EndMessage &end);
void Time(HDF5File *hdf5_file, const StartMessage &start, const EndMessage &end);
void LinkToData(HDF5File *hdf5_file, const StartMessage &start, const EndMessage &end);
std::string DataFileName(const std::string &prefix, int64_t file_number);
}
void Detector(HDF5File *hdf5_file, const StartMessage &start, const EndMessage &end);
void DetectorModule(HDF5File *hdf5_file, const std::string &name, const std::vector<int32_t> &origin,
class NXmx {
std::unique_ptr<HDF5File> hdf5_file;
const StartMessage start_message;
const std::string filename;
void LinkToData(const StartMessage &start, const EndMessage &end);
void Detector(const StartMessage &start);
void Detector(const EndMessage &end);
void DetectorModule(const std::string &name, const std::vector<int32_t> &origin,
const std::vector<int32_t> &size, const std::vector<double> &fast_axis,
const std::vector<double> &slow_axis, const std::string &nx_axis, double pixel_size_mm);
void Facility(HDF5File *hdf5_file, const StartMessage &start);
void Beam(HDF5File *hdf5_file, const StartMessage &start);
void Metrology(HDF5File *hdf5_file, const StartMessage &start);
void Sample(HDF5File *hdf5_file, const StartMessage &start, const EndMessage &end);
void Attenuator(HDF5File *hdf5_file, const StartMessage &start);
void Facility(const StartMessage &start);
void Beam(const StartMessage &start);
void Metrology(const StartMessage &start);
void Sample(const StartMessage &start, const EndMessage &end);
void Attenuator(const StartMessage &start);
void Mask(HDF5File *hdf5_file, const StartMessage &start, const EndMessage &end);
void Calibration(HDF5File *hdf5_file, const StartMessage &start);
void SaveCBORImage(const std::string& hdf5_path, const CompressedImage &image);
void AzimuthalIntegration(const StartMessage &start, const EndMessage &end);
void ADUHistogram(const EndMessage &end);
void SaveCBORImage(HDF5File *hdf5_file, const std::string& hdf5_path, const CompressedImage &image);
void AzimuthalIntegration(HDF5File *hdf5_file, const StartMessage &start, const EndMessage &end);
void ADUHistogram(HDF5File *hdf5_file, const EndMessage &end);
std::string DataFileName(const std::string& prefix, int64_t file_number);
}
public:
NXmx(const StartMessage& start);
~NXmx();
NXmx(const NXmx &other) = delete;
NXmx& operator=(const NXmx &other) = delete;
void Finalize(const EndMessage &end);
void WriteCalibration(const CompressedImage &image);
};
#endif //JUNGFRAUJOCH_HDF5NXMX_H

View File

@@ -35,9 +35,18 @@ void StreamWriter::CollectImages(std::vector<HDF5DataFileStatistics> &v) {
CheckPath(start_message.file_prefix);
MakeDirectory(start_message.file_prefix);
HDF5Writer writer(start_message);
std::unique_ptr<NXmx> master_file;
if (!start_message.write_master_file || start_message.write_master_file.value())
master_file = std::make_unique<NXmx>(start_message);
bool first_image = true;
run = WaitForImage();
while (run && (image_puller.GetFrameType() == CBORStream2Deserializer::Type::CALIBRATION)) {
if (master_file)
master_file->WriteCalibration(image_puller.GetCalibrationMessage());
run = WaitForImage();
}
while (run && (image_puller.GetFrameType() == CBORStream2Deserializer::Type::IMAGE)) {
if (first_image) {
state = StreamWriterState::Receiving;
@@ -64,8 +73,9 @@ void StreamWriter::CollectImages(std::vector<HDF5DataFileStatistics> &v) {
if ((end_message.max_image_number == 0) && (max_image_number > 0))
end_message.max_image_number = max_image_number;
if (!end_message.write_master_file || end_message.write_master_file.value())
HDF5Metadata::NXmx(start_message, end_message);
if (master_file)
master_file->Finalize(end_message);
master_file.reset();
state = StreamWriterState::Idle;
}

View File

@@ -44,13 +44,24 @@ bool ZMQImagePuller::WaitForImage() {
if (msg_size > 0) {
deserializer.Process(zmq_recv_buffer);
if (deserializer.GetType() == CBORStream2Deserializer::Type::START) {
start_message = std::make_unique<StartMessage>(deserializer.GetStartMessage());
end_message.reset();
} else if (deserializer.GetType() == CBORStream2Deserializer::Type::END)
end_message = std::make_unique<EndMessage>(deserializer.GetEndMessage());
else if (deserializer.GetType() == CBORStream2Deserializer::Type::IMAGE)
deserialized_image_message = std::make_unique<DataMessage>(deserializer.GetDataMessage());
switch (deserializer.GetType()) {
case CBORStream2Deserializer::Type::START:
start_message = std::make_unique<StartMessage>(deserializer.GetStartMessage());
end_message.reset();
break;
case CBORStream2Deserializer::Type::END:
end_message = std::make_unique<EndMessage>(deserializer.GetEndMessage());
break;
case CBORStream2Deserializer::Type::IMAGE:
deserialized_image_message = std::make_unique<DataMessage>(deserializer.GetDataMessage());
break;
case CBORStream2Deserializer::Type::CALIBRATION:
calibration_message = std::make_unique<CompressedImage>(deserializer.GetCalibrationImage());
break;
case CBORStream2Deserializer::Type::NONE:
break;
}
if (repub_socket) {
// Republishing is non-blocking for images
@@ -91,3 +102,10 @@ EndMessage ZMQImagePuller::GetEndMessage() const {
else
throw JFJochException(JFJochExceptionCategory::InputParameterInvalid, "Start message not received so far");
}
// Returns a copy of the most recently received calibration image.
// Throws JFJochException if no calibration message has been received yet.
CompressedImage ZMQImagePuller::GetCalibrationMessage() const {
    if (calibration_message)
        return *calibration_message;
    else
        throw JFJochException(JFJochExceptionCategory::InputParameterInvalid, "Calibration message not received so far");
}

View File

@@ -26,6 +26,7 @@ class ZMQImagePuller {
std::unique_ptr<StartMessage> start_message;
std::unique_ptr<EndMessage> end_message;
std::unique_ptr<DataMessage> deserialized_image_message;
std::unique_ptr<CompressedImage> calibration_message;
std::unique_ptr<ZMQSocket> repub_socket;
public:
@@ -39,6 +40,7 @@ public:
[[nodiscard]] CBORStream2Deserializer::Type GetFrameType() const;
[[nodiscard]] StartMessage GetStartMessage() const;
[[nodiscard]] EndMessage GetEndMessage() const;
[[nodiscard]] CompressedImage GetCalibrationMessage() const;
};
#endif //JUNGFRAUJOCH_ZMQIMAGEPULLER_H