mirror of https://github.com/slsdetectorgroup/aare.git (synced 2025-06-13 07:47:13 +02:00)

reading multi port files (PR#2) (#31)
* works, not tested well on multi columns

---------

Co-authored-by: Erik Fröjdh <erik.frojdh@gmail.com>
@@ -1,25 +1,48 @@
 #include "aare/JsonFile.hpp"
 #include "aare/utils/logger.hpp"
 
 Frame JsonFile::get_frame(size_t frame_number) {
     if (frame_number > this->total_frames) {
         throw std::runtime_error("Frame number out of range");
     }
     int subfile_id = frame_number / this->max_frames_per_file;
-    std::byte *buffer;
-    size_t frame_size = this->subfiles[subfile_id]->bytes_per_frame();
-    buffer = new std::byte[frame_size];
-    this->subfiles[subfile_id]->get_frame(buffer, frame_number % this->max_frames_per_file);
-    auto f = Frame(buffer, this->rows, this->cols, this->bitdepth);
+    // create frame and get its buffer
+    auto f = Frame(this->rows, this->cols, this->bitdepth);
+    std::byte *frame_buffer = f._get_data();
+
+    if (this->geometry.col == 1) {
+        // get the part from each subfile and copy it to the frame
+        for (size_t part_idx = 0; part_idx != this->n_subfile_parts; ++part_idx) {
+            auto part_offset = this->subfiles[subfile_id][part_idx]->bytes_per_part();
+            this->subfiles[subfile_id][part_idx]->get_part(frame_buffer + part_idx * part_offset, frame_number % this->max_frames_per_file);
+        }
+
+    } else {
+        // create a buffer that will hold a frame part
+        auto bytes_per_part = this->subfile_rows * this->subfile_cols * this->bitdepth / 8;
+        std::byte *part_buffer = new std::byte[bytes_per_part];
+
+        for (size_t part_idx = 0; part_idx != this->n_subfile_parts; ++part_idx) {
+            this->subfiles[subfile_id][part_idx]->get_part(part_buffer, frame_number % this->max_frames_per_file);
+            for (int cur_row = 0; cur_row < (this->subfile_rows); cur_row++) {
+                auto irow = cur_row + (part_idx / this->geometry.col) * this->subfile_rows;
+                auto icol = (part_idx % this->geometry.col) * this->subfile_cols;
+                auto dest = (irow * this->cols + icol);
+                dest = dest * this->bitdepth / 8;
+                memcpy(frame_buffer + dest, part_buffer + cur_row * this->subfile_cols * this->bitdepth / 8,
+                       this->subfile_cols * this->bitdepth / 8);
+            }
+        }
+        delete[] part_buffer;
+    }
 
-    delete[] buffer;
     return f;
 }
 
 JsonFile::~JsonFile() {
-    for (auto& subfile : subfiles) {
-        delete subfile;
+    for (auto &vec : subfiles) {
+        for (auto subfile : vec) {
+            delete subfile;
+        }
     }
 }

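The multi-column branch above places each part row into the assembled frame with the irow/icol/dest arithmetic: part_idx / geometry.col selects the tile row, part_idx % geometry.col the tile column. A standalone sketch of that index math follows; the names and dimensions (geom_rows, part_rows, ...) are illustrative, not aare identifiers.

// Standalone sketch (not aare library code): assemble a full frame from
// row-major parts laid out on a grid of geom_rows x geom_cols modules.
// All names and dimensions here are illustrative.
#include <cstddef>
#include <cstring>
#include <vector>

int main() {
    const size_t geom_rows = 2, geom_cols = 2;     // assumed module grid
    const size_t part_rows = 256, part_cols = 512; // assumed part size
    const size_t bitdepth = 16;                    // bits per pixel
    const size_t bpp = bitdepth / 8;               // bytes per pixel

    const size_t rows = geom_rows * part_rows;
    const size_t cols = geom_cols * part_cols;

    std::vector<std::byte> frame(rows * cols * bpp);
    std::vector<std::byte> part(part_rows * part_cols * bpp);

    for (size_t part_idx = 0; part_idx != geom_rows * geom_cols; ++part_idx) {
        // ... fill `part` from the corresponding subfile here ...
        for (size_t cur_row = 0; cur_row < part_rows; ++cur_row) {
            // same mapping as get_frame(): tile row/column of this part
            size_t irow = cur_row + (part_idx / geom_cols) * part_rows;
            size_t icol = (part_idx % geom_cols) * part_cols;
            size_t dest = (irow * cols + icol) * bpp;
            std::memcpy(frame.data() + dest,
                        part.data() + cur_row * part_cols * bpp,
                        part_cols * bpp);
        }
    }
    return 0;
}
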
@@ -3,6 +3,8 @@
 #include "aare/SubFile.hpp"
 #include "aare/defs.hpp"
+#include "aare/helpers.hpp"
+#include "aare/utils/logger.hpp"
 
 #include <fstream>
 #include <iostream>
 #include <nlohmann/json.hpp>

@@ -15,8 +17,8 @@ JsonFileFactory::JsonFileFactory(std::filesystem::path fpath) {
     this->m_fpath = fpath;
 }
 
-void JsonFileFactory::parse_metadata(File*_file) {
-    auto file = dynamic_cast<JsonFile*>(_file);
+void JsonFileFactory::parse_metadata(File *_file) {
+    auto file = dynamic_cast<JsonFile *>(_file);
     std::ifstream ifs(file->master_fname());
     json j;
     ifs >> j;

@@ -26,8 +28,8 @@ void JsonFileFactory::parse_metadata(File*_file) {
     file->type = StringTo<DetectorType>(j["Detector Type"].get<std::string>());
     file->timing_mode = StringTo<TimingMode>(j["Timing Mode"].get<std::string>());
     file->total_frames = j["Frames in File"];
-    file->subfile_cols = j["Pixels"]["x"];
     file->subfile_rows = j["Pixels"]["y"];
+    file->subfile_cols = j["Pixels"]["x"];
     file->max_frames_per_file = j["Max Frames Per File"];
     try {
         file->bitdepth = j.at("Dynamic Range");

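parse_metadata() reads the detector layout straight out of the JSON master file with nlohmann::json. The snippet below exercises the same access pattern on a small in-memory document; all field values are invented.

// nlohmann::json access sketch on an invented master-file fragment.
#include <iostream>
#include <nlohmann/json.hpp>

int main() {
    nlohmann::json j = nlohmann::json::parse(R"({
        "Frames in File": 100,
        "Pixels": {"x": 1024, "y": 512},
        "Max Frames Per File": 1000,
        "Geometry": {"x": 1, "y": 2},
        "Dynamic Range": 16
    })");

    int total_frames = j["Frames in File"];
    int subfile_rows = j["Pixels"]["y"];
    int subfile_cols = j["Pixels"]["x"];
    int bitdepth = j.value("Dynamic Range", 16); // fall back if the key is absent

    std::cout << total_frames << " frames, parts of " << subfile_rows << "x"
              << subfile_cols << " at " << bitdepth << " bit\n";
    return 0;
}
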
@@ -38,29 +40,36 @@ void JsonFileFactory::parse_metadata(File*_file) {
     if (file->type == DetectorType::Eiger) {
         file->quad = (j["Quad"] == 1);
     }
+
+    file->geometry = {j["Geometry"]["y"], j["Geometry"]["x"]};
+    file->n_subfile_parts = file->geometry.row * file->geometry.col;
 }
 
-void JsonFileFactory::open_subfiles(File*_file) {
-    auto file = dynamic_cast<JsonFile*>(_file);
-    for (int i = 0; i != file->n_subfiles; ++i) {
-
-        file->subfiles.push_back(
-            new SubFile(file->data_fname(i, 0), file->type, file->subfile_rows, file->subfile_cols, file->bitdepth));
+void JsonFileFactory::open_subfiles(File *_file) {
+    auto file = dynamic_cast<JsonFile *>(_file);
+    for (size_t i = 0; i != file->n_subfiles; ++i) {
+        auto v = std::vector<SubFile *>(file->n_subfile_parts);
+        for (size_t j = 0; j != file->n_subfile_parts; ++j) {
+            v[j] = new SubFile(file->data_fname(i, j), file->type, file->subfile_rows,
+                               file->subfile_cols, file->bitdepth);
+        }
+        file->subfiles.push_back(v);
     }
 }
 
-JsonFile* JsonFileFactory::load_file() {
-    JsonFile* file = new JsonFile();
+JsonFile *JsonFileFactory::load_file() {
+    JsonFile *file = new JsonFile();
     file->fname = this->m_fpath;
     this->parse_fname(file);
     this->parse_metadata(file);
     file->find_number_of_subfiles();
+
+    this->find_geometry(file);
     this->open_subfiles(file);
 
     return file;
 }
 
-
 sls_detector_header JsonFileFactory::read_header(const std::filesystem::path &fname) {
     sls_detector_header h{};
     FILE *fp = fopen(fname.c_str(), "r");

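open_subfiles() now builds a two-level container: subfiles[i][j] is sequential file i, port/part j, opened from data_fname(i, j). A small sketch of the same layout follows; the stand-in type, the unique_ptr ownership, and the file-name pattern are all invented for illustration, not the aare API.

// Sketch only (not the aare API): a two-level container indexed as
// files[subfile_index][part_index], analogous to JsonFile::subfiles.
#include <cstddef>
#include <cstdio>
#include <memory>
#include <string>
#include <vector>

struct FakeSubFile {                  // stand-in for aare::SubFile
    std::string fname;
    explicit FakeSubFile(std::string f) : fname(std::move(f)) {}
};

int main() {
    const size_t n_subfiles = 3; // sequential files per port
    const size_t n_parts = 2;    // ports/parts that make up one frame

    std::vector<std::vector<std::unique_ptr<FakeSubFile>>> files;
    for (size_t i = 0; i != n_subfiles; ++i) {
        std::vector<std::unique_ptr<FakeSubFile>> v;
        for (size_t j = 0; j != n_parts; ++j) {
            // the name pattern is invented, not the real data_fname()
            v.push_back(std::make_unique<FakeSubFile>(
                "run_d" + std::to_string(j) + "_f" + std::to_string(i) + ".raw"));
        }
        files.push_back(std::move(v));
    }
    std::printf("%s\n", files[1][0]->fname.c_str()); // subfile 1, part 0
    return 0;
}
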
@@ -74,18 +83,20 @@ sls_detector_header JsonFileFactory::read_header(const std::filesystem::path &fn
     return h;
 }
 
-void JsonFileFactory::find_geometry(File* _file) {
-    auto file = dynamic_cast<JsonFile*>(_file);
+void JsonFileFactory::find_geometry(File *_file) {
+    auto file = dynamic_cast<JsonFile *>(_file);
     uint16_t r{};
     uint16_t c{};
-    for (int i = 0; i != file->n_subfiles; ++i) {
-        auto h = this->read_header(file->data_fname(i, 0));
-        r = std::max(r, h.row);
-        c = std::max(c, h.column);
+    for (size_t i = 0; i < file->n_subfile_parts; i++) {
+        for (size_t j = 0; j != file->n_subfiles; ++j) {
+            auto h = this->read_header(file->data_fname(j, i));
+            r = std::max(r, h.row);
+            c = std::max(c, h.column);
 
-        file->positions.push_back({h.row, h.column});
+            file->positions.push_back({h.row, h.column});
+        }
     }
 
     r++;
     c++;

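find_geometry() walks every part header, keeps the largest row and column index it sees, and adds one to get the module grid; the overall image size then follows from the part size, plus inter-module gaps for the rows. A toy version of that deduction is sketched below with invented header positions, an assumed part size, and an assumed gap value.

// Geometry deduction sketch (header values and sizes are invented).
#include <algorithm>
#include <cstdint>
#include <cstdio>

int main() {
    // (row, column) as they would appear in each part's sls_detector_header
    const uint16_t positions[][2] = {{0, 0}, {0, 1}, {1, 0}, {1, 1}};

    uint16_t r = 0, c = 0;
    for (const auto &p : positions) {
        r = std::max(r, p[0]);
        c = std::max(c, p[1]);
    }
    r++; // highest index -> number of module rows
    c++; // highest index -> number of module columns

    const int part_rows = 256, part_cols = 1024; // assumed part size
    const int module_gap_row = 36;               // assumed gap, cf. cfg.module_gap_row
    const int rows = r * part_rows + (r - 1) * module_gap_row;
    const int cols = c * part_cols;

    std::printf("grid %dx%d -> image %dx%d\n", static_cast<int>(r),
                static_cast<int>(c), rows, cols);
    return 0;
}
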
@@ -95,7 +106,7 @@ void JsonFileFactory::find_geometry(File* _file) {
     file->rows += (r - 1) * file->cfg.module_gap_row;
 }
 
-void JsonFileFactory::parse_fname(File* file) {
+void JsonFileFactory::parse_fname(File *file) {
 
     file->base_path = this->m_fpath.parent_path();
     file->base_name = this->m_fpath.stem();

@@ -106,5 +117,3 @@ void JsonFileFactory::parse_fname(File* file) {
     pos = file->base_name.find("_master_");
     file->base_name.erase(pos);
 }
-
-

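parse_fname() derives the base name by stripping the directory, the extension, and everything from "_master_" onward, which is then reused to build the data file names. A self-contained sketch of that string handling with a made-up path:

// File-name parsing sketch (the path below is made up).
#include <filesystem>
#include <iostream>
#include <string>

int main() {
    std::filesystem::path fpath = "/data/run42_master_0.json"; // hypothetical master file
    std::filesystem::path base_path = fpath.parent_path();     // "/data"
    std::string base_name = fpath.stem().string();             // "run42_master_0"

    auto pos = base_name.find("_master_");
    if (pos != std::string::npos)
        base_name.erase(pos);                                   // "run42"

    std::cout << base_path << " " << base_name << '\n';
    return 0;
}
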
@@ -1,5 +1,6 @@
 #include "aare/SubFile.hpp"
 #include <iostream>
+#include "aare/utils/logger.hpp"
 // #include <filesystem>
 
 
@@ -9,38 +10,37 @@ SubFile::SubFile(std::filesystem::path fname, DetectorType detector, ssize_t row
     this->m_cols = cols;
     this->m_fname = fname;
     this->m_bitdepth = bitdepth;
-    fp = fopen(fname.c_str(), "rb");
-    if (fp == nullptr) {
-        throw std::runtime_error("Could not open file " + fname.string());
-    }
-    std::cout << "File opened" << std::endl;
-    n_frames = std::filesystem::file_size(fname) / (sizeof(sls_detector_header) + rows * cols * bitdepth / 8);
-    std::cout << "Number of frames: " << n_frames << std::endl;
 
+    this->n_frames = std::filesystem::file_size(fname) / (sizeof(sls_detector_header) + rows * cols * bitdepth / 8);
     if (read_impl_map.find({detector, bitdepth}) == read_impl_map.end()) {
         throw std::runtime_error("Unsupported detector/bitdepth combination");
     }
-    read_impl = read_impl_map.at({detector, bitdepth});
-
-
-
+    this->read_impl = read_impl_map.at({detector, bitdepth});
+
 }
 
-size_t SubFile::get_frame(std::byte *buffer, int frame_number) {
-
+size_t SubFile::get_part(std::byte *buffer, int frame_number) {
     if (frame_number >= n_frames or frame_number < 0) {
         throw std::runtime_error("Frame number out of range");
    }
-    fseek(fp, sizeof(sls_detector_header) + (sizeof(sls_detector_header) + bytes_per_frame()) * frame_number, SEEK_SET);
-    return (this->*read_impl)(buffer);
+    // TODO: find a way to avoid opening and closing the file for each frame
+    aare::logger::debug(LOCATION, "frame:", frame_number, "file:", m_fname.c_str());
+    fp = fopen(m_fname.c_str(), "rb");
+    if (!fp) {
+        throw std::runtime_error(fmt::format("Could not open: {} for reading", m_fname.c_str()));
+    }
+    fseek(fp, sizeof(sls_detector_header) + (sizeof(sls_detector_header) + bytes_per_part()) * frame_number, SEEK_SET);
+    auto ret = (this->*read_impl)(buffer);
+    fclose(fp);
+    return ret;
 }
 
-size_t SubFile::read_impl_normal(std::byte *buffer) { return fread(buffer, this->bytes_per_frame(), 1, this->fp); }
+size_t SubFile::read_impl_normal(std::byte *buffer) { return fread(buffer, this->bytes_per_part(), 1, this->fp); }
 
 template <typename DataType> size_t SubFile::read_impl_reorder(std::byte *buffer) {
 
-    std::vector<DataType> tmp(this->pixels_per_frame());
-    size_t rc = fread(reinterpret_cast<char *>(&tmp[0]), this->bytes_per_frame(), 1, this->fp);
+    std::vector<DataType> tmp(this->pixels_per_part());
+    size_t rc = fread(reinterpret_cast<char *>(&tmp[0]), this->bytes_per_part(), 1, this->fp);
 
     int adc_nr[32] = {300, 325, 350, 375, 300, 325, 350, 375, 200, 225, 250, 275, 200, 225, 250, 275,
                       100, 125, 150, 175, 100, 125, 150, 175, 0, 25, 50, 75, 0, 25, 50, 75};

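Each record in a raw subfile is an sls_detector_header followed by the pixel payload, so get_part() above seeks to sizeof(header) + (sizeof(header) + bytes_per_part()) * frame_number before dispatching to read_impl. A small sketch of that offset arithmetic follows; the 112-byte header size and the part dimensions are assumptions for the example only.

// Offset arithmetic sketch (the header size is an assumption, not a definition).
#include <cstdio>

int main() {
    const long header_size = 112;                // assumed sizeof(sls_detector_header)
    const long rows = 512, cols = 1024, bitdepth = 16;
    const long bytes_per_part = rows * cols * bitdepth / 8;

    for (long frame_number = 0; frame_number < 3; ++frame_number) {
        // skip the current record's header plus all preceding full records
        long offset = header_size + (header_size + bytes_per_part) * frame_number;
        std::printf("frame %ld -> payload starts at byte %ld\n", frame_number, offset);
    }
    return 0;
}
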
@@ -69,8 +69,8 @@ template <typename DataType> size_t SubFile::read_impl_flip(std::byte *buffer) {
 
     // read to temporary buffer
     // TODO! benchmark direct reads
-    std::vector<std::byte> tmp(this->bytes_per_frame());
-    size_t rc = fread(reinterpret_cast<char *>(&tmp[0]), this->bytes_per_frame(), 1, this->fp);
+    std::vector<std::byte> tmp(this->bytes_per_part());
+    size_t rc = fread(reinterpret_cast<char *>(&tmp[0]), this->bytes_per_part(), 1, this->fp);
 
     // copy to place
     const size_t start = this->m_cols * (this->m_rows - 1) * sizeof(DataType);

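read_impl_flip() reads the whole part into a temporary buffer and then writes it out with the rows reversed, which is what the start offset above sets up. A generic sketch of such a row flip on an assumed 16-bit buffer, independent of the library code:

// Row-flip sketch (illustrative only): copy rows of src into dst in reverse
// vertical order, the way a flipped read would place them.
#include <cstddef>
#include <cstdint>
#include <cstring>
#include <vector>

int main() {
    const size_t rows = 4, cols = 8;
    std::vector<uint16_t> src(rows * cols), dst(rows * cols);
    for (size_t i = 0; i < src.size(); ++i)
        src[i] = static_cast<uint16_t>(i);

    for (size_t row = 0; row < rows; ++row) {
        // last source row becomes the first destination row
        const uint16_t *src_row = src.data() + (rows - 1 - row) * cols;
        std::memcpy(dst.data() + row * cols, src_row, cols * sizeof(uint16_t));
    }
    return 0;
}
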