16 Commits

Author SHA1 Message Date
f161df3591 fixed calculate eta 2025-04-16 09:30:26 +02:00
15e52565a9 dont convert to byte 2025-04-11 14:35:20 +02:00
e71569b15e resize before read 2025-04-11 13:38:33 +02:00
92f5421481 np test 2025-04-10 16:58:47 +02:00
113f34cc98 fixes 2025-04-10 16:50:04 +02:00
53a90e197e added additional tests
All checks were successful
Build on RHEL9 / buildh (push) Successful in 1m52s
2025-04-10 10:41:58 +02:00
76f050f69f solved merge conflict
Some checks failed
Build on RHEL9 / buildh (push) Failing after 1m22s
2025-04-10 09:21:50 +02:00
a13affa4d3 changed template arguments added tests 2025-04-10 09:13:58 +02:00
8b0eee1e66 fixed warnings and removed ambiguous read_frame (#154)
All checks were successful
Build on RHEL9 / buildh (push) Successful in 1m47s
Fixed warnings:
- unused variable in Interpolator
- Narrowing conversions uint64-->int64

Removed an ambiguous function from JungfrauDataFile
- NDArray read_frame(header&=nullptr)
- Frame read_frame()

NDArray and NDView size() is now signed
2025-04-09 17:54:55 +02:00
894065fe9c added utility plot
All checks were successful
Build on RHEL9 / buildh (push) Successful in 1m48s
2025-04-09 12:19:14 +02:00
f16273a566 Adding support for Jungfrau .dat files (#152)
All checks were successful
Build on RHEL9 / buildh (push) Successful in 1m48s
closes #150 

**Not addressed in this PR:** 

- pixels_per_frame, bytes_per_frame and tell should be made const in FileInterface
2025-04-08 15:31:04 +02:00
20d1d02fda function signature for push back (#153)
Some checks failed
Build the package using cmake then documentation / build (ubuntu-latest, 3.12) (push) Failing after 48s
This example now works:
```python
cl = Cluster3x3i(5,7,np.array((1,2,3,4,5,6,7,8,9), dtype = np.int32))
cv = ClusterVector_Cluster3x3i()
cv.push_back(cl)
```
2025-04-07 17:18:17 +02:00
7db1ae4d94 Dev/gitea ci (#151)
All checks were successful
Build on RHEL9 / buildh (push) Successful in 1m41s
Build and test on internal PSI gitea
2025-04-03 13:18:55 +02:00
8cad7a50a6 fixed py
Some checks failed
Build the package using cmake then documentation / build (ubuntu-latest, 3.12) (push) Failing after 42s
2025-04-01 15:00:03 +02:00
9d8e803474 Merge branch 'main' into developer 2025-04-01 14:35:27 +02:00
5d8ad27b21 Developer (#138)
All checks were successful
Build the package using cmake then documentation / build (ubuntu-latest, 3.12) (push) Successful in 1m45s
- Fully functioning variable size cluster finder
- Added interpolation
- Bit reordering for ADC SAR 05

---------

Co-authored-by: Patrick <patrick.sieberer@psi.ch>
Co-authored-by: JulianHeymes <julian.heymes@psi.ch>
Co-authored-by: Dhanya Thattil <dhanya.thattil@psi.ch>
Co-authored-by: xiangyu.xie <xiangyu.xie@psi.ch>
2025-03-20 12:52:04 +01:00
43 changed files with 1603 additions and 203 deletions


@ -2,9 +2,8 @@ name: Build the package using cmake then documentation
on:
workflow_dispatch:
push:
permissions:
contents: read
@ -16,12 +15,12 @@ jobs:
strategy:
fail-fast: false
matrix:
platform: [ubuntu-latest, ] # macos-12, windows-2019]
python-version: ["3.12",]
platform: [ubuntu-latest, ]
python-version: ["3.12", ]
runs-on: ${{ matrix.platform }}
# The setup-miniconda action needs this to activate miniconda
defaults:
run:
shell: "bash -l {0}"
@ -35,13 +34,13 @@ jobs:
sudo apt-get -y install cmake gcc g++
- name: Get conda
uses: conda-incubator/setup-miniconda@v3.0.4
uses: conda-incubator/setup-miniconda@v3
with:
python-version: ${{ matrix.python-version }}
environment-file: etc/dev-env.yml
miniforge-version: latest
channels: conda-forge
- name: Prepare
run: conda install doxygen sphinx=7.1.2 breathe pybind11 sphinx_rtd_theme furo nlohmann_json zeromq fmt numpy
conda-remove-defaults: "true"
- name: Build library
run: |
@ -56,3 +55,4 @@ jobs:


@ -0,0 +1,30 @@
name: Build on RHEL8
on:
workflow_dispatch:
permissions:
contents: read
jobs:
buildh:
runs-on: "ubuntu-latest"
container:
image: gitea.psi.ch/images/rhel8-developer-gitea-actions
steps:
- uses: actions/checkout@v4
- name: Install dependencies
run: |
dnf install -y cmake python3.12 python3.12-devel python3.12-pip
- name: Build library
run: |
mkdir build && cd build
cmake .. -DAARE_PYTHON_BINDINGS=ON -DAARE_TESTS=ON
make -j 2
- name: C++ unit tests
working-directory: ${{gitea.workspace}}/build
run: ctest


@ -0,0 +1,31 @@
name: Build on RHEL9
on:
push:
workflow_dispatch:
permissions:
contents: read
jobs:
buildh:
runs-on: "ubuntu-latest"
container:
image: gitea.psi.ch/images/rhel9-developer-gitea-actions
steps:
- uses: actions/checkout@v4
- name: Install dependencies
run: |
dnf install -y cmake python3.12 python3.12-devel python3.12-pip
- name: Build library
run: |
mkdir build && cd build
cmake .. -DAARE_PYTHON_BINDINGS=ON -DAARE_TESTS=ON
make -j 2
- name: C++ unit tests
working-directory: ${{gitea.workspace}}/build
run: ctest


@ -5,7 +5,6 @@ on:
push:
permissions:
contents: read
pages: write
@ -16,12 +15,11 @@ jobs:
strategy:
fail-fast: false
matrix:
platform: [ubuntu-latest, ] # macos-12, windows-2019]
platform: [ubuntu-latest, ]
python-version: ["3.12",]
runs-on: ${{ matrix.platform }}
# The setup-miniconda action needs this to activate miniconda
defaults:
run:
shell: "bash -l {0}"
@ -30,13 +28,13 @@ jobs:
- uses: actions/checkout@v4
- name: Get conda
uses: conda-incubator/setup-miniconda@v3.0.4
uses: conda-incubator/setup-miniconda@v3
with:
python-version: ${{ matrix.python-version }}
environment-file: etc/dev-env.yml
miniforge-version: latest
channels: conda-forge
- name: Prepare
run: conda install doxygen sphinx=7.1.2 breathe pybind11 sphinx_rtd_theme furo nlohmann_json zeromq fmt numpy
conda-remove-defaults: "true"
- name: Build library
run: |


@ -104,6 +104,7 @@ if(AARE_FETCH_LMFIT)
)
endif()
#Disable what we don't need from lmfit
set(BUILD_TESTING OFF CACHE BOOL "")
set(LMFIT_CPPTEST OFF CACHE BOOL "")
@ -343,9 +344,11 @@ set(PUBLICHEADERS
include/aare/File.hpp
include/aare/Fit.hpp
include/aare/FileInterface.hpp
include/aare/FilePtr.hpp
include/aare/Frame.hpp
include/aare/GainMap.hpp
include/aare/geo_helpers.hpp
include/aare/JungfrauDataFile.hpp
include/aare/NDArray.hpp
include/aare/NDView.hpp
include/aare/NumpyFile.hpp
@ -367,8 +370,10 @@ set(SourceFiles
${CMAKE_CURRENT_SOURCE_DIR}/src/decode.cpp
${CMAKE_CURRENT_SOURCE_DIR}/src/Frame.cpp
${CMAKE_CURRENT_SOURCE_DIR}/src/File.cpp
${CMAKE_CURRENT_SOURCE_DIR}/src/FilePtr.cpp
${CMAKE_CURRENT_SOURCE_DIR}/src/Fit.cpp
${CMAKE_CURRENT_SOURCE_DIR}/src/geo_helpers.cpp
${CMAKE_CURRENT_SOURCE_DIR}/src/JungfrauDataFile.cpp
${CMAKE_CURRENT_SOURCE_DIR}/src/NumpyFile.cpp
${CMAKE_CURRENT_SOURCE_DIR}/src/NumpyHelpers.cpp
${CMAKE_CURRENT_SOURCE_DIR}/src/Interpolator.cpp
@ -423,6 +428,7 @@ if(AARE_TESTS)
${CMAKE_CURRENT_SOURCE_DIR}/src/CalculateEta.test.cpp
${CMAKE_CURRENT_SOURCE_DIR}/src/ClusterFile.test.cpp
${CMAKE_CURRENT_SOURCE_DIR}/src/Pedestal.test.cpp
${CMAKE_CURRENT_SOURCE_DIR}/src/JungfrauDataFile.test.cpp
${CMAKE_CURRENT_SOURCE_DIR}/src/NumpyFile.test.cpp
${CMAKE_CURRENT_SOURCE_DIR}/src/NumpyHelpers.test.cpp
${CMAKE_CURRENT_SOURCE_DIR}/src/RawFile.test.cpp


@ -5,6 +5,7 @@ package:
source:
path: ..


@ -0,0 +1,25 @@
JungfrauDataFile
==================
JungfrauDataFile is a class to read the .dat files that are produced by Aldo's receiver.
It is mostly used for calibration.
The structure of the file is:
* JungfrauDataHeader
* Binary data (256x256, 256x1024 or 512x1024)
* JungfrauDataHeader
* ...
There is no metadata indicating the number of frames or the size of the image; both are inferred by the reader.
.. doxygenstruct:: aare::JungfrauDataHeader
:members:
:undoc-members:
:private-members:
.. doxygenclass:: aare::JungfrauDataFile
:members:
:undoc-members:
:private-members:
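For orientation, here is a minimal usage sketch based on the Python bindings added later in this PR; the file name is hypothetical, and the frame size and frame count are discovered from the file itself:
```python
import numpy as np
from aare import JungfrauDataFile

# Hypothetical first file of a series written by the receiver
with JungfrauDataFile("run_000000.dat") as f:
    print(f.rows, f.cols, f.total_frames, f.n_files)
    header, image = f.read_frame()   # (header record, 2D uint16 frame)
    headers, stack = f.read()        # all frames: headers plus a (n, rows, cols) array
    assert stack.dtype == np.uint16
```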

docs/src/Tests.rst (new file, +47 lines)

@ -0,0 +1,47 @@
****************
Tests
****************
We test the code from both the C++ and Python APIs. By default, only tests that do not require image data are run.
C++
~~~~~~~~~~~~~~~~~~
.. code-block:: bash
mkdir build
cd build
cmake .. -DAARE_TESTS=ON
make -j 4
export AARE_TEST_DATA=/path/to/test/data
./run_test [.files] #or using ctest, [.files] is the option to include tests needing data
Python
~~~~~~~~~~~~~~~~~~
.. code-block:: bash
#From the root dir of the library
python -m pytest python/tests --files # passing --files will run the tests needing data
Getting the test data
~~~~~~~~~~~~~~~~~~~~~~~~
.. attention::
The tests needing the test data are not run by default. To make the data available, set the environment variable
AARE_TEST_DATA to the path of the test data directory, then pass [.files] for the C++ tests or --files for Python.
The image files needed for the tests are large and are not included in the repository. They are stored
using Git LFS in a separate repository. To get the test data you need to clone that repository, which
requires Git LFS. Installation instructions are available at https://git-lfs.github.com/
Once you have Git LFS installed, you can clone the repository like any normal repo using:
.. code-block:: bash
git clone https://gitea.psi.ch/detectors/aare-test-data.git

docs/src/algorithm.rst (new file, +5 lines)

@ -0,0 +1,5 @@
algorithm
=============
.. doxygenfile:: algorithm.hpp


@ -20,9 +20,6 @@ AARE
Requirements
Consume
.. toctree::
:caption: Python API
:maxdepth: 1
@ -31,6 +28,7 @@ AARE
pyCtbRawFile
pyClusterFile
pyClusterVector
pyJungfrauDataFile
pyRawFile
pyRawMasterFile
pyVarClusterFinder
@ -42,6 +40,7 @@ AARE
:caption: C++ API
:maxdepth: 1
algorithm
NDArray
NDView
Frame
@ -51,6 +50,7 @@ AARE
ClusterFinderMT
ClusterFile
ClusterVector
JungfrauDataFile
Pedestal
RawFile
RawSubFile
@ -59,4 +59,8 @@ AARE
.. toctree::
:caption: Developer
:maxdepth: 3
Tests


@ -0,0 +1,10 @@
JungfrauDataFile
===================
.. py:currentmodule:: aare
.. autoclass:: JungfrauDataFile
:members:
:undoc-members:
:show-inheritance:
:inherited-members:

etc/dev-env.yml (new file, +15 lines)

@ -0,0 +1,15 @@
name: dev-environment
channels:
- conda-forge
dependencies:
- anaconda-client
- doxygen
- sphinx=7.1.2
- breathe
- pybind11
- sphinx_rtd_theme
- furo
- nlohmann_json
- zeromq
- fmt
- numpy


@ -229,13 +229,13 @@ ClusterFile<ClusterType, Enable>::read_clusters_without_cut(size_t n_clusters) {
}
ClusterVector<ClusterType> clusters(n_clusters);
clusters.resize(n_clusters);
int32_t iframe = 0; // frame number needs to be 4 bytes!
size_t nph_read = 0;
uint32_t nn = m_num_left;
uint32_t nph = m_num_left; // number of clusters in frame needs to be 4
// auto buf = reinterpret_cast<Cluster3x3 *>(clusters.data());
auto buf = clusters.data();
// if there are photons left from previous frame read them first
if (nph) {
@ -246,8 +246,7 @@ ClusterFile<ClusterType, Enable>::read_clusters_without_cut(size_t n_clusters) {
} else {
nn = nph;
}
nph_read += fread((buf + nph_read * clusters.item_size()),
clusters.item_size(), nn, fp);
nph_read += fread((buf + nph_read), clusters.item_size(), nn, fp);
m_num_left = nph - nn; // write back the number of photons left
}
@ -262,8 +261,8 @@ ClusterFile<ClusterType, Enable>::read_clusters_without_cut(size_t n_clusters) {
else
nn = nph;
nph_read += fread((buf + nph_read * clusters.item_size()),
clusters.item_size(), nn, fp);
nph_read +=
fread((buf + nph_read), clusters.item_size(), nn, fp);
m_num_left = nph - nn;
}
if (nph_read >= n_clusters)
@ -283,7 +282,7 @@ template <typename ClusterType, typename Enable>
ClusterVector<ClusterType>
ClusterFile<ClusterType, Enable>::read_clusters_with_cut(size_t n_clusters) {
ClusterVector<ClusterType> clusters;
clusters.reserve(n_clusters);
clusters.resize(n_clusters);
// if there are photons left from previous frame read them first
if (m_num_left) {


@ -297,7 +297,7 @@ class ClusterVector<Cluster<T, ClusterSizeX, ClusterSizeY, CoordType>> {
* @param frame_number frame number of the clusters. Default is 0, which is
* also used to indicate that the clusters come from many frames
*/
ClusterVector(size_t capacity = 1024, uint64_t frame_number = 0)
ClusterVector(size_t capacity = 300, int32_t frame_number = 0)
: m_frame_number(frame_number) {
m_data.reserve(capacity);
}

include/aare/FilePtr.hpp (new file, +30 lines)

@ -0,0 +1,30 @@
#pragma once
#include <cstdio>
#include <filesystem>
namespace aare {
/**
* \brief RAII wrapper for FILE pointer
*/
class FilePtr {
FILE *fp_{nullptr};
public:
FilePtr() = default;
FilePtr(const std::filesystem::path& fname, const std::string& mode);
FilePtr(const FilePtr &) = delete; // we don't want a copy
FilePtr &operator=(const FilePtr &) = delete; // since we handle a resource
FilePtr(FilePtr &&other);
FilePtr &operator=(FilePtr &&other);
FILE *get();
int64_t tell();
void seek(int64_t offset, int whence = SEEK_SET) {
if (fseek(fp_, offset, whence) != 0)
throw std::runtime_error("Error seeking in file");
}
std::string error_msg();
~FilePtr();
};
} // namespace aare


@ -0,0 +1,106 @@
#pragma once
#include <cstdint>
#include <filesystem>
#include <vector>
#include "aare/FilePtr.hpp"
#include "aare/defs.hpp"
#include "aare/NDArray.hpp"
#include "aare/FileInterface.hpp"
namespace aare {
struct JungfrauDataHeader{
uint64_t framenum;
uint64_t bunchid;
};
class JungfrauDataFile : public FileInterface {
size_t m_rows{}; //!< number of rows in the image, from find_frame_size();
size_t m_cols{}; //!< number of columns in the image, from find_frame_size();
size_t m_bytes_per_frame{}; //!< number of bytes per frame excluding header
size_t m_total_frames{}; //!< total number of frames in the series of files
size_t m_offset{}; //!< file index of the first file, allow starting at non zero file
size_t m_current_file_index{}; //!< The index of the open file
size_t m_current_frame_index{}; //!< The index of the current frame (with reference to all files)
std::vector<size_t> m_last_frame_in_file{}; //!< Used for seeking to the correct file
std::filesystem::path m_path; //!< path to the files
std::string m_base_name; //!< base name used for formatting file names
FilePtr m_fp; //!< RAII wrapper for a FILE*
using pixel_type = uint16_t;
static constexpr size_t header_size = sizeof(JungfrauDataHeader);
static constexpr size_t n_digits_in_file_index = 6; //!< to format file names
public:
JungfrauDataFile(const std::filesystem::path &fname);
std::string base_name() const; //!< get the base name of the file (without path and extension)
size_t bytes_per_frame() override;
size_t pixels_per_frame() override;
size_t bytes_per_pixel() const;
size_t bitdepth() const override;
void seek(size_t frame_index) override; //!< seek to the given frame index (note not byte offset)
size_t tell() override; //!< get the frame index of the file pointer
size_t total_frames() const override;
size_t rows() const override;
size_t cols() const override;
std::array<ssize_t,2> shape() const;
size_t n_files() const; //!< get the number of files in the series.
// Extra functions needed for FileInterface
Frame read_frame() override;
Frame read_frame(size_t frame_number) override;
std::vector<Frame> read_n(size_t n_frames=0) override;
void read_into(std::byte *image_buf) override;
void read_into(std::byte *image_buf, size_t n_frames) override;
size_t frame_number(size_t frame_index) override;
DetectorType detector_type() const override;
/**
* @brief Read a single frame from the file into the given buffer.
* @param image_buf buffer to read the frame into. (Note the caller is responsible for allocating the buffer)
* @param header pointer to a JungfrauDataHeader or nullptr to skip the header
*/
void read_into(std::byte *image_buf, JungfrauDataHeader *header = nullptr);
/**
* @brief Read multiple frames from the file into the given buffer.
* @param image_buf buffer to read the frames into. (Note the caller is responsible for allocating the buffer)
* @param n_frames number of frames to read
* @param header pointer to a JungfrauDataHeader or nullptr to skip the headers
*/
void read_into(std::byte *image_buf, size_t n_frames, JungfrauDataHeader *header = nullptr);
/**
* @brief Read a single frame from the file into the given NDArray
* @param image NDArray to read the frame into.
*/
void read_into(NDArray<uint16_t>* image, JungfrauDataHeader* header = nullptr);
JungfrauDataHeader read_header();
std::filesystem::path current_file() const { return fpath(m_current_file_index+m_offset); }
private:
/**
* @brief Find the size of the frame in the file. (256x256, 256x1024, 512x1024)
* @param fname path to the file
* @throws std::runtime_error if the file is empty or the size cannot be determined
*/
void find_frame_size(const std::filesystem::path &fname);
void parse_fname(const std::filesystem::path &fname);
void scan_files();
void open_file(size_t file_index);
std::filesystem::path fpath(size_t frame_index) const;
};
} // namespace aare


@ -194,7 +194,7 @@ class NDArray : public ArrayExpr<NDArray<T, Ndim>, Ndim> {
T *data() { return data_; }
std::byte *buffer() { return reinterpret_cast<std::byte *>(data_); }
size_t size() const { return size_; }
ssize_t size() const { return static_cast<ssize_t>(size_); }
size_t total_bytes() const { return size_ * sizeof(T); }
std::array<int64_t, Ndim> shape() const noexcept { return shape_; }
int64_t shape(int64_t i) const noexcept { return shape_[i]; }


@ -71,7 +71,7 @@ template <typename T, int64_t Ndim = 2> class NDView : public ArrayExpr<NDView<T
return buffer_[element_offset(strides_, index...)];
}
size_t size() const { return size_; }
ssize_t size() const { return static_cast<ssize_t>(size_); }
size_t total_bytes() const { return size_ * sizeof(T); }
std::array<int64_t, Ndim> strides() const noexcept { return strides_; }
@ -102,7 +102,7 @@ template <typename T, int64_t Ndim = 2> class NDView : public ArrayExpr<NDView<T
template<size_t Size>
NDView& operator=(const std::array<T, Size> &arr) {
if(size() != arr.size())
if(size() != static_cast<ssize_t>(arr.size()))
throw std::runtime_error(LOCATION + "Array and NDView size mismatch");
std::copy(arr.begin(), arr.end(), begin());
return *this;


@ -226,7 +226,7 @@ template <typename T> void VarClusterFinder<T>::single_pass(NDView<T, 2> img) {
template <typename T> void VarClusterFinder<T>::first_pass() {
for (size_t i = 0; i < original_.size(); ++i) {
for (ssize_t i = 0; i < original_.size(); ++i) {
if (use_noise_map)
threshold_ = 5 * noiseMap(i);
binary_(i) = (original_(i) > threshold_);
@ -250,7 +250,7 @@ template <typename T> void VarClusterFinder<T>::first_pass() {
template <typename T> void VarClusterFinder<T>::second_pass() {
for (size_t i = 0; i != labeled_.size(); ++i) {
for (ssize_t i = 0; i != labeled_.size(); ++i) {
auto cl = labeled_(i);
if (cl != 0) {
auto it = child.find(cl);


@ -7,13 +7,20 @@
namespace aare {
/**
* @brief Find the index of the last element smaller than val
* assume a sorted array
* @brief Index of the last element that is smaller than val.
* Requires a sorted array. Uses >= for ordering. If all elements
* are smaller it returns the last element and if all elements are
* larger it returns the first element.
* @param first iterator to the first element
* @param last iterator to the last element
* @param val value to compare
* @return index of the last element that is smaller than val
*
*/
template <typename T>
size_t last_smaller(const T* first, const T* last, T val) {
for (auto iter = first+1; iter != last; ++iter) {
if (*iter > val) {
if (*iter >= val) {
return std::distance(first, iter-1);
}
}
@ -25,7 +32,49 @@ size_t last_smaller(const NDArray<T, 1>& arr, T val) {
return last_smaller(arr.begin(), arr.end(), val);
}
template <typename T>
size_t last_smaller(const std::vector<T>& vec, T val) {
return last_smaller(vec.data(), vec.data()+vec.size(), val);
}
/**
* @brief Index of the first element that is larger than val.
* Requires a sorted array. Uses > for ordering. If all elements
* are larger it returns the first element and if all elements are
* smaller it returns the last element.
* @param first iterator to the first element
* @param last iterator to the last element
* @param val value to compare
* @return index of the first element that is larger than val
*/
template <typename T>
size_t first_larger(const T* first, const T* last, T val) {
for (auto iter = first; iter != last; ++iter) {
if (*iter > val) {
return std::distance(first, iter);
}
}
return std::distance(first, last-1);
}
template <typename T>
size_t first_larger(const NDArray<T, 1>& arr, T val) {
return first_larger(arr.begin(), arr.end(), val);
}
template <typename T>
size_t first_larger(const std::vector<T>& vec, T val) {
return first_larger(vec.data(), vec.data()+vec.size(), val);
}
/**
* @brief Index of the nearest element to val.
* Requires a sorted array. If there is no difference it takes the first element.
* @param first iterator to the first element
* @param last iterator to the last element
* @param val value to compare
* @return index of the nearest element
*/
template <typename T>
size_t nearest_index(const T* first, const T* last, T val) {
auto iter = std::min_element(first, last,
@ -50,6 +99,13 @@ size_t nearest_index(const std::array<T,N>& arr, T val) {
return nearest_index(arr.data(), arr.data()+arr.size(), val);
}
template <typename T>
std::vector<T> cumsum(const std::vector<T>& vec) {
std::vector<T> result(vec.size());
std::partial_sum(vec.begin(), vec.end(), result.begin());
return result;
}
} // namespace aare


@ -8,10 +8,16 @@ version = "2025.4.1"
[tool.scikit-build]
cmake.verbose = true
[tool.scikit-build.cmake.define]
AARE_PYTHON_BINDINGS = "ON"
AARE_SYSTEM_LIBRARIES = "ON"
AARE_INSTALL_PYTHONEXT = "ON"
AARE_INSTALL_PYTHONEXT = "ON"
[tool.pytest.ini_options]
markers = [
"files: marks tests that need additional data (deselect with '-m \"not files\"')",
]


@ -2,8 +2,8 @@
from . import _aare
# from ._aare import File, RawMasterFile, RawSubFile
# from ._aare import Pedestal_d, Pedestal_f, ClusterFinder, VarClusterFinder
from ._aare import File, RawMasterFile, RawSubFile, JungfrauDataFile
from ._aare import Pedestal_d, Pedestal_f, ClusterFinder_Cluster3x3i, VarClusterFinder
from ._aare import DetectorType
from ._aare import ClusterFile_Cluster3x3i as ClusterFile
from ._aare import hitmap
@ -17,7 +17,7 @@ from .CtbRawFile import CtbRawFile
from .RawFile import RawFile
from .ScanParameters import ScanParameters
from .utils import random_pixels, random_pixel, flat_list
from .utils import random_pixels, random_pixel, flat_list, add_colorbar
#make functions available in the top level API


@ -1,4 +1,6 @@
import numpy as np
import matplotlib.pyplot as plt
from mpl_toolkits.axes_grid1 import make_axes_locatable
def random_pixels(n_pixels, xmin=0, xmax=512, ymin=0, ymax=1024):
"""Return a list of random pixels.
@ -24,4 +26,11 @@ def random_pixel(xmin=0, xmax=512, ymin=0, ymax=1024):
def flat_list(xss):
"""Flatten a list of lists."""
return [x for xs in xss for x in xs]
return [x for xs in xss for x in xs]
def add_colorbar(ax, im, size="5%", pad=0.05):
"""Add a colorbar with the same height as the image."""
divider = make_axes_locatable(ax)
cax = divider.append_axes("right", size=size, pad=pad)
plt.colorbar(im, cax=cax)
return ax, im, cax
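A small usage sketch for add_colorbar; it assumes the top-level re-export shown in the __init__.py change above, and the image data is made up:
```python
import numpy as np
import matplotlib.pyplot as plt
from aare import add_colorbar  # re-exported from aare.utils in this PR

fig, ax = plt.subplots()
im = ax.imshow(np.random.rand(256, 256))
ax, im, cax = add_colorbar(ax, im)  # colorbar axes sized to match the image height
plt.show()
```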


@ -18,15 +18,16 @@ using pd_type = double;
using namespace aare;
#pragma GCC diagnostic push
#pragma GCC diagnostic ignored "-Wunused-parameter"
template <typename Type, uint8_t ClusterSizeX, uint8_t ClusterSizeY,
typename CoordType>
void define_cluster(py::module &m, const std::string &typestr) {
auto class_name = fmt::format("Cluster{}", typestr);
using ClusterType =
Cluster<Type, ClusterSizeX, ClusterSizeY, CoordType, void>;
py::class_<Cluster<Type, ClusterSizeX, ClusterSizeY, CoordType, void>>(
m, class_name.c_str())
m, class_name.c_str(), py::buffer_protocol())
.def(py::init([](uint8_t x, uint8_t y, py::array_t<Type> data) {
py::buffer_info buf_info = data.request();
@ -37,83 +38,59 @@ void define_cluster(py::module &m, const std::string &typestr) {
std::copy(ptr, ptr + ClusterSizeX * ClusterSizeY,
cluster.data); // Copy array contents
return cluster;
}))
}));
//.def(py::init<>())
.def_readwrite("x", &ClusterType::x)
.def_readwrite("y", &ClusterType::y)
.def_property(
"data",
[](ClusterType &c) -> py::array {
return py::array(py::buffer_info(
c.data, sizeof(Type),
py::format_descriptor<Type>::format(), // Type
// format
1, // Number of dimensions
{static_cast<ssize_t>(ClusterSizeX *
ClusterSizeY)}, // Shape (flattened)
{sizeof(Type)} // Stride (step size between elements)
));
},
[](ClusterType &c, py::array_t<Type> arr) {
py::buffer_info buf_info = arr.request();
Type *ptr = static_cast<Type *>(buf_info.ptr);
std::copy(ptr, ptr + ClusterSizeX * ClusterSizeY,
c.data); // TODO dont iterate over centers!!!
});
/*
.def_property(
"data",
[](ClusterType &c) -> py::array {
return py::array(py::buffer_info(
c.data, sizeof(Type),
py::format_descriptor<Type>::format(), // Type
// format
1, // Number of dimensions
{static_cast<ssize_t>(ClusterSizeX *
ClusterSizeY)}, // Shape (flattened)
{sizeof(Type)} // Stride (step size between elements)
));
},
[](ClusterType &c, py::array_t<Type> arr) {
py::buffer_info buf_info = arr.request();
Type *ptr = static_cast<Type *>(buf_info.ptr);
std::copy(ptr, ptr + ClusterSizeX * ClusterSizeY,
c.data); // TODO dont iterate over centers!!!
});
*/
}
template <typename Type, uint8_t ClusterSizeX, uint8_t ClusterSizeY,
typename CoordType = uint16_t>
void define_cluster_vector(py::module &m, const std::string &typestr) {
using ClusterType =
Cluster<Type, ClusterSizeX, ClusterSizeY, uint16_t, void>;
Cluster<Type, ClusterSizeX, ClusterSizeY, CoordType, void>;
auto class_name = fmt::format("ClusterVector_{}", typestr);
py::class_<ClusterVector<ClusterType>>(m, class_name.c_str(),
py::buffer_protocol())
py::class_<ClusterVector<
Cluster<Type, ClusterSizeX, ClusterSizeY, CoordType, void>, void>>(
m, class_name.c_str(),
py::buffer_protocol())
.def(py::init()) // TODO change!!!
/*
.def("push_back",
[](ClusterVector<ClusterType> &self, ClusterType &cl) {
// auto view = make_view_2d(data);
self.push_back(cl);
})
*/
/*
.def(
"push_back",
[](ClusterVector<ClusterType> &self, py::object obj) {
ClusterType &cl = py::cast<ClusterType &>(obj);
self.push_back(cl);
},
py::arg("cluster"))
*/
.def("push_back",
[](ClusterVector<ClusterType> &self, const ClusterType &cluster) {
self.push_back(cluster);
})
//.def("push_back", &ClusterVector<ClusterType>::push_back) //TODO
// implement push_back
.def_property_readonly("size", &ClusterVector<ClusterType>::size)
.def("item_size", &ClusterVector<ClusterType>::item_size)
.def_property_readonly("fmt",
[typestr]() { return fmt_format<ClusterType>; })
/*
.def("sum",
[](ClusterVector<ClusterType> &self) {
auto *vec = new std::vector<T>(self.sum());
return return_vector(vec);
})
.def("sum_2x2",
[](ClusterVector<ClusterType> &self) {
auto *vec = new std::vector<T>(self.sum_2x2());
return return_vector(vec);
})
*/
[typestr](ClusterVector<ClusterType> &self) {
return fmt_format<ClusterType>;
})
.def_property_readonly("cluster_size_x",
&ClusterVector<ClusterType>::cluster_size_x)
.def_property_readonly("cluster_size_y",
@ -135,11 +112,14 @@ void define_cluster_vector(py::module &m, const std::string &typestr) {
});
}
template <typename ClusterType>
template <typename T, uint8_t ClusterSizeX, uint8_t ClusterSizeY,
typename CoordType = uint16_t>
void define_cluster_finder_mt_bindings(py::module &m,
const std::string &typestr) {
auto class_name = fmt::format("ClusterFinderMT_{}", typestr);
using ClusterType = Cluster<T, ClusterSizeX, ClusterSizeY, CoordType>;
py::class_<ClusterFinderMT<ClusterType, uint16_t, pd_type>>(
m, class_name.c_str())
.def(py::init<Shape<2>, pd_type, size_t, size_t>(),
@ -185,11 +165,14 @@ void define_cluster_finder_mt_bindings(py::module &m,
py::arg("thread_index") = 0);
}
template <typename ClusterType>
template <typename T, uint8_t ClusterSizeX, uint8_t ClusterSizeY,
typename CoordType = uint16_t>
void define_cluster_collector_bindings(py::module &m,
const std::string &typestr) {
auto class_name = fmt::format("ClusterCollector_{}", typestr);
using ClusterType = Cluster<T, ClusterSizeX, ClusterSizeY, CoordType>;
py::class_<ClusterCollector<ClusterType>>(m, class_name.c_str())
.def(py::init<ClusterFinderMT<ClusterType, uint16_t, double> *>())
.def("stop", &ClusterCollector<ClusterType>::stop)
@ -198,26 +181,32 @@ void define_cluster_collector_bindings(py::module &m,
[](ClusterCollector<ClusterType> &self) {
auto v = new std::vector<ClusterVector<ClusterType>>(
self.steal_clusters());
return v;
return v; // TODO change!!!
},
py::return_value_policy::take_ownership);
}
template <typename ClusterType>
template <typename T, uint8_t ClusterSizeX, uint8_t ClusterSizeY,
typename CoordType = uint16_t>
void define_cluster_file_sink_bindings(py::module &m,
const std::string &typestr) {
auto class_name = fmt::format("ClusterFileSink_{}", typestr);
using ClusterType = Cluster<T, ClusterSizeX, ClusterSizeY, CoordType>;
py::class_<ClusterFileSink<ClusterType>>(m, class_name.c_str())
.def(py::init<ClusterFinderMT<ClusterType, uint16_t, double> *,
const std::filesystem::path &>())
.def("stop", &ClusterFileSink<ClusterType>::stop);
}
template <typename ClusterType>
template <typename T, uint8_t ClusterSizeX, uint8_t ClusterSizeY,
typename CoordType = uint16_t>
void define_cluster_finder_bindings(py::module &m, const std::string &typestr) {
auto class_name = fmt::format("ClusterFinder_{}", typestr);
using ClusterType = Cluster<T, ClusterSizeX, ClusterSizeY, CoordType>;
py::class_<ClusterFinder<ClusterType, uint16_t, pd_type>>(
m, class_name.c_str())
.def(py::init<Shape<2>, pd_type, size_t>(), py::arg("image_size"),
@ -248,9 +237,9 @@ void define_cluster_finder_bindings(py::module &m, const std::string &typestr) {
"steal_clusters",
[](ClusterFinder<ClusterType, uint16_t, pd_type> &self,
bool realloc_same_capacity) {
auto v = new ClusterVector<ClusterType>(
self.steal_clusters(realloc_same_capacity));
return v;
ClusterVector<ClusterType> clusters =
self.steal_clusters(realloc_same_capacity);
return clusters;
},
py::arg("realloc_same_capacity") = false)
.def(
@ -284,3 +273,4 @@ void define_cluster_finder_bindings(py::module &m, const std::string &typestr) {
return hitmap;
});
}
#pragma GCC diagnostic pop
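For context, these finder bindings are driven from Python roughly as follows; this is a sketch mirroring the pytest added later in this diff, not a complete workflow:
```python
import numpy as np
import aare._aare as aare

finder = aare.ClusterFinder_Cluster3x3i([100, 100])  # image_size
frame = np.zeros((100, 100))                         # empty dummy frame
finder.find_clusters(frame)
clusters = finder.steal_clusters(False)              # ClusterVector_Cluster3x3i
print(clusters.size)                                 # 0, nothing above threshold
```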


@ -19,11 +19,12 @@
namespace py = pybind11;
using namespace ::aare;
template <typename ClusterType>
template <typename Type, uint8_t CoordSizeX, uint8_t CoordSizeY,
typename CoordType = uint16_t>
void define_cluster_file_io_bindings(py::module &m,
const std::string &typestr) {
// PYBIND11_NUMPY_DTYPE(Cluster<int, 3, 3>, x, y,
// data); // is this used - maybe use as cluster type
using ClusterType = Cluster<Type, CoordSizeX, CoordSizeY, CoordType>;
auto class_name = fmt::format("ClusterFile_{}", typestr);
@ -80,7 +81,12 @@ void define_cluster_file_io_bindings(py::module &m,
}
return v;
});
}
template <typename Type, uint8_t CoordSizeX, uint8_t CoordSizeY,
typename CoordType = uint16_t>
void register_calculate_eta(py::module &m) {
using ClusterType = Cluster<Type, CoordSizeX, CoordSizeY, CoordType>;
m.def("calculate_eta2",
[](const aare::ClusterVector<ClusterType> &clusters) {
auto eta2 = new NDArray<double, 2>(calculate_eta2(clusters));
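The register_calculate_eta helper introduced here exposes calculate_eta2 for each cluster type; from Python it is used roughly as in the test added later in this diff. A minimal sketch:
```python
import numpy as np
import aare._aare as aare

clusters = aare.ClusterVector_Cluster3x3i()
clusters.push_back(aare.Cluster3x3i(0, 0, np.ones(9, dtype=np.int32)))
eta2 = aare.calculate_eta2(clusters)  # one (eta_x, eta_y) row per cluster
print(eta2.shape)                     # (1, 2)
```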


@ -9,12 +9,13 @@
namespace py = pybind11;
template <typename ClusterType>
void register_interpolate(py::class_<aare::Interpolator> &interpolator,
const std::string &typestr) {
auto name = fmt::format("interpolate_{}", typestr);
template <typename Type, uint8_t CoordSizeX, uint8_t CoordSizeY,
typename CoordType = uint16_t>
void register_interpolate(py::class_<aare::Interpolator> &interpolator) {
interpolator.def(name.c_str(),
using ClusterType = Cluster<Type, CoordSizeX, CoordSizeY, CoordType>;
interpolator.def("interpolate",
[](aare::Interpolator &self,
const ClusterVector<ClusterType> &clusters) {
auto photons = self.interpolate<ClusterType>(clusters);
@ -50,12 +51,12 @@ void define_interpolation_bindings(py::module &m) {
return return_image_data(ptr);
});
register_interpolate<Cluster<int, 3, 3>>(interpolator, "Cluster3x3i");
register_interpolate<Cluster<float, 3, 3>>(interpolator, "Cluster3x3f");
register_interpolate<Cluster<double, 3, 3>>(interpolator, "Cluster3x3d");
register_interpolate<Cluster<int, 2, 2>>(interpolator, "Cluster2x2i");
register_interpolate<Cluster<float, 2, 2>>(interpolator, "Cluster2x2f");
register_interpolate<Cluster<double, 2, 2>>(interpolator, "Cluster2x2d");
register_interpolate<int, 3, 3, uint16_t>(interpolator);
register_interpolate<float, 3, 3, uint16_t>(interpolator);
register_interpolate<double, 3, 3, uint16_t>(interpolator);
register_interpolate<int, 2, 2, uint16_t>(interpolator);
register_interpolate<float, 2, 2, uint16_t>(interpolator);
register_interpolate<double, 2, 2, uint16_t>(interpolator);
// TODO! Evaluate without converting to double
m.def(


@ -0,0 +1,116 @@
#include "aare/JungfrauDataFile.hpp"
#include "aare/defs.hpp"
#include <cstdint>
#include <filesystem>
#include <pybind11/iostream.h>
#include <pybind11/numpy.h>
#include <pybind11/pybind11.h>
#include <pybind11/stl.h>
#include <pybind11/stl/filesystem.h>
#include <string>
namespace py = pybind11;
using namespace ::aare;
// Disable warnings for unused parameters, as we ignore some
// in the __exit__ method
#pragma GCC diagnostic push
#pragma GCC diagnostic ignored "-Wunused-parameter"
auto read_dat_frame(JungfrauDataFile &self) {
py::array_t<JungfrauDataHeader> header(1);
py::array_t<uint16_t> image({
self.rows(),
self.cols()
});
self.read_into(reinterpret_cast<std::byte *>(image.mutable_data()),
header.mutable_data());
return py::make_tuple(header, image);
}
auto read_n_dat_frames(JungfrauDataFile &self, size_t n_frames) {
// adjust for actual frames left in the file
n_frames = std::min(n_frames, self.total_frames() - self.tell());
if (n_frames == 0) {
throw std::runtime_error("No frames left in file");
}
py::array_t<JungfrauDataHeader> header(n_frames);
py::array_t<uint16_t> image({
n_frames, self.rows(),
self.cols()});
self.read_into(reinterpret_cast<std::byte *>(image.mutable_data()),
n_frames, header.mutable_data());
return py::make_tuple(header, image);
}
void define_jungfrau_data_file_io_bindings(py::module &m) {
// Make the JungfrauDataHeader usable from numpy
PYBIND11_NUMPY_DTYPE(JungfrauDataHeader, framenum, bunchid);
py::class_<JungfrauDataFile>(m, "JungfrauDataFile")
.def(py::init<const std::filesystem::path &>())
.def("seek", &JungfrauDataFile::seek,
R"(
Seek to the given frame index.
)")
.def("tell", &JungfrauDataFile::tell,
R"(
Get the current frame index.
)")
.def_property_readonly("rows", &JungfrauDataFile::rows)
.def_property_readonly("cols", &JungfrauDataFile::cols)
.def_property_readonly("base_name", &JungfrauDataFile::base_name)
.def_property_readonly("bytes_per_frame",
&JungfrauDataFile::bytes_per_frame)
.def_property_readonly("pixels_per_frame",
&JungfrauDataFile::pixels_per_frame)
.def_property_readonly("bytes_per_pixel",
&JungfrauDataFile::bytes_per_pixel)
.def_property_readonly("bitdepth", &JungfrauDataFile::bitdepth)
.def_property_readonly("current_file", &JungfrauDataFile::current_file)
.def_property_readonly("total_frames", &JungfrauDataFile::total_frames)
.def_property_readonly("n_files", &JungfrauDataFile::n_files)
.def("read_frame", &read_dat_frame,
R"(
Read a single frame from the file.
)")
.def("read_n", &read_n_dat_frames,
R"(
Read at most n_frames frames from the file.
)")
.def(
"read",
[](JungfrauDataFile &self) {
self.seek(0);
auto n_frames = self.total_frames();
return read_n_dat_frames(self, n_frames);
},
R"(
Read all frames from the file. Seeks to the beginning before reading.
)")
.def("__enter__", [](JungfrauDataFile &self) { return &self; })
.def("__exit__",
[](JungfrauDataFile &self,
const std::optional<pybind11::type> &exc_type,
const std::optional<pybind11::object> &exc_value,
const std::optional<pybind11::object> &traceback) {
// self.close();
})
.def("__iter__", [](JungfrauDataFile &self) { return &self; })
.def("__next__", [](JungfrauDataFile &self) {
try {
return read_dat_frame(self);
} catch (std::runtime_error &e) {
throw py::stop_iteration();
}
});
}
#pragma GCC diagnostic pop
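Besides read_frame/read_n/read, these bindings make the file a context manager and an iterator. A small sketch; the file name is hypothetical and process() is a placeholder:
```python
from aare import JungfrauDataFile

def process(header, image):
    """Placeholder for user analysis code."""
    pass

with JungfrauDataFile("run_000000.dat") as f:  # hypothetical file name
    headers, frames = f.read_n(10)             # reads at most the frames remaining
    for header, image in f:                    # __iter__/__next__ until end of data
        process(header, image)
```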


@ -11,6 +11,8 @@
#include "raw_master_file.hpp"
#include "var_cluster.hpp"
#include "jungfrau_data_file.hpp"
// Pybind stuff
#include <pybind11/pybind11.h>
#include <pybind11/stl.h>
@ -28,13 +30,14 @@ PYBIND11_MODULE(_aare, m) {
define_pedestal_bindings<float>(m, "Pedestal_f");
define_fit_bindings(m);
define_interpolation_bindings(m);
define_jungfrau_data_file_io_bindings(m);
define_cluster_file_io_bindings<Cluster<int, 3, 3>>(m, "Cluster3x3i");
define_cluster_file_io_bindings<Cluster<double, 3, 3>>(m, "Cluster3x3d");
define_cluster_file_io_bindings<Cluster<float, 3, 3>>(m, "Cluster3x3f");
define_cluster_file_io_bindings<Cluster<int, 2, 2>>(m, "Cluster2x2i");
define_cluster_file_io_bindings<Cluster<float, 2, 2>>(m, "Cluster2x2f");
define_cluster_file_io_bindings<Cluster<double, 2, 2>>(m, "Cluster2x2d");
define_cluster_file_io_bindings<int, 3, 3, uint16_t>(m, "Cluster3x3i");
define_cluster_file_io_bindings<double, 3, 3, uint16_t>(m, "Cluster3x3d");
define_cluster_file_io_bindings<float, 3, 3, uint16_t>(m, "Cluster3x3f");
define_cluster_file_io_bindings<int, 2, 2, uint16_t>(m, "Cluster2x2i");
define_cluster_file_io_bindings<float, 2, 2, uint16_t>(m, "Cluster2x2f");
define_cluster_file_io_bindings<double, 2, 2, uint16_t>(m, "Cluster2x2d");
define_cluster_vector<int, 3, 3, uint16_t>(m, "Cluster3x3i");
define_cluster_vector<double, 3, 3, uint16_t>(m, "Cluster3x3d");
@ -43,33 +46,33 @@ PYBIND11_MODULE(_aare, m) {
define_cluster_vector<double, 2, 2, uint16_t>(m, "Cluster2x2d");
define_cluster_vector<float, 2, 2, uint16_t>(m, "Cluster2x2f");
define_cluster_finder_bindings<Cluster<int, 3, 3>>(m, "Cluster3x3i");
define_cluster_finder_bindings<Cluster<double, 3, 3>>(m, "Cluster3x3d");
define_cluster_finder_bindings<Cluster<float, 3, 3>>(m, "Cluster3x3f");
define_cluster_finder_bindings<Cluster<int, 2, 2>>(m, "Cluster2x2i");
define_cluster_finder_bindings<Cluster<double, 2, 2>>(m, "Cluster2x2d");
define_cluster_finder_bindings<Cluster<float, 2, 2>>(m, "Cluster2x2f");
define_cluster_finder_bindings<int, 3, 3, uint16_t>(m, "Cluster3x3i");
define_cluster_finder_bindings<double, 3, 3, uint16_t>(m, "Cluster3x3d");
define_cluster_finder_bindings<float, 3, 3, uint16_t>(m, "Cluster3x3f");
define_cluster_finder_bindings<int, 2, 2, uint16_t>(m, "Cluster2x2i");
define_cluster_finder_bindings<double, 2, 2, uint16_t>(m, "Cluster2x2d");
define_cluster_finder_bindings<float, 2, 2, uint16_t>(m, "Cluster2x2f");
define_cluster_finder_mt_bindings<Cluster<int, 3, 3>>(m, "Cluster3x3i");
define_cluster_finder_mt_bindings<Cluster<double, 3, 3>>(m, "Cluster3x3d");
define_cluster_finder_mt_bindings<Cluster<float, 3, 3>>(m, "Cluster3x3f");
define_cluster_finder_mt_bindings<Cluster<int, 2, 2>>(m, "Cluster2x2i");
define_cluster_finder_mt_bindings<Cluster<double, 2, 2>>(m, "Cluster2x2d");
define_cluster_finder_mt_bindings<Cluster<float, 2, 2>>(m, "Cluster2x2f");
define_cluster_finder_mt_bindings<int, 3, 3, uint16_t>(m, "Cluster3x3i");
define_cluster_finder_mt_bindings<double, 3, 3, uint16_t>(m, "Cluster3x3d");
define_cluster_finder_mt_bindings<float, 3, 3, uint16_t>(m, "Cluster3x3f");
define_cluster_finder_mt_bindings<int, 2, 2, uint16_t>(m, "Cluster2x2i");
define_cluster_finder_mt_bindings<double, 2, 2, uint16_t>(m, "Cluster2x2d");
define_cluster_finder_mt_bindings<float, 2, 2, uint16_t>(m, "Cluster2x2f");
define_cluster_file_sink_bindings<Cluster<int, 3, 3>>(m, "Cluster3x3i");
define_cluster_file_sink_bindings<Cluster<double, 3, 3>>(m, "Cluster3x3d");
define_cluster_file_sink_bindings<Cluster<float, 3, 3>>(m, "Cluster3x3f");
define_cluster_file_sink_bindings<Cluster<int, 2, 2>>(m, "Cluster2x2i");
define_cluster_file_sink_bindings<Cluster<double, 2, 2>>(m, "Cluster2x2d");
define_cluster_file_sink_bindings<Cluster<float, 2, 2>>(m, "Cluster2x2f");
define_cluster_file_sink_bindings<int, 3, 3, uint16_t>(m, "Cluster3x3i");
define_cluster_file_sink_bindings<double, 3, 3, uint16_t>(m, "Cluster3x3d");
define_cluster_file_sink_bindings<float, 3, 3, uint16_t>(m, "Cluster3x3f");
define_cluster_file_sink_bindings<int, 2, 2, uint16_t>(m, "Cluster2x2i");
define_cluster_file_sink_bindings<double, 2, 2, uint16_t>(m, "Cluster2x2d");
define_cluster_file_sink_bindings<float, 2, 2, uint16_t>(m, "Cluster2x2f");
define_cluster_collector_bindings<Cluster<int, 3, 3>>(m, "Cluster3x3i");
define_cluster_collector_bindings<Cluster<double, 3, 3>>(m, "Cluster3x3f");
define_cluster_collector_bindings<Cluster<float, 3, 3>>(m, "Cluster3x3d");
define_cluster_collector_bindings<Cluster<int, 2, 2>>(m, "Cluster2x2i");
define_cluster_collector_bindings<Cluster<double, 2, 2>>(m, "Cluster2x2f");
define_cluster_collector_bindings<Cluster<float, 2, 2>>(m, "Cluster2x2d");
define_cluster_collector_bindings<int, 3, 3, uint16_t>(m, "Cluster3x3i");
define_cluster_collector_bindings<double, 3, 3, uint16_t>(m, "Cluster3x3f");
define_cluster_collector_bindings<float, 3, 3, uint16_t>(m, "Cluster3x3d");
define_cluster_collector_bindings<int, 2, 2, uint16_t>(m, "Cluster2x2i");
define_cluster_collector_bindings<double, 2, 2, uint16_t>(m, "Cluster2x2f");
define_cluster_collector_bindings<float, 2, 2, uint16_t>(m, "Cluster2x2d");
define_cluster<int, 3, 3, uint16_t>(m, "3x3i");
define_cluster<float, 3, 3, uint16_t>(m, "3x3f");
@ -77,4 +80,11 @@ PYBIND11_MODULE(_aare, m) {
define_cluster<int, 2, 2, uint16_t>(m, "2x2i");
define_cluster<float, 2, 2, uint16_t>(m, "2x2f");
define_cluster<double, 2, 2, uint16_t>(m, "2x2d");
register_calculate_eta<int, 3, 3, uint16_t>(m);
register_calculate_eta<float, 3, 3, uint16_t>(m);
register_calculate_eta<double, 3, 3, uint16_t>(m);
register_calculate_eta<int, 2, 2, uint16_t>(m);
register_calculate_eta<float, 2, 2, uint16_t>(m);
register_calculate_eta<double, 2, 2, uint16_t>(m);
}


@ -74,10 +74,10 @@ template <typename T, uint8_t ClusterSizeX, uint8_t ClusterSizeY,
struct fmt_format_trait<Cluster<T, ClusterSizeX, ClusterSizeY, CoordType>> {
static std::string value() {
return fmt::format("T{{{}:x;{}:y;{}:data;}}",
return fmt::format("T{{{}:x:{}:y:{}:data:}}",
py::format_descriptor<CoordType>::format(),
py::format_descriptor<CoordType>::format(),
fmt::format("{}{}", ClusterSizeX * ClusterSizeY,
fmt::format("({},{}){}", ClusterSizeX, ClusterSizeY,
py::format_descriptor<T>::format()));
}
};

python/tests/conftest.py (new file, +34 lines)

@ -0,0 +1,34 @@
import os
from pathlib import Path
import pytest
def pytest_addoption(parser):
parser.addoption(
"--files", action="store_true", default=False, help="run slow tests"
)
def pytest_configure(config):
config.addinivalue_line("markers", "files: mark test as needing image files to run")
def pytest_collection_modifyitems(config, items):
if config.getoption("--files"):
return
skip = pytest.mark.skip(reason="need --files option to run")
for item in items:
if "files" in item.keywords:
item.add_marker(skip)
@pytest.fixture
def test_data_path():
env_value = os.environ.get("AARE_TEST_DATA")
if not env_value:
raise RuntimeError("Environment variable AARE_TEST_DATA is not set or is empty")
return Path(env_value)


@ -1,12 +1,20 @@
import pytest
import numpy as np
from _aare import ClusterVector_Cluster3x3i, Interpolator, Cluster3x3i, ClusterFinder_Cluster3x3i
import aare._aare as aare
from conftest import test_data_path
def test_cluster_vector_can_be_converted_to_numpy():
cv = aare.ClusterVector_Cluster3x3i()
arr = np.array(cv, copy=False)
assert arr.shape == (0,) # 4 for x, y, size, energy and 9 for the cluster data
def test_ClusterVector():
"""Test ClusterVector"""
clustervector = ClusterVector_Cluster3x3i()
clustervector = aare.ClusterVector_Cluster3x3i()
assert clustervector.cluster_size_x == 3
assert clustervector.cluster_size_y == 3
assert clustervector.item_size() == 4+9*4
@ -14,14 +22,16 @@ def test_ClusterVector():
assert clustervector.capacity == 1024
assert clustervector.size == 0
cluster = Cluster3x3i(0,0,np.ones(9, dtype=np.int32))
cluster = aare.Cluster3x3i(0,0,np.ones(9, dtype=np.int32))
clustervector.push_back(cluster)
assert clustervector.size == 1
#push_back - check size
with pytest.raises(TypeError): # Or use the appropriate exception type
clustervector.push_back(aare.Cluster2x2i(0,0,np.ones(4, dtype=np.int32)))
with pytest.raises(TypeError):
clustervector.push_back(aare.Cluster3x3f(0,0,np.ones(9, dtype=np.float32)))
def test_Interpolator():
"""Test Interpolator"""
@ -31,31 +41,92 @@ def test_Interpolator():
ybins = np.linspace(0, 5, 30, dtype=np.float64)
etacube = np.zeros(shape=[30, 30, 20], dtype=np.float64)
interpolator = Interpolator(etacube, xbins, ybins, ebins)
interpolator = aare.Interpolator(etacube, xbins, ybins, ebins)
assert interpolator.get_ietax().shape == (30,30,20)
assert interpolator.get_ietay().shape == (30,30,20)
clustervector = ClusterVector_Cluster3x3i()
clustervector = aare.ClusterVector_Cluster3x3i()
cluster = Cluster3x3i(0,0, np.ones(9, dtype=np.int32))
#clustervector.push_back(cluster)
#num_clusters = 1;
cluster = aare.Cluster3x3i(0,0, np.ones(9, dtype=np.int32))
clustervector.push_back(cluster)
#assert interpolator.interpolate_Cluster3x3i(clustervector).shape == (num_clusters, 3)
interpolated_photons = interpolator.interpolate(clustervector)
assert interpolated_photons.size == 1
assert interpolated_photons[0]["x"] == -1
assert interpolated_photons[0]["y"] == -1
assert interpolated_photons[0]["energy"] == 4 #eta_sum = 4, dx, dy = -1,-1 m_ietax = 0, m_ietay = 0
clustervector = aare.ClusterVector_Cluster2x2i()
cluster = aare.Cluster2x2i(0,0, np.ones(4, dtype=np.int32))
clustervector.push_back(cluster)
interpolated_photons = interpolator.interpolate(clustervector)
assert interpolated_photons.size == 1
assert interpolated_photons[0]["x"] == 0
assert interpolated_photons[0]["y"] == 0
assert interpolated_photons[0]["energy"] == 4
@pytest.mark.files
def test_cluster_file(test_data_path):
"""Test ClusterFile"""
cluster_file = aare.ClusterFile_Cluster3x3i(test_data_path / "clust/single_frame_97_clustrers.clust")
clustervector = cluster_file.read_clusters(10) #conversion does not work
cluster_file.close()
assert clustervector.size == 10
###reading with wrong file
with pytest.raises(TypeError):
cluster_file = aare.ClusterFile_Cluster2x2i(test_data_path / "clust/single_frame_97_clustrers.clust")
cluster_file.close()
def test_calculate_eta():
"""Calculate Eta"""
clusters = aare.ClusterVector_Cluster3x3i()
clusters.push_back(aare.Cluster3x3i(0,0, np.ones(9, dtype=np.int32)))
clusters.push_back(aare.Cluster3x3i(0,0, np.array([1,1,1,2,2,2,3,3,3])))
eta2 = aare.calculate_eta2(clusters)
assert eta2.shape == (2,2)
assert eta2[0,0] == 0.5
assert eta2[0,1] == 0.5
assert eta2[1,0] == 0.5
assert eta2[1,1] == 0.6 #1/5
def test_cluster_finder():
"""Test ClusterFinder"""
clusterfinder = aare.ClusterFinder_Cluster3x3i([100,100])
#frame = np.random.rand(100,100)
frame = np.zeros(shape=[100,100])
clusterfinder.find_clusters(frame)
clusters = clusterfinder.steal_clusters(False) #conversion does not work
assert clusters.size == 0
#def test_cluster_file():
#TODO dont understand behavior
def test_cluster_collector():
"""Test ClusterCollector"""
#def test_cluster_finder():
#"""Test ClusterFinder"""
clusterfinder = aare.ClusterFinderMT_Cluster3x3i([100,100]) #TODO: no idea what the data is in InputQueue not zero
#clusterfinder = ClusterFinder_Cluster3x3i([100,100])
clustercollector = aare.ClusterCollector_Cluster3x3i(clusterfinder)
#clusterfinder.find_clusters()
cluster_vectors = clustercollector.steal_clusters()
#clusters = clusterfinder.steal_clusters()
#print("cluster size: ", clusters.size())
assert len(cluster_vectors) == 1 #single thread execution
assert cluster_vectors[0].size == 0 #


@ -0,0 +1,92 @@
import pytest
import numpy as np
from aare import JungfrauDataFile
@pytest.mark.files
def test_jfungfrau_dat_read_number_of_frames(test_data_path):
with JungfrauDataFile(test_data_path / "dat/AldoJF500k_000000.dat") as dat_file:
assert dat_file.total_frames == 24
with JungfrauDataFile(test_data_path / "dat/AldoJF250k_000000.dat") as dat_file:
assert dat_file.total_frames == 53
with JungfrauDataFile(test_data_path / "dat/AldoJF65k_000000.dat") as dat_file:
assert dat_file.total_frames == 113
@pytest.mark.files
def test_jfungfrau_dat_read_number_of_file(test_data_path):
with JungfrauDataFile(test_data_path / "dat/AldoJF500k_000000.dat") as dat_file:
assert dat_file.n_files == 4
with JungfrauDataFile(test_data_path / "dat/AldoJF250k_000000.dat") as dat_file:
assert dat_file.n_files == 7
with JungfrauDataFile(test_data_path / "dat/AldoJF65k_000000.dat") as dat_file:
assert dat_file.n_files == 7
@pytest.mark.files
def test_read_module(test_data_path):
"""
Read all frames from the series of .dat files. Compare to canned data in npz format.
"""
# Read all frames from the .dat file
with JungfrauDataFile(test_data_path / "dat/AldoJF500k_000000.dat") as f:
header, data = f.read()
#Sanity check
n_frames = 24
assert header.size == n_frames
assert data.shape == (n_frames, 512, 1024)
# Read reference data using numpy
with np.load(test_data_path / "dat/AldoJF500k.npz") as f:
ref_header = f["headers"]
ref_data = f["frames"]
# Check that the data is the same
assert np.all(ref_header == header)
assert np.all(ref_data == data)
@pytest.mark.files
def test_read_half_module(test_data_path):
# Read all frames from the .dat file
with JungfrauDataFile(test_data_path / "dat/AldoJF250k_000000.dat") as f:
header, data = f.read()
n_frames = 53
assert header.size == n_frames
assert data.shape == (n_frames, 256, 1024)
# Read reference data using numpy
with np.load(test_data_path / "dat/AldoJF250k.npz") as f:
ref_header = f["headers"]
ref_data = f["frames"]
# Check that the data is the same
assert np.all(ref_header == header)
assert np.all(ref_data == data)
@pytest.mark.files
def test_read_single_chip(test_data_path):
# Read all frames from the .dat file
with JungfrauDataFile(test_data_path / "dat/AldoJF65k_000000.dat") as f:
header, data = f.read()
n_frames = 113
assert header.size == n_frames
assert data.shape == (n_frames, 256, 256)
# Read reference data using numpy
with np.load(test_data_path / "dat/AldoJF65k.npz") as f:
ref_header = f["headers"]
ref_data = f["frames"]
# Check that the data is the same
assert np.all(ref_header == header)
assert np.all(ref_data == data)


@ -37,16 +37,6 @@ auto get_test_parameters() {
Eta2<int>{3. / 5, 4. / 6, 1, 11}));
}
TEST_CASE("compute_largest_2x2_subcluster", "[.eta_calculation]") {
auto [cluster, expected_eta] = get_test_parameters();
auto [sum, index] = std::visit(
[](const auto &clustertype) { return clustertype.max_sum_2x2(); },
cluster);
CHECK(expected_eta.c == index);
CHECK(expected_eta.sum == sum);
}
TEST_CASE("calculate_eta2", "[.eta_calculation]") {
auto [cluster, expected_eta] = get_test_parameters();


@ -26,3 +26,49 @@ TEST_CASE("Correct Instantiation of Cluster and ClusterVector",
CHECK(not is_cluster_v<int>);
CHECK(is_cluster_v<Cluster<int, 3, 3>>);
}
<<<<<<< Updated upstream
=======
using ClusterTypes =
std::variant<Cluster<int, 2, 2>, Cluster<int, 3, 3>, Cluster<int, 5, 5>,
Cluster<int, 4, 2>, Cluster<int, 2, 3>>;
auto get_test_sum_parameters() {
return GENERATE(
std::make_tuple(ClusterTypes{Cluster<int, 2, 2>{0, 0, {1, 2, 3, 1}}},
std::make_pair(7, 0)),
std::make_tuple(
ClusterTypes{Cluster<int, 3, 3>{0, 0, {1, 2, 3, 4, 5, 6, 1, 2, 7}}},
std::make_pair(20, 3)),
std::make_tuple(ClusterTypes{Cluster<int, 5, 5>{
0, 0, {1, 6, 7, 6, 5, 4, 3, 2, 1, 8, 8, 9, 2,
1, 4, 5, 6, 7, 8, 4, 1, 1, 1, 1, 1}}},
std::make_pair(28, 8)),
std::make_tuple(
ClusterTypes{Cluster<int, 4, 2>{0, 0, {1, 4, 7, 2, 5, 6, 4, 3}}},
std::make_pair(21, 1)),
std::make_tuple(
ClusterTypes{Cluster<int, 2, 3>{0, 0, {1, 3, 2, 3, 4, 2}}},
std::make_pair(11, 1)));
}
TEST_CASE("compute_largest_2x2_subcluster", "[.cluster]") {
auto [cluster, sum_pair] = get_test_sum_parameters();
auto sum = std::visit(
[](const auto &clustertype) { return clustertype.max_sum_2x2(); },
cluster);
CHECK(sum_pair.first == sum.first);
CHECK(sum_pair.second == sum.second);
}
TEST_CASE("Test sum of Cluster", "[.cluster]") {
Cluster<int, 2, 2> cluster{0, 0, {1, 2, 3, 4}};
CHECK(cluster.sum() == 10);
Cluster<int, 2, 3> cluster2x3{0, 0, {1, 3, 2, 3, 4, 2}};
CHECK(cluster2x3.sum() == 15);
}
>>>>>>> Stashed changes


@ -8,10 +8,9 @@
using aare::Cluster;
using aare::ClusterFile;
TEST_CASE("Read one frame from a a cluster file", "[.integration]") {
TEST_CASE("Read one frame from a a cluster file", "[.files]") {
// We know that the frame has 97 clusters
auto fpath =
test_data_path() / "clusters" / "single_frame_97_clustrers.clust";
auto fpath = test_data_path() / "clust" / "single_frame_97_clustrers.clust";
REQUIRE(std::filesystem::exists(fpath));
ClusterFile<Cluster<int32_t, 3, 3>> f(fpath);
@ -20,10 +19,9 @@ TEST_CASE("Read one frame from a a cluster file", "[.integration]") {
REQUIRE(clusters.frame_number() == 135);
}
TEST_CASE("Read one frame using ROI", "[.integration]") {
TEST_CASE("Read one frame using ROI", "[.files]") {
// We know that the frame has 97 clusters
auto fpath =
test_data_path() / "clusters" / "single_frame_97_clustrers.clust";
auto fpath = test_data_path() / "clust" / "single_frame_97_clustrers.clust";
REQUIRE(std::filesystem::exists(fpath));
ClusterFile<Cluster<int32_t, 3, 3>> f(fpath);
@ -47,10 +45,108 @@ TEST_CASE("Read one frame using ROI", "[.integration]") {
}
}
TEST_CASE("Read clusters from single frame file", "[.integration]") {
TEST_CASE("Read clusters from single frame file", "[.files]") {
auto fpath =
test_data_path() / "clusters" / "single_frame_97_clustrers.clust";
// frame_number, num_clusters [135] 97
// [ 1 200] [0 1 2 3 4 5 6 7 8]
// [ 2 201] [ 9 10 11 12 13 14 15 16 17]
// [ 3 202] [18 19 20 21 22 23 24 25 26]
// [ 4 203] [27 28 29 30 31 32 33 34 35]
// [ 5 204] [36 37 38 39 40 41 42 43 44]
// [ 6 205] [45 46 47 48 49 50 51 52 53]
// [ 7 206] [54 55 56 57 58 59 60 61 62]
// [ 8 207] [63 64 65 66 67 68 69 70 71]
// [ 9 208] [72 73 74 75 76 77 78 79 80]
// [ 10 209] [81 82 83 84 85 86 87 88 89]
// [ 11 210] [90 91 92 93 94 95 96 97 98]
// [ 12 211] [ 99 100 101 102 103 104 105 106 107]
// [ 13 212] [108 109 110 111 112 113 114 115 116]
// [ 14 213] [117 118 119 120 121 122 123 124 125]
// [ 15 214] [126 127 128 129 130 131 132 133 134]
// [ 16 215] [135 136 137 138 139 140 141 142 143]
// [ 17 216] [144 145 146 147 148 149 150 151 152]
// [ 18 217] [153 154 155 156 157 158 159 160 161]
// [ 19 218] [162 163 164 165 166 167 168 169 170]
// [ 20 219] [171 172 173 174 175 176 177 178 179]
// [ 21 220] [180 181 182 183 184 185 186 187 188]
// [ 22 221] [189 190 191 192 193 194 195 196 197]
// [ 23 222] [198 199 200 201 202 203 204 205 206]
// [ 24 223] [207 208 209 210 211 212 213 214 215]
// [ 25 224] [216 217 218 219 220 221 222 223 224]
// [ 26 225] [225 226 227 228 229 230 231 232 233]
// [ 27 226] [234 235 236 237 238 239 240 241 242]
// [ 28 227] [243 244 245 246 247 248 249 250 251]
// [ 29 228] [252 253 254 255 256 257 258 259 260]
// [ 30 229] [261 262 263 264 265 266 267 268 269]
// [ 31 230] [270 271 272 273 274 275 276 277 278]
// [ 32 231] [279 280 281 282 283 284 285 286 287]
// [ 33 232] [288 289 290 291 292 293 294 295 296]
// [ 34 233] [297 298 299 300 301 302 303 304 305]
// [ 35 234] [306 307 308 309 310 311 312 313 314]
// [ 36 235] [315 316 317 318 319 320 321 322 323]
// [ 37 236] [324 325 326 327 328 329 330 331 332]
// [ 38 237] [333 334 335 336 337 338 339 340 341]
// [ 39 238] [342 343 344 345 346 347 348 349 350]
// [ 40 239] [351 352 353 354 355 356 357 358 359]
// [ 41 240] [360 361 362 363 364 365 366 367 368]
// [ 42 241] [369 370 371 372 373 374 375 376 377]
// [ 43 242] [378 379 380 381 382 383 384 385 386]
// [ 44 243] [387 388 389 390 391 392 393 394 395]
// [ 45 244] [396 397 398 399 400 401 402 403 404]
// [ 46 245] [405 406 407 408 409 410 411 412 413]
// [ 47 246] [414 415 416 417 418 419 420 421 422]
// [ 48 247] [423 424 425 426 427 428 429 430 431]
// [ 49 248] [432 433 434 435 436 437 438 439 440]
// [ 50 249] [441 442 443 444 445 446 447 448 449]
// [ 51 250] [450 451 452 453 454 455 456 457 458]
// [ 52 251] [459 460 461 462 463 464 465 466 467]
// [ 53 252] [468 469 470 471 472 473 474 475 476]
// [ 54 253] [477 478 479 480 481 482 483 484 485]
// [ 55 254] [486 487 488 489 490 491 492 493 494]
// [ 56 255] [495 496 497 498 499 500 501 502 503]
// [ 57 256] [504 505 506 507 508 509 510 511 512]
// [ 58 257] [513 514 515 516 517 518 519 520 521]
// [ 59 258] [522 523 524 525 526 527 528 529 530]
// [ 60 259] [531 532 533 534 535 536 537 538 539]
// [ 61 260] [540 541 542 543 544 545 546 547 548]
// [ 62 261] [549 550 551 552 553 554 555 556 557]
// [ 63 262] [558 559 560 561 562 563 564 565 566]
// [ 64 263] [567 568 569 570 571 572 573 574 575]
// [ 65 264] [576 577 578 579 580 581 582 583 584]
// [ 66 265] [585 586 587 588 589 590 591 592 593]
// [ 67 266] [594 595 596 597 598 599 600 601 602]
// [ 68 267] [603 604 605 606 607 608 609 610 611]
// [ 69 268] [612 613 614 615 616 617 618 619 620]
// [ 70 269] [621 622 623 624 625 626 627 628 629]
// [ 71 270] [630 631 632 633 634 635 636 637 638]
// [ 72 271] [639 640 641 642 643 644 645 646 647]
// [ 73 272] [648 649 650 651 652 653 654 655 656]
// [ 74 273] [657 658 659 660 661 662 663 664 665]
// [ 75 274] [666 667 668 669 670 671 672 673 674]
// [ 76 275] [675 676 677 678 679 680 681 682 683]
// [ 77 276] [684 685 686 687 688 689 690 691 692]
// [ 78 277] [693 694 695 696 697 698 699 700 701]
// [ 79 278] [702 703 704 705 706 707 708 709 710]
// [ 80 279] [711 712 713 714 715 716 717 718 719]
// [ 81 280] [720 721 722 723 724 725 726 727 728]
// [ 82 281] [729 730 731 732 733 734 735 736 737]
// [ 83 282] [738 739 740 741 742 743 744 745 746]
// [ 84 283] [747 748 749 750 751 752 753 754 755]
// [ 85 284] [756 757 758 759 760 761 762 763 764]
// [ 86 285] [765 766 767 768 769 770 771 772 773]
// [ 87 286] [774 775 776 777 778 779 780 781 782]
// [ 88 287] [783 784 785 786 787 788 789 790 791]
// [ 89 288] [792 793 794 795 796 797 798 799 800]
// [ 90 289] [801 802 803 804 805 806 807 808 809]
// [ 91 290] [810 811 812 813 814 815 816 817 818]
// [ 92 291] [819 820 821 822 823 824 825 826 827]
// [ 93 292] [828 829 830 831 832 833 834 835 836]
// [ 94 293] [837 838 839 840 841 842 843 844 845]
// [ 95 294] [846 847 848 849 850 851 852 853 854]
// [ 96 295] [855 856 857 858 859 860 861 862 863]
// [ 97 296] [864 865 866 867 868 869 870 871 872]
auto fpath = test_data_path() / "clust" / "single_frame_97_clustrers.clust";
REQUIRE(std::filesystem::exists(fpath));
SECTION("Read fewer clusters than available") {
@ -71,5 +167,18 @@ TEST_CASE("Read clusters from single frame file", "[.integration]") {
auto clusters = f.read_clusters(97);
REQUIRE(clusters.size() == 97);
REQUIRE(clusters.frame_number() == 135);
REQUIRE(clusters.at(0).x == 1);
REQUIRE(clusters.at(0).y == 200);
}
}
TEST_CASE("Read clusters", "[.files]") {
// beam_En700eV_-40deg_300V_10us_d0_f0_100.clust
auto fpath = test_data_path() / "clust" /
"beam_En700eV_-40deg_300V_10us_d0_f0_100.clust";
REQUIRE(std::filesystem::exists(fpath));
ClusterFile<Cluster<int32_t, 3, 3>> f(fpath);
auto clusters = f.read_clusters(500);
}

View File

@ -8,6 +8,45 @@
using aare::Cluster;
using aare::ClusterVector;
TEST_CASE("item_size return the size of the cluster stored"){
using C1 = Cluster<int32_t, 2, 2>;
ClusterVector<C1> cv(4);
CHECK(cv.item_size() == sizeof(C1));
//Sanity check
//2*2*4 = 16 bytes of data for the cluster
// 2*2 = 4 bytes for the x and y coordinates
REQUIRE(cv.item_size() == 20);
using C2 = Cluster<int32_t, 3, 3>;
ClusterVector<C2> cv2(4);
CHECK(cv2.item_size() == sizeof(C2));
using C3 = Cluster<double, 2, 3>;
ClusterVector<C3> cv3(4);
CHECK(cv3.item_size() == sizeof(C3));
using C4 = Cluster<char, 10, 5>;
ClusterVector<C4> cv4(4);
CHECK(cv4.item_size() == sizeof(C4));
using C5 = Cluster<int32_t, 2, 3>;
ClusterVector<C5> cv5(4);
CHECK(cv5.item_size() == sizeof(C5));
using C6 = Cluster<double, 5, 5>;
ClusterVector<C6> cv6(4);
CHECK(cv6.item_size() == sizeof(C6)); // double uses padding!!!
using C7 = Cluster<double, 3, 3>;
ClusterVector<C7> cv7(4);
CHECK(cv7.item_size() == sizeof(C7));
}
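The 20 bytes checked above follow from the presumed memory layout of a cluster: two 16-bit coordinates followed by the ClusterSizeX * ClusterSizeY data array. A minimal sketch of such a layout (ClusterSketch is hypothetical; the actual aare::Cluster definition may differ, for example in its coordinate type):

```cpp
#include <cstdint>

// Hypothetical layout consistent with the sizes checked above.
template <typename T, int SizeX, int SizeY>
struct ClusterSketch {
    int16_t x;             // 2 bytes
    int16_t y;             // 2 bytes
    T data[SizeX * SizeY]; // SizeX * SizeY * sizeof(T) bytes (plus alignment padding)
};

// 2 + 2 coordinate bytes + 2*2*4 data bytes = 20, matching the REQUIRE above.
static_assert(sizeof(ClusterSketch<int32_t, 2, 2>) == 20);
```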
TEST_CASE("ClusterVector 2x2 int32_t capacity 4, push back then read",
"[.ClusterVector]") {
@ -187,6 +226,32 @@ TEST_CASE("Concatenate two cluster vectors where we need to allocate",
REQUIRE(ptr[3].y == 17);
}
TEST_CASE("calculate cluster sum", "[.ClusterVector]") {
ClusterVector<Cluster<int32_t, 2, 2>> cv1(2);
Cluster<int32_t, 2, 2> c1 = {1, 2, {3, 4, 5, 6}};
cv1.push_back(c1);
Cluster<int32_t, 2, 2> c2 = {6, 7, {8, 9, 10, 11}};
cv1.push_back(c2);
auto sum1 = cv1.sum();
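// per-cluster sums: 3+4+5+6 = 18 and 8+9+10+11 = 38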
std::vector<int32_t> expected_sum1{18, 38};
CHECK(sum1 == expected_sum1);
ClusterVector<Cluster<int32_t, 3, 3>> cv2(2);
Cluster<int32_t, 3, 3> c3 = {1, 2, {3, 4, 5, 6, 1, 7, 8, 1, 1}};
cv2.push_back(c3);
Cluster<int32_t, 3, 3> c4 = {6, 7, {8, 9, 10, 11, 13, 5, 12, 2, 4}};
cv2.push_back(c4);
auto sum2 = cv2.sum();
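// per-cluster sums: 3+4+5+6+1+7+8+1+1 = 36 and 8+9+10+11+13+5+12+2+4 = 74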
std::vector<int32_t> expected_sum2{36, 74};
CHECK(sum2 == expected_sum2);
}
struct ClusterTestData {
uint8_t ClusterSizeX;
uint8_t ClusterSizeY;

View File

@ -1,4 +1,5 @@
#include "aare/File.hpp"
#include "aare/JungfrauDataFile.hpp"
#include "aare/NumpyFile.hpp"
#include "aare/RawFile.hpp"
@ -27,6 +28,8 @@ File::File(const std::filesystem::path &fname, const std::string &mode,
else if (fname.extension() == ".npy") {
// file_impl = new NumpyFile(fname, mode, cfg);
file_impl = std::make_unique<NumpyFile>(fname, mode, cfg);
}else if(fname.extension() == ".dat"){
file_impl = std::make_unique<JungfrauDataFile>(fname);
} else {
throw std::runtime_error("Unsupported file type");
}

44
src/FilePtr.cpp Normal file
View File

@ -0,0 +1,44 @@
#include "aare/FilePtr.hpp"
#include <cerrno>
#include <cstdio>
#include <cstring>
#include <fmt/format.h>
#include <stdexcept>
#include <utility>
namespace aare {
FilePtr::FilePtr(const std::filesystem::path& fname, const std::string& mode = "rb") {
fp_ = fopen(fname.c_str(), mode.c_str());
if (!fp_)
throw std::runtime_error(fmt::format("Could not open: {}", fname.c_str()));
}
FilePtr::FilePtr(FilePtr &&other) { std::swap(fp_, other.fp_); }
FilePtr &FilePtr::operator=(FilePtr &&other) {
std::swap(fp_, other.fp_);
return *this;
}
FILE *FilePtr::get() { return fp_; }
int64_t FilePtr::tell() {
auto pos = ftell(fp_);
if (pos == -1)
throw std::runtime_error(fmt::format("Error getting file position: {}", error_msg()));
return pos;
}
FilePtr::~FilePtr() {
if (fp_)
fclose(fp_); // check?
}
std::string FilePtr::error_msg(){
if (feof(fp_)) {
return "End of file reached";
}
if (ferror(fp_)) {
return fmt::format("Error reading file: {}", std::strerror(errno));
}
return "";
}
} // namespace aare
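FilePtr is a move-only RAII wrapper around a C FILE*: the constructor opens the file (and throws on failure), get() exposes the raw pointer for the usual stdio calls, and the destructor closes it. A hedged usage sketch (the path and the 24-byte read are made up for illustration):

```cpp
#include "aare/FilePtr.hpp"
#include <cstdio>
#include <stdexcept>

void read_some_bytes() {
    // throws std::runtime_error if fopen fails
    aare::FilePtr fp("/tmp/example_000000.dat", "rb");

    char buf[24]; // arbitrary amount of data for the example
    if (fread(buf, 1, sizeof(buf), fp.get()) != sizeof(buf)) {
        // error_msg() distinguishes end-of-file from an errno-based read error
        throw std::runtime_error(fp.error_msg());
    }
} // leaving the scope closes the FILE*
```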

View File

@ -18,7 +18,7 @@ double gaus(const double x, const double *par) {
NDArray<double, 1> gaus(NDView<double, 1> x, NDView<double, 1> par) {
NDArray<double, 1> y({x.shape(0)}, 0);
for (size_t i = 0; i < x.size(); i++) {
for (ssize_t i = 0; i < x.size(); i++) {
y(i) = gaus(x(i), par.data());
}
return y;
@ -28,7 +28,7 @@ double pol1(const double x, const double *par) { return par[0] * x + par[1]; }
NDArray<double, 1> pol1(NDView<double, 1> x, NDView<double, 1> par) {
NDArray<double, 1> y({x.shape()}, 0);
for (size_t i = 0; i < x.size(); i++) {
for (ssize_t i = 0; i < x.size(); i++) {
y(i) = pol1(x(i), par.data());
}
return y;
@ -153,7 +153,7 @@ void fit_gaus(NDView<double, 1> x, NDView<double, 1> y, NDView<double, 1> y_err,
// Calculate chi2
chi2 = 0;
for (size_t i = 0; i < y.size(); i++) {
for (ssize_t i = 0; i < y.size(); i++) {
chi2 += std::pow((y(i) - func::gaus(x(i), par_out.data())) / y_err(i), 2);
}
}
@ -205,7 +205,7 @@ void fit_pol1(NDView<double, 1> x, NDView<double, 1> y, NDView<double, 1> y_err,
// Calculate chi2
chi2 = 0;
for (size_t i = 0; i < y.size(); i++) {
for (ssize_t i = 0; i < y.size(); i++) {
chi2 += std::pow((y(i) - func::pol1(x(i), par_out.data())) / y_err(i), 2);
}
}

238
src/JungfrauDataFile.cpp Normal file
View File

@ -0,0 +1,238 @@
#include "aare/JungfrauDataFile.hpp"
#include "aare/algorithm.hpp"
#include "aare/defs.hpp"
#include <cerrno>
#include <fmt/format.h>
namespace aare {
JungfrauDataFile::JungfrauDataFile(const std::filesystem::path &fname) {
if (!std::filesystem::exists(fname)) {
throw std::runtime_error(LOCATION +
"File does not exist: " + fname.string());
}
find_frame_size(fname);
parse_fname(fname);
scan_files();
open_file(m_current_file_index);
}
// FileInterface
Frame JungfrauDataFile::read_frame(){
Frame f(rows(), cols(), Dtype::UINT16);
read_into(reinterpret_cast<std::byte *>(f.data()), nullptr);
return f;
}
Frame JungfrauDataFile::read_frame(size_t frame_number){
seek(frame_number);
Frame f(rows(), cols(), Dtype::UINT16);
read_into(reinterpret_cast<std::byte *>(f.data()), nullptr);
return f;
}
std::vector<Frame> JungfrauDataFile::read_n(size_t n_frames) {
std::vector<Frame> frames;
for(size_t i = 0; i < n_frames; ++i){
frames.push_back(read_frame());
}
return frames;
}
void JungfrauDataFile::read_into(std::byte *image_buf) {
read_into(image_buf, nullptr);
}
void JungfrauDataFile::read_into(std::byte *image_buf, size_t n_frames) {
read_into(image_buf, n_frames, nullptr);
}
size_t JungfrauDataFile::frame_number(size_t frame_index) {
seek(frame_index);
return read_header().framenum;
}
std::array<ssize_t, 2> JungfrauDataFile::shape() const {
return {static_cast<ssize_t>(rows()), static_cast<ssize_t>(cols())};
}
DetectorType JungfrauDataFile::detector_type() const { return DetectorType::Jungfrau; }
std::string JungfrauDataFile::base_name() const { return m_base_name; }
size_t JungfrauDataFile::bytes_per_frame() { return m_bytes_per_frame; }
size_t JungfrauDataFile::pixels_per_frame() { return m_rows * m_cols; }
size_t JungfrauDataFile::bytes_per_pixel() const { return sizeof(pixel_type); }
size_t JungfrauDataFile::bitdepth() const {
return bytes_per_pixel() * bits_per_byte;
}
void JungfrauDataFile::seek(size_t frame_index) {
if (frame_index >= m_total_frames) {
throw std::runtime_error(LOCATION + "Frame index out of range: " +
std::to_string(frame_index));
}
m_current_frame_index = frame_index;
auto file_index = first_larger(m_last_frame_in_file, frame_index);
if (file_index != m_current_file_index)
open_file(file_index);
auto frame_offset = (file_index)
? frame_index - m_last_frame_in_file[file_index - 1]
: frame_index;
auto byte_offset = frame_offset * (m_bytes_per_frame + header_size);
m_fp.seek(byte_offset);
};
size_t JungfrauDataFile::tell() { return m_current_frame_index; }
size_t JungfrauDataFile::total_frames() const { return m_total_frames; }
size_t JungfrauDataFile::rows() const { return m_rows; }
size_t JungfrauDataFile::cols() const { return m_cols; }
size_t JungfrauDataFile::n_files() const { return m_last_frame_in_file.size(); }
void JungfrauDataFile::find_frame_size(const std::filesystem::path &fname) {
static constexpr size_t module_data_size =
header_size + sizeof(pixel_type) * 512 * 1024;
static constexpr size_t half_data_size =
header_size + sizeof(pixel_type) * 256 * 1024;
static constexpr size_t chip_data_size =
header_size + sizeof(pixel_type) * 256 * 256;
auto file_size = std::filesystem::file_size(fname);
if (file_size == 0) {
throw std::runtime_error(LOCATION +
"Cannot guess frame size: file is empty");
}
if (file_size % module_data_size == 0) {
m_rows = 512;
m_cols = 1024;
m_bytes_per_frame = module_data_size - header_size;
} else if (file_size % half_data_size == 0) {
m_rows = 256;
m_cols = 1024;
m_bytes_per_frame = half_data_size - header_size;
} else if (file_size % chip_data_size == 0) {
m_rows = 256;
m_cols = 256;
m_bytes_per_frame = chip_data_size - header_size;
} else {
throw std::runtime_error(LOCATION +
"Cannot find frame size: file size is not a "
"multiple of any known frame size");
}
}
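In other words, the geometry is deduced purely from the file size: every stored frame occupies header_size plus 2 bytes per pixel, so a file made of full 512x1024 module frames is an exact multiple of that stride. A small sketch of the same divisibility test for one candidate geometry (header_size and file_size are passed in rather than assumed):

```cpp
#include <cstddef>
#include <cstdint>

// Sketch: does the file look like it holds full 512x1024 module frames?
bool looks_like_full_module(std::size_t file_size, std::size_t header_size) {
    constexpr std::size_t bytes_per_frame =
        sizeof(std::uint16_t) * 512 * 1024; // 1048576 bytes
    return file_size != 0 && file_size % (header_size + bytes_per_frame) == 0;
}
```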
void JungfrauDataFile::parse_fname(const std::filesystem::path &fname) {
m_path = fname.parent_path();
m_base_name = fname.stem();
// find the file index, then remove it from the base name
if (auto pos = m_base_name.find_last_of('_'); pos != std::string::npos) {
m_offset = std::stoul(m_base_name.substr(pos + 1));
m_base_name.erase(pos);
}
}
void JungfrauDataFile::scan_files() {
// find how many files we have and the number of frames in each file
m_last_frame_in_file.clear();
size_t file_index = m_offset;
while (std::filesystem::exists(fpath(file_index))) {
auto n_frames = std::filesystem::file_size(fpath(file_index)) /
(m_bytes_per_frame + header_size);
m_last_frame_in_file.push_back(n_frames);
++file_index;
}
// find where we need to open the next file and total number of frames
m_last_frame_in_file = cumsum(m_last_frame_in_file);
m_total_frames = m_last_frame_in_file.back();
}
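scan_files() turns the per-file frame counts into running totals, which seek() then searches with first_larger to find both the file to open and the frame offset inside it. A worked sketch, assuming the 7, 7, 7, 3 frame layout described in the JungfrauDataFile tests below:

```cpp
#include "aare/algorithm.hpp"
#include <cstddef>
#include <vector>

void seek_example() {
    // per-file frame counts {7, 7, 7, 3} -> running totals {7, 14, 21, 24}
    auto last_frame_in_file = aare::cumsum(std::vector<std::size_t>{7, 7, 7, 3});

    std::size_t frame_index = 19;
    // first total strictly larger than 19 is 21 -> file index 2
    auto file_index = aare::first_larger(last_frame_in_file, frame_index);
    // offset inside that file: 19 - 14 = 5
    auto frame_offset = file_index
                            ? frame_index - last_frame_in_file[file_index - 1]
                            : frame_index;
    (void)frame_offset;
}
```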
void JungfrauDataFile::read_into(std::byte *image_buf,
JungfrauDataHeader *header) {
// read header if not passed nullptr
if (header) {
if (auto rc = fread(header, sizeof(JungfrauDataHeader), 1, m_fp.get());
rc != 1) {
throw std::runtime_error(
LOCATION +
"Could not read header from file:" + m_fp.error_msg());
}
} else {
m_fp.seek(header_size, SEEK_CUR);
}
// read data
if (auto rc = fread(image_buf, 1, m_bytes_per_frame, m_fp.get());
rc != m_bytes_per_frame) {
throw std::runtime_error(LOCATION + "Could not read image from file" +
m_fp.error_msg());
}
// prepare for next read
// if we are at the end of the file, open the next file
++m_current_frame_index;
if (m_current_frame_index >= m_last_frame_in_file[m_current_file_index] &&
(m_current_frame_index < m_total_frames)) {
++m_current_file_index;
open_file(m_current_file_index);
}
}
void JungfrauDataFile::read_into(std::byte *image_buf, size_t n_frames,
JungfrauDataHeader *header) {
if (header) {
for (size_t i = 0; i < n_frames; ++i)
read_into(image_buf + i * m_bytes_per_frame, header + i);
}else{
for (size_t i = 0; i < n_frames; ++i)
read_into(image_buf + i * m_bytes_per_frame, nullptr);
}
}
void JungfrauDataFile::read_into(NDArray<uint16_t>* image, JungfrauDataHeader* header) {
if(image->shape()!=shape()){
throw std::runtime_error(LOCATION +
"Image shape does not match file size: " + std::to_string(rows()) + "x" + std::to_string(cols()));
}
read_into(reinterpret_cast<std::byte *>(image->data()), header);
}
JungfrauDataHeader JungfrauDataFile::read_header() {
JungfrauDataHeader header;
if (auto rc = fread(&header, 1, sizeof(header), m_fp.get());
rc != sizeof(header)) {
throw std::runtime_error(LOCATION + "Could not read header from file" +
m_fp.error_msg());
}
m_fp.seek(-header_size, SEEK_CUR);
return header;
}
void JungfrauDataFile::open_file(size_t file_index) {
// fmt::print(stderr, "Opening file: {}\n",
// fpath(file_index+m_offset).string());
m_fp = FilePtr(fpath(file_index + m_offset), "rb");
m_current_file_index = file_index;
}
std::filesystem::path JungfrauDataFile::fpath(size_t file_index) const {
auto fname = fmt::format("{}_{:0{}}.dat", m_base_name, file_index,
n_digits_in_file_index);
return m_path / fname;
}
} // namespace aare

View File

@ -0,0 +1,114 @@
#include "aare/JungfrauDataFile.hpp"
#include <catch2/catch_test_macros.hpp>
#include "test_config.hpp"
using aare::JungfrauDataFile;
using aare::JungfrauDataHeader;
TEST_CASE("Open a Jungfrau data file", "[.files]") {
//we know we have 4 files with 7, 7, 7, and 3 frames
//first frame number is 1 and the bunch id is frame_number**2
//so we can check the header
auto fpath = test_data_path() / "dat" / "AldoJF500k_000000.dat";
REQUIRE(std::filesystem::exists(fpath));
JungfrauDataFile f(fpath);
REQUIRE(f.rows() == 512);
REQUIRE(f.cols() == 1024);
REQUIRE(f.bytes_per_frame() == 1048576);
REQUIRE(f.pixels_per_frame() == 524288);
REQUIRE(f.bytes_per_pixel() == 2);
REQUIRE(f.bitdepth() == 16);
REQUIRE(f.base_name() == "AldoJF500k");
REQUIRE(f.n_files() == 4);
REQUIRE(f.tell() == 0);
REQUIRE(f.total_frames() == 24);
REQUIRE(f.current_file() == fpath);
//Check that the frame number and bunch id are read correctly
for (size_t i = 0; i < 24; ++i) {
JungfrauDataHeader header;
aare::NDArray<uint16_t> image(f.shape());
f.read_into(&image, &header);
REQUIRE(header.framenum == i + 1);
REQUIRE(header.bunchid == (i + 1) * (i + 1));
REQUIRE(image.shape(0) == 512);
REQUIRE(image.shape(1) == 1024);
}
}
TEST_CASE("Seek in a JungfrauDataFile", "[.files]"){
auto fpath = test_data_path() / "dat" / "AldoJF65k_000000.dat";
REQUIRE(std::filesystem::exists(fpath));
JungfrauDataFile f(fpath);
//The data set (all files together) should have 113 frames
f.seek(19);
REQUIRE(f.tell() == 19);
auto h = f.read_header();
REQUIRE(h.framenum == 19+1);
//Reading again does not change the file pointer
auto h2 = f.read_header();
REQUIRE(h2.framenum == 19+1);
f.seek(59);
REQUIRE(f.tell() == 59);
auto h3 = f.read_header();
REQUIRE(h3.framenum == 59+1);
JungfrauDataHeader h4;
aare::NDArray<uint16_t> image(f.shape());
f.read_into(&image, &h4);
REQUIRE(h4.framenum == 59+1);
//now we should be on the next frame
REQUIRE(f.tell() == 60);
REQUIRE(f.read_header().framenum == 60+1);
REQUIRE_THROWS(f.seek(86356)); //out of range
}
TEST_CASE("Open a Jungfrau data file with non zero file index", "[.files]"){
auto fpath = test_data_path() / "dat" / "AldoJF65k_000003.dat";
REQUIRE(std::filesystem::exists(fpath));
JungfrauDataFile f(fpath);
//18 frames per file; opening the file with index 3 means we skip the first 3*18 frames
REQUIRE(f.total_frames() == 113-18*3);
REQUIRE(f.tell() == 0);
//Frame numbers start at 1 in the first file
REQUIRE(f.read_header().framenum == 18*3+1);
// moving relative to the third file
f.seek(5);
REQUIRE(f.read_header().framenum == 18*3+1+5);
// ignoring the first 3 files
REQUIRE(f.n_files() == 4);
REQUIRE(f.current_file().stem() == "AldoJF65k_000003");
}
TEST_CASE("Read into throws if size doesn't match", "[.files]"){
auto fpath = test_data_path() / "dat" / "AldoJF65k_000000.dat";
REQUIRE(std::filesystem::exists(fpath));
JungfrauDataFile f(fpath);
aare::NDArray<uint16_t> image({39, 85});
JungfrauDataHeader header;
REQUIRE_THROWS(f.read_into(&image, &header));
REQUIRE_THROWS(f.read_into(&image, nullptr));
REQUIRE_THROWS(f.read_into(&image));
REQUIRE(f.tell() == 0);
}

View File

@ -183,14 +183,14 @@ TEST_CASE("Size and shape matches") {
int64_t h = 75;
std::array<int64_t, 2> shape{w, h};
NDArray<double> a{shape};
REQUIRE(a.size() == static_cast<uint64_t>(w * h));
REQUIRE(a.size() == w * h);
REQUIRE(a.shape() == shape);
}
TEST_CASE("Initial value matches for all elements") {
double v = 4.35;
NDArray<double> a{{5, 5}, v};
for (uint32_t i = 0; i < a.size(); ++i) {
for (int i = 0; i < a.size(); ++i) {
REQUIRE(a(i) == v);
}
}

View File

@ -5,7 +5,7 @@
TEST_CASE("Find the closed index in a 1D array", "[algorithm]") {
aare::NDArray<double, 1> arr({5});
for (size_t i = 0; i < arr.size(); i++) {
for (ssize_t i = 0; i < arr.size(); i++) {
arr[i] = i;
}
// arr 0, 1, 2, 3, 4
@ -18,7 +18,7 @@ TEST_CASE("Find the closed index in a 1D array", "[algorithm]") {
TEST_CASE("Passing integers to nearest_index works", "[algorithm]") {
aare::NDArray<int, 1> arr({5});
for (size_t i = 0; i < arr.size(); i++) {
for (ssize_t i = 0; i < arr.size(); i++) {
arr[i] = i;
}
// arr 0, 1, 2, 3, 4
@ -47,9 +47,22 @@ TEST_CASE("nearest index works with std::array", "[algorithm]") {
REQUIRE(aare::nearest_index(arr, -10.0) == 0);
}
TEST_CASE("nearest index when there is no different uses the first element",
"[algorithm]") {
std::vector<int> vec = {5, 5, 5, 5, 5};
REQUIRE(aare::nearest_index(vec, 5) == 0);
}
TEST_CASE("nearest index when there is no different uses the first element "
"also when all smaller",
"[algorithm]") {
std::vector<int> vec = {5, 5, 5, 5, 5};
REQUIRE(aare::nearest_index(vec, 10) == 0);
}
TEST_CASE("last smaller", "[algorithm]") {
aare::NDArray<double, 1> arr({5});
for (size_t i = 0; i < arr.size(); i++) {
for (ssize_t i = 0; i < arr.size(); i++) {
arr[i] = i;
}
// arr 0, 1, 2, 3, 4
@ -61,9 +74,89 @@ TEST_CASE("last smaller", "[algorithm]") {
TEST_CASE("returns last bin strictly smaller", "[algorithm]") {
aare::NDArray<double, 1> arr({5});
for (size_t i = 0; i < arr.size(); i++) {
for (ssize_t i = 0; i < arr.size(); i++) {
arr[i] = i;
}
// arr 0, 1, 2, 3, 4
REQUIRE(aare::last_smaller(arr, 2.0) == 2);
}
REQUIRE(aare::last_smaller(arr, 2.0) == 1);
}
TEST_CASE("last_smaller with all elements smaller returns last element",
"[algorithm]") {
aare::NDArray<double, 1> arr({5});
for (ssize_t i = 0; i < arr.size(); i++) {
arr[i] = i;
}
// arr 0, 1, 2, 3, 4
REQUIRE(aare::last_smaller(arr, 50.) == 4);
}
TEST_CASE("last_smaller with all elements bigger returns first element",
"[algorithm]") {
aare::NDArray<double, 1> arr({5});
for (ssize_t i = 0; i < arr.size(); i++) {
arr[i] = i;
}
// arr 0, 1, 2, 3, 4
REQUIRE(aare::last_smaller(arr, -50.) == 0);
}
TEST_CASE("last smaller with all elements equal returns the first element",
"[algorithm]") {
std::vector<int> vec = {5, 5, 5, 5, 5, 5, 5};
REQUIRE(aare::last_smaller(vec, 5) == 0);
}
TEST_CASE("first_lager with vector", "[algorithm]") {
std::vector<double> vec = {0, 1, 2, 3, 4};
REQUIRE(aare::first_larger(vec, 2.5) == 3);
}
TEST_CASE("first_lager with all elements smaller returns last element",
"[algorithm]") {
std::vector<double> vec = {0, 1, 2, 3, 4};
REQUIRE(aare::first_larger(vec, 50.) == 4);
}
TEST_CASE("first_lager with all elements bigger returns first element",
"[algorithm]") {
std::vector<double> vec = {0, 1, 2, 3, 4};
REQUIRE(aare::first_larger(vec, -50.) == 0);
}
TEST_CASE("first_lager with all elements the same as the check returns last",
"[algorithm]") {
std::vector<int> vec = {14, 14, 14, 14, 14};
REQUIRE(aare::first_larger(vec, 14) == 4);
}
TEST_CASE("first larger with the same element", "[algorithm]") {
std::vector<int> vec = {7, 8, 9, 10, 11};
REQUIRE(aare::first_larger(vec, 9) == 3);
}
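The cases above pin down the clamping behaviour of both helpers at the ends of the range. One possible implementation consistent with these tests (not necessarily the one aare uses) wraps std::lower_bound / std::upper_bound over a sorted container:

```cpp
#include <algorithm>
#include <cstddef>
#include <iterator>

// Sketches matching the tests above; the actual aare implementations may differ.
// Both assume a non-decreasing container.
template <typename Container, typename T>
std::size_t last_smaller_sketch(const Container &c, T value) {
    auto it = std::lower_bound(std::begin(c), std::end(c), value);
    if (it == std::begin(c))
        return 0; // everything is >= value: return the first index
    return static_cast<std::size_t>(std::distance(std::begin(c), it)) - 1;
}

template <typename Container, typename T>
std::size_t first_larger_sketch(const Container &c, T value) {
    auto it = std::upper_bound(std::begin(c), std::end(c), value);
    if (it == std::end(c))
        --it; // everything is <= value: clamp to the last index
    return static_cast<std::size_t>(std::distance(std::begin(c), it));
}
```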
TEST_CASE("cumsum works", "[algorithm]") {
std::vector<double> vec = {0, 1, 2, 3, 4};
auto result = aare::cumsum(vec);
REQUIRE(result.size() == vec.size());
REQUIRE(result[0] == 0);
REQUIRE(result[1] == 1);
REQUIRE(result[2] == 3);
REQUIRE(result[3] == 6);
REQUIRE(result[4] == 10);
}
TEST_CASE("cumsum works with empty vector", "[algorithm]") {
std::vector<double> vec = {};
auto result = aare::cumsum(vec);
REQUIRE(result.size() == 0);
}
TEST_CASE("cumsum works with negative numbers", "[algorithm]") {
std::vector<double> vec = {0, -1, -2, -3, -4};
auto result = aare::cumsum(vec);
REQUIRE(result.size() == vec.size());
REQUIRE(result[0] == 0);
REQUIRE(result[1] == -1);
REQUIRE(result[2] == -3);
REQUIRE(result[3] == -6);
REQUIRE(result[4] == -10);
}
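Taken together, the three cases define cumsum as an inclusive prefix sum: result[i] holds the sum of the first i+1 elements, and an empty input gives an empty output. A minimal sketch in terms of std::partial_sum (cumsum_sketch is hypothetical, not the aare implementation):

```cpp
#include <numeric>
#include <vector>

// Inclusive prefix sum consistent with the cumsum tests above.
template <typename T>
std::vector<T> cumsum_sketch(const std::vector<T> &vec) {
    std::vector<T> result(vec.size());
    std::partial_sum(vec.begin(), vec.end(), result.begin());
    return result;
}
```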