Mirror of https://github.com/slsdetectorgroup/aare.git (synced 2026-01-22 13:41:58 +01:00)

Commit: dont convert to byte
@@ -236,8 +236,7 @@ ClusterFile<ClusterType, Enable>::read_clusters_without_cut(size_t n_clusters) {
     uint32_t nn = m_num_left;
     uint32_t nph = m_num_left; // number of clusters in frame needs to be 4

     // auto buf = reinterpret_cast<Cluster3x3 *>(clusters.data());
-    auto buf = reinterpret_cast<char*>(clusters.data());
+    auto buf = clusters.data();
     // if there are photons left from previous frame read them first
     if (nph) {
         if (nph > n_clusters) {
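
The point of the change above is the type of buf: clusters.data() returns a typed cluster pointer, so offsets are counted in elements rather than bytes. A minimal standalone sketch of the two addressing styles (the Record struct is made up for illustration, not an aare type):

    #include <cstddef>
    #include <cstdint>

    // Hypothetical fixed-size record standing in for a cluster struct.
    struct Record {
        int16_t x, y;
        int32_t data[9];
    };

    int main() {
        Record storage[8] = {};
        std::size_t already_read = 3; // records already filled

        // Byte-based addressing: the caller must scale by sizeof(Record).
        char *bytes = reinterpret_cast<char *>(storage);
        void *dst_bytes = bytes + already_read * sizeof(Record);

        // Element-based addressing: pointer arithmetic scales automatically.
        Record *typed = storage;
        void *dst_typed = typed + already_read;

        // Both expressions name the same location.
        return dst_bytes == dst_typed ? 0 : 1;
    }
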
@@ -247,8 +246,7 @@ ClusterFile<ClusterType, Enable>::read_clusters_without_cut(size_t n_clusters) {
         } else {
             nn = nph;
         }
-        nph_read += fread((buf + nph_read * clusters.item_size()),
-                          clusters.item_size(), nn, fp);
+        nph_read += fread((buf + nph_read), clusters.item_size(), nn, fp);
         m_num_left = nph - nn; // write back the number of photons left
     }

@@ -263,8 +261,8 @@ ClusterFile<ClusterType, Enable>::read_clusters_without_cut(size_t n_clusters) {
             else
                 nn = nph;

-            nph_read += fread((buf + nph_read * clusters.item_size()),
-                              clusters.item_size(), nn, fp);
+            nph_read +=
+                fread((buf + nph_read), clusters.item_size(), nn, fp);
             m_num_left = nph - nn;
         }
         if (nph_read >= n_clusters)
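
With the typed buffer, both fread calls now take the same shape: the destination is buf + nph_read and the element size is passed to fread separately, so the stream still advances by whole clusters. A reduced sketch of that pattern against a scratch file (again with a made-up Record, not aare code):

    #include <cstddef>
    #include <cstdint>
    #include <cstdio>
    #include <vector>

    struct Record { int16_t x, y; int32_t data[9]; };

    int main() {
        std::FILE *fp = std::tmpfile();
        if (!fp)
            return 1;

        // Write five records to the scratch file, then rewind for reading.
        std::vector<Record> out(5, Record{1, 2, {0}});
        std::fwrite(out.data(), sizeof(Record), out.size(), fp);
        std::rewind(fp);

        // Read them back in two chunks, offsetting by elements already read.
        std::vector<Record> in(5);
        Record *buf = in.data();
        std::size_t n_read = 0;
        n_read += std::fread(buf + n_read, sizeof(Record), 2, fp); // leftover clusters
        n_read += std::fread(buf + n_read, sizeof(Record), 3, fp); // rest of the request
        std::fclose(fp);
        return n_read == 5 ? 0 : 1;
    }
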
@@ -297,7 +297,7 @@ class ClusterVector<Cluster<T, ClusterSizeX, ClusterSizeY, CoordType>> {
      * @param frame_number frame number of the clusters. Default is 0, which is
      * also used to indicate that the clusters come from many frames
      */
-    ClusterVector(size_t capacity = 1024, uint64_t frame_number = 0)
+    ClusterVector(size_t capacity = 300, uint64_t frame_number = 0)
         : m_frame_number(frame_number) {
         m_data.reserve(capacity);
     }
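
The constructor body only reserves storage, so the new default capacity (the hunk swaps 1024 for 300) changes the initial allocation but not the number of stored clusters. A generic illustration of reserve semantics with std::vector, which is presumably what m_data is backed by:

    #include <cassert>
    #include <vector>

    int main() {
        std::vector<int> v;
        v.reserve(300);              // pre-allocate room for 300 elements
        assert(v.size() == 0);       // the vector itself is still empty
        assert(v.capacity() >= 300); // capacity is at least what was requested
        v.push_back(42);             // growth within capacity does not reallocate
        assert(v.size() == 1);
        return 0;
    }
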
@@ -18,6 +18,9 @@ using pd_type = double;

 using namespace aare;

+#pragma GCC diagnostic push
+#pragma GCC diagnostic ignored "-Wunused-parameter"
+
 template <typename Type, uint8_t ClusterSizeX, uint8_t ClusterSizeY,
           typename CoordType>
 void define_cluster(py::module &m, const std::string &typestr) {
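
The two pragmas suppress -Wunused-parameter for the binding helpers; a matching #pragma GCC diagnostic pop is added in a later hunk. A self-contained example of the same bracketing, with a deliberately unused parameter:

    #pragma GCC diagnostic push
    #pragma GCC diagnostic ignored "-Wunused-parameter"

    // 'unused' would normally trigger -Wunused-parameter under -Wall -Wextra;
    // the pragma above silences it until the matching pop below.
    int helper(int used, int unused) { return used * 2; }

    #pragma GCC diagnostic pop

    int main() { return helper(21, 0) == 42 ? 0 : 1; }
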
@@ -80,12 +83,13 @@ void define_cluster_vector(py::module &m, const std::string &typestr) {
                  self.push_back(cluster);
              })

-
         // implement push_back
         .def_property_readonly("size", &ClusterVector<ClusterType>::size)
         .def("item_size", &ClusterVector<ClusterType>::item_size)
         .def_property_readonly("fmt",
-                               [typestr](ClusterVector<ClusterType> &self) { return fmt_format<ClusterType>; })
+                               [typestr](ClusterVector<ClusterType> &self) {
+                                   return fmt_format<ClusterType>;
+                               })

         .def_property_readonly("cluster_size_x",
                                &ClusterVector<ClusterType>::cluster_size_x)
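
The lambda change above is purely a formatting split; the underlying pattern is pybind11's def_property_readonly backed by a lambda. A minimal sketch of that pattern on a hypothetical type (not aare's binding code):

    #include <pybind11/pybind11.h>
    #include <string>

    namespace py = pybind11;

    // Hypothetical type standing in for ClusterVector<ClusterType>.
    struct Box {
        std::string fmt = "2i"; // arbitrary placeholder format string
    };

    PYBIND11_MODULE(example, m) {
        py::class_<Box>(m, "Box")
            .def(py::init<>())
            // Read-only property computed by a lambda, laid out in the same
            // multi-line style as the reformatted binding above.
            .def_property_readonly("fmt",
                                   [](Box &self) {
                                       return self.fmt;
                                   });
    }

Built as an extension module, Box().fmt would then be readable from Python but not assignable.
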
@@ -269,3 +273,4 @@ void define_cluster_finder_bindings(py::module &m, const std::string &typestr) {
             return hitmap;
         });
 }
+#pragma GCC diagnostic pop
@@ -146,7 +146,6 @@ TEST_CASE("Read clusters from single frame file", "[.files]") {
     // [ 96 295] [855 856 857 858 859 860 861 862 863]
     // [ 97 296] [864 865 866 867 868 869 870 871 872]

-
     auto fpath = test_data_path() / "clust" / "single_frame_97_clustrers.clust";
     REQUIRE(std::filesystem::exists(fpath));

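
The test builds the data path with std::filesystem's / operator and guards it with a REQUIRE on existence. A standalone sketch of the same idiom (the directory and file names here are placeholders, and test_data_path() is the test suite's own helper, not reproduced):

    #include <filesystem>
    #include <iostream>

    int main() {
        namespace fs = std::filesystem;

        // operator/ inserts the platform's separator between components.
        fs::path fpath = fs::path("test_data") / "clust" / "example.clust";

        // Checking existence up front makes a missing data file fail early
        // instead of producing a confusing read error later.
        std::cout << fpath << (fs::exists(fpath) ? " exists\n" : " is missing\n");
        return fs::exists(fpath) ? 0 : 1;
    }
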
@@ -174,11 +173,10 @@ TEST_CASE("Read clusters from single frame file", "[.files]") {
     }
 }

-
-
 TEST_CASE("Read clusters", "[.files]") {
     // beam_En700eV_-40deg_300V_10us_d0_f0_100.clust
-    auto fpath = test_data_path() / "clust" / "beam_En700eV_-40deg_300V_10us_d0_f0_100.clust";
+    auto fpath = test_data_path() / "clust" /
+                 "beam_En700eV_-40deg_300V_10us_d0_f0_100.clust";
     REQUIRE(std::filesystem::exists(fpath));

     ClusterFile<Cluster<int32_t, 3, 3>> f(fpath);