Repository: https://github.com/slsdetectorgroup/aare.git (mirror)

commit 09de69c090
parent b23e697e26

    python works
@@ -1,29 +0,0 @@
-# Findh5py.cmake
-#
-# This module finds if h5py is installed and sets the H5PY_FOUND variable.
-# It also sets the H5PY_INCLUDE_DIRS and H5PY_LIBRARIES variables.
-
-find_package(PythonInterp REQUIRED)
-find_package(PythonLibs REQUIRED)
-
-execute_process(
-    COMMAND ${PYTHON_EXECUTABLE} -c "import h5py"
-    RESULT_VARIABLE H5PY_IMPORT_RESULT
-    OUTPUT_QUIET
-    ERROR_QUIET
-)
-
-if(H5PY_IMPORT_RESULT EQUAL 0)
-    set(H5PY_FOUND TRUE)
-    execute_process(
-        COMMAND ${PYTHON_EXECUTABLE} -c "import h5py; print(h5py.get_include())"
-        OUTPUT_VARIABLE H5PY_INCLUDE_DIR
-        OUTPUT_STRIP_TRAILING_WHITESPACE
-    )
-    set(H5PY_INCLUDE_DIRS ${H5PY_INCLUDE_DIR})
-    set(H5PY_LIBRARIES ${PYTHON_LIBRARIES})
-else()
-    set(H5PY_FOUND FALSE)
-endif()
-
-mark_as_advanced(H5PY_INCLUDE_DIRS H5PY_LIBRARIES)

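The deleted Findh5py.cmake module above detected h5py by running the Python interpreter from CMake, checking the exit code of "import h5py", and then asking h5py for its include directory. Below is a minimal standalone Python sketch of that same probe, for illustration only; it is not part of this commit.

import subprocess
import sys

# Run the same check the deleted CMake module performed: import h5py in a
# child interpreter and, if that succeeds, ask it for its include directory.
result = subprocess.run(
    [sys.executable, "-c", "import h5py; print(h5py.get_include())"],
    capture_output=True,
    text=True,
)
h5py_found = result.returncode == 0
h5py_include_dir = result.stdout.strip() if h5py_found else None
print(f"H5PY_FOUND={h5py_found}, include dir: {h5py_include_dir}")
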
@@ -34,30 +34,22 @@ set( PYTHON_FILES
     aare/utils.py
 )
 
-# Conditionally add HDF5-related Python files
-# HDF5
-# if (AARE_HDF5)
-# find_package(h5py REQUIRED)
 
-# find_package(HDF5 1.10 COMPONENTS CXX REQUIRED)
-# add_definitions(
-# ${HDF5_DEFINITIONS}
-# )
-# list(APPEND PYTHON_FILES
-# aare/Hdf5File.py
-# )
-# if(HDF5_FOUND)
-# target_sources(_aare PRIVATE
-# ${CMAKE_CURRENT_SOURCE_DIR}/src/Hdf5File.cpp
-# ${CMAKE_CURRENT_SOURCE_DIR}/src/Hdf5MasterFile.cpp
-# )
-# target_link_libraries(_aare PUBLIC ${HDF5_LIBRARIES})
-# target_include_directories(_aare PUBLIC ${HDF5_INCLUDE_DIRS})
-# endif()
-# endif()
-# if(H5PY_FOUND)
-# set(H5PY_INCLUDE_DIRS ${H5PY_INCLUDE_DIR})
-# set(H5PY_LIBRARIES ${PYTHON_LIBRARIES})
-# endif()
+#HDF5
+if (AARE_HDF5)
+    find_package(HDF5 1.10 COMPONENTS CXX REQUIRED)
+    add_definitions(
+        ${HDF5_DEFINITIONS}
+    )
+    list(APPEND PYTHON_FILES
+        aare/Hdf5File.py
+    )
+    if(HDF5_FOUND)
+        add_definitions(-DHDF5_FOUND)
+        target_link_libraries(_aare PUBLIC ${HDF5_LIBRARIES})
+        target_include_directories(_aare PUBLIC ${HDF5_INCLUDE_DIRS})
+    endif()
+endif()
 
 # Copy the python files to the build directory
 foreach(FILE ${PYTHON_FILES})

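This hunk enables the previously commented-out HDF5 section of the bindings' build configuration: when AARE_HDF5 is set, HDF5 (version 1.10, CXX component) is located, -DHDF5_FOUND is defined for the compiled module, and aare/Hdf5File.py is appended to the installed Python files. A quick post-build sanity check is sketched below; the module names aare._aare and aare.Hdf5File are taken from this diff, while checking them this way is just an illustration, not something the commit itself does.

import importlib

# Check that both the compiled extension and the HDF5 wrapper module built
# by the configuration above can actually be imported.
for name in ("aare._aare", "aare.Hdf5File"):
    try:
        importlib.import_module(name)
        print(f"{name}: OK")
    except ImportError as exc:
        print(f"{name}: not available ({exc})")
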
@@ -2,31 +2,15 @@
 from . import _aare
 
 
-from ._aare import File, RawMasterFile, RawSubFile
+from ._aare import File, RawMasterFile, RawSubFile, Hdf5MasterFile
 from ._aare import Pedestal, ClusterFinder, VarClusterFinder
 from ._aare import DetectorType
 from ._aare import ClusterFile
 
 from .CtbRawFile import CtbRawFile
 from .RawFile import RawFile
+from .Hdf5File import Hdf5File
 from .ScanParameters import ScanParameters
 
 from .utils import random_pixels, random_pixel
 
-try:
-    import h5py
-    HDF5_FOUND = True
-except ImportError:
-    HDF5_FOUND = False
-
-if HDF5_FOUND:
-    from ._aare import Hdf5MasterFile
-    from .Hdf5File import Hdf5File
-else:
-    class Hdf5MasterFile:
-        def __init__(self, *args, **kwargs):
-            raise ImportError("h5py library not found. HDF5 Master File is not available.")
-
-    class Hdf5File:
-        def __init__(self, *args, **kwargs):
-            raise ImportError("h5py library not found. HDF5 File is not available.")

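With the runtime h5py check and the stub classes removed, the package now exports the HDF5 classes unconditionally. The imports below simply restate what the new __init__.py makes available at the top level; they assume an installed aare package built with HDF5 support.

# Names re-exported by the aare package after this commit.
from aare import File, RawMasterFile, RawSubFile, Hdf5MasterFile
from aare import Pedestal, ClusterFinder, VarClusterFinder, DetectorType, ClusterFile
from aare import CtbRawFile, RawFile, Hdf5File, ScanParameters
from aare import random_pixels, random_pixel
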
@@ -1,3 +1,4 @@
+#include "H5Cpp.h"
 #include "aare/File.hpp"
 #include "aare/Frame.hpp"
 #include "aare/Hdf5File.hpp"

@@ -79,8 +79,8 @@ void define_hdf5_master_file_bindings(py::module &m) {
                                &Hdf5MasterFile::transceiver_samples)
         .def_property_readonly("number_of_rows",
                                &Hdf5MasterFile::number_of_rows)
-        .def_property_readonly("quad", &Hdf5MasterFile::quad)
-        .def_property_readonly("scan_parameters",
-                               &Hdf5MasterFile::scan_parameters)
-        .def_property_readonly("roi", &Hdf5MasterFile::roi);
+        .def_property_readonly("quad", &Hdf5MasterFile::quad);
+        //.def_property_readonly("scan_parameters",
+        //                       &Hdf5MasterFile::scan_parameters)
+        //.def_property_readonly("roi", &Hdf5MasterFile::roi);
 }

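The binding chain now ends at quad, with scan_parameters and roi commented out, so only the remaining read-only properties are reachable from Python. A hedged usage sketch follows; the constructor argument (a master-file path) is an assumption, since the constructor is not shown in this diff.

from aare import Hdf5MasterFile

# "path/to/master_file.h5" is a placeholder; the real constructor signature
# is not part of this hunk.
master = Hdf5MasterFile("path/to/master_file.h5")
print(master.number_of_rows)
print(master.quad)
print(master.transceiver_samples)
# master.scan_parameters and master.roi are commented out in this commit
# and therefore not exposed.
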
@@ -55,7 +55,7 @@ void Hdf5File::read_into(std::byte *image_buf, size_t n_frames,
     }
 };
 
-size_t Hdf5File::n_mod() const { return 1; } // n_subfile_parts; }
+size_t Hdf5File::n_mod() const { return 1; }
 
 size_t Hdf5File::bytes_per_frame() {
     return m_rows * m_cols * m_master.bitdepth() / 8;

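bytes_per_frame() is simply rows x cols x bitdepth / 8. A worked example with made-up detector dimensions (not taken from this diff):

# Illustrative numbers only: a 512 x 1024 sensor at 16 bits per pixel.
rows, cols, bitdepth = 512, 1024, 16
bytes_per_frame = rows * cols * bitdepth // 8
print(bytes_per_frame)  # 1048576 bytes = 1 MiB per frame
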
@@ -149,7 +149,7 @@ void Hdf5File::get_frame_into(size_t frame_index, std::byte *frame_buffer,
 
 void Hdf5File::get_data_into(size_t frame_index, std::byte *frame_buffer) {
     m_data_file->get_frame_into(frame_index, frame_buffer);
-    fmt::print("Read 2D data for frame {}\n", frame_index);
+    //fmt::print("Read 2D data for frame {}\n", frame_index);
 }
 
 void Hdf5File::get_header_into(size_t frame_index, DetectorHeader *header) {

@@ -218,8 +218,8 @@ void Hdf5File::open_data_file() {
         m_total_frames = m_data_file->dims[0];
         m_rows = m_data_file->dims[1];
         m_cols = m_data_file->dims[2];
-        fmt::print("Data Dataset dimensions: frames = {}, rows = {}, cols = {}\n",
-                   m_total_frames, m_rows, m_cols);
+        //fmt::print("Data Dataset dimensions: frames = {}, rows = {}, cols = {}\n",
+        //           m_total_frames, m_rows, m_cols);
     } catch (const H5::Exception &e) {
         m_data_file.reset();
         fmt::print("Exception type: {}\n", typeid(e).name());

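open_data_file() treats the data dataset as 3-D with dimensions (frames, rows, cols). An equivalent inspection with h5py is sketched below; the file name and dataset path are placeholders, since neither appears in this hunk.

import h5py

with h5py.File("master_file.h5", "r") as f:   # placeholder file name
    dset = f["data"]                          # placeholder dataset path
    n_frames, n_rows, n_cols = dset.shape
    print(f"frames = {n_frames}, rows = {n_rows}, cols = {n_cols}")
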
@@ -236,8 +236,8 @@ void Hdf5File::open_header_files() {
     try {
         for (size_t i = 0; i != header_dataset_names.size(); ++i) {
             m_header_files.push_back(std::make_unique<H5Handles>(m_master.master_fname().string(), metadata_group_name + header_dataset_names[i], 1));
-            fmt::print("{} Dataset dimensions: size = {}\n",
-                       header_dataset_names[i], m_header_files[i]->dims[0]);
+            //fmt::print("{} Dataset dimensions: size = {}\n",
+            //           header_dataset_names[i], m_header_files[i]->dims[0]);
         }
     } catch (const H5::Exception &e) {
         m_header_files.clear();

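open_header_files() opens one 1-D dataset per header field under a metadata group; the actual group and dataset names come from metadata_group_name and header_dataset_names, which this hunk does not show. The h5py sketch below illustrates that layout with placeholder names only.

import h5py

with h5py.File("master_file.h5", "r") as f:        # placeholder file name
    group = f["placeholder_metadata_group"]        # stands in for metadata_group_name
    for name, obj in group.items():
        if isinstance(obj, h5py.Dataset) and obj.ndim == 1:
            print(f"{name}: size = {obj.shape[0]}")
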