mirror of
https://github.com/paulscherrerinstitute/sf_daq_buffer.git
synced 2026-05-10 05:02:03 +02:00
Rename function to create a chunked dataset
This commit is contained in:
@@ -140,8 +140,8 @@ void H5Writer::write_data(const string& dataset_name, const size_t data_index, c
|
||||
}
|
||||
}
|
||||
|
||||
void H5Writer::create_dataset(const string& dataset_name, const vector<size_t>& data_shape,
|
||||
const string& data_type, const string& endianness)
|
||||
void H5Writer::create_chunked_dataset(const string& dataset_name, const vector<size_t>& data_shape,
|
||||
const string& data_type, const string& endianness, hsize_t max_dataset_size)
|
||||
{
|
||||
// Number of dimensions in each data point.
|
||||
const size_t data_rank = data_shape.size();
|
||||
@@ -169,7 +169,7 @@ void H5Writer::create_dataset(const string& dataset_name, const vector<size_t>&
|
||||
#ifdef DEBUG_OUTPUT
|
||||
using namespace date;
|
||||
cout << "[" << std::chrono::system_clock::now() << "]";
|
||||
cout << "[H5Writer::create_dataset] Creating dataspace of size (";
|
||||
cout << "[H5Writer::create_chunked_dataset] Creating dataspace of size (";
|
||||
for (hsize_t i=0; i<dataset_rank; ++i) {
|
||||
cout << dataset_dimension[i] << ",";
|
||||
}
|
||||
@@ -287,7 +287,7 @@ hsize_t H5Writer::prepare_storage_for_data(const string& dataset_name, const siz
|
||||
|
||||
// Create the dataset if we don't have it yet.
|
||||
if (datasets.find(dataset_name) == datasets.end()) {
|
||||
create_dataset(dataset_name, data_shape, data_type, endianness);
|
||||
create_chunked_dataset(dataset_name, data_shape, data_type, endianness);
|
||||
}
|
||||
|
||||
hsize_t current_dataset_size = datasets_current_size.at(dataset_name);
|
||||
|
||||
@@ -28,7 +28,7 @@ class H5Writer
|
||||
hsize_t prepare_storage_for_data(const std::string& dataset_name, const size_t data_index, const std::vector<size_t>& data_shape,
|
||||
const std::string& data_type, const std::string& endianness);
|
||||
|
||||
void create_dataset(const std::string& dataset_name, const std::vector<size_t>& data_shape,
|
||||
void create_chunked_dataset(const std::string& dataset_name, const std::vector<size_t>& data_shape,
|
||||
const std::string& data_type, const std::string& endianness);
|
||||
|
||||
size_t get_relative_data_index(const size_t data_index);
|
||||
@@ -41,6 +41,8 @@ class H5Writer
|
||||
virtual void close_file();
|
||||
virtual void write_data(const std::string& dataset_name, const size_t data_index, const char* data, const std::vector<size_t>& data_shape,
|
||||
const size_t data_bytes_size, const std::string& data_type, const std::string& endianness);
|
||||
virtual void write_dataset(const std::string& dataset_name, const size_t data_index, const char* data, const std::vector<size_t>& data_shape,
|
||||
const size_t data_bytes_size, const std::string& data_type, const std::string& endianness);
|
||||
virtual H5::H5File& get_h5_file();
|
||||
virtual bool is_data_for_current_file(const size_t data_index);
|
||||
|
||||
|
||||
Reference in New Issue
Block a user