fixed size read bug and added tests

This commit is contained in:
Erik Frojdh 2023-06-04 18:57:49 +02:00
parent d9828dd9a3
commit b8a8891c8d
3 changed files with 61 additions and 13 deletions

@ -61,25 +61,33 @@ static PyObject *ClusterFileReader_read(ClusterFileReader *self,
const int ndim = 1;
Py_ssize_t size = 0;
PyObject *noise_obj;
if (!PyArg_ParseTuple(args, "nO", &size, &noise_obj)) {
PyObject *noise_obj = NULL;
PyObject *noise_array = NULL;
if (!PyArg_ParseTuple(args, "n|O", &size, &noise_obj)) {
PyErr_SetString(PyExc_TypeError, "Could not parse args.");
return NULL;
}
npy_intp dims[] = {size};
// Create two numpy arrays from the passed objects, if possible numpy will
// If possible numpy will
// use the underlying buffer, otherwise it will create a copy, for example
// if data type is different or we pass in a list. The
// NPY_ARRAY_C_CONTIGUOUS flag ensures that we have contiguous memory.
PyObject *noise_array =
PyArray_FROM_OTF(noise_obj, NPY_DOUBLE, NPY_ARRAY_C_CONTIGUOUS);
#ifdef CR_VERBOSE
printf("Getting ready to read: %lu clusters. Noise map: %p\n", size,
noise_obj);
#endif
// If the user passed a noise map we fetch a pointer to that array as well
int nx = 0, ny = 0;
double *noise_map = NULL;
// If parsing of a or b fails we throw an exception in Python
if (noise_array) {
if (noise_obj) {
noise_array =
PyArray_FROM_OTF(noise_obj, NPY_DOUBLE, NPY_ARRAY_C_CONTIGUOUS);
int ndim_noise = PyArray_NDIM((PyArrayObject *)(noise_array));
npy_intp *noise_shape = PyArray_SHAPE((PyArrayObject *)(noise_array));
@ -126,10 +134,10 @@ static PyObject *ClusterFileReader_read(ClusterFileReader *self,
// Here goes the looping, removing frame numbers etc.
int n_read = 0;
if (noise_map)
read_clusters_with_cut(self->fp, size, buf, &self->n_left, noise_map,
n_read = read_clusters_with_cut(self->fp, size, buf, &self->n_left, noise_map,
nx, ny);
else
read_clusters(self->fp, size, buf, &self->n_left);
n_read = read_clusters(self->fp, size, buf, &self->n_left);
if (n_read != size) {
// resize the array to match the number of read photons

@ -85,10 +85,33 @@ def test_read_file_with_37_frames(data_path):
def test_read_file_with_37_frames_in_chunks(data_path):
#File should contain 37 frames with 5 clusters each
#Full spec in utils/write_test_data.py
fname= (data_path/'37frames_with_5_clusters.clust').as_posix()
fname= data_path/'37frames_with_5_clusters.clust'
r = ClusterFileReader(fname)
total_clusters = 0
while (clusters:=r.read(7)).size:
total_clusters += clusters.size
assert total_clusters == 185
def test_read_file_with_noise_mask(data_path):
#No mask
fname= data_path/'noise_test.clust'
r = ClusterFileReader(fname)
cl = r.read(85) #file contains 70 clusters
assert cl.size == 70
#noise mask with zeros
noise_cut = np.zeros((400,400))
r = ClusterFileReader(fname)
cl = r.read(85, noise_cut)
assert cl.size == 70
#only pixel 80, 133 above noise
noise_cut[:] = 100
#TODO! Agree on orientation of noise mask!
# noise_cut[80,133] = 0
noise_cut[133,80] = 0
r = ClusterFileReader(fname)
cl = r.read(85, noise_cut)
assert cl.size == 10

@ -40,4 +40,21 @@ with open(path/'37frames_with_5_clusters.clust', 'wb') as f:
data['data'] = np.arange(j,j+9)
print(data)
data.tofile(f)
#Writing out data to test noise cuts
header[1] = 7
with open(path/'noise_test.clust', 'wb') as f:
for i in range(10):
data['x'] = 50
data['y'] = 133
data['data'][:] = 50
header.tofile(f)
print(header)
header[0] += 1
for j in range(7):
print(data)
data.tofile(f)
data['x'] += 10