fixed size read bug and added tests
@@ -61,25 +61,33 @@ static PyObject *ClusterFileReader_read(ClusterFileReader *self,
   const int ndim = 1;
   Py_ssize_t size = 0;
-  PyObject *noise_obj;
-  if (!PyArg_ParseTuple(args, "nO", &size, &noise_obj)) {
+  PyObject *noise_obj = NULL;
+  PyObject *noise_array = NULL;
+  if (!PyArg_ParseTuple(args, "n|O", &size, &noise_obj)) {
     PyErr_SetString(PyExc_TypeError, "Could not parse args.");
     return NULL;
   }
 
   npy_intp dims[] = {size};
 
-  // Create two numpy arrays from the passed objects, if possible numpy will
+  // If possible numpy will
   // use the underlying buffer, otherwise it will create a copy, for example
   // if data type is different or we pass in a list. The
   // NPY_ARRAY_C_CONTIGUOUS flag ensures that we have contiguous memory.
-  PyObject *noise_array =
-      PyArray_FROM_OTF(noise_obj, NPY_DOUBLE, NPY_ARRAY_C_CONTIGUOUS);
 
 #ifdef CR_VERBOSE
   printf("Getting ready to read: %lu clusters. Noise map: %p\n", size,
          noise_obj);
 #endif
 
   // If the user passed a noise map we fetch a pointer to that array as well
   int nx = 0, ny = 0;
   double *noise_map = NULL;
 
   // If parsing of a or b fails we throw an exception in Python
-  if (noise_array) {
+  if (noise_obj) {
+    noise_array =
+        PyArray_FROM_OTF(noise_obj, NPY_DOUBLE, NPY_ARRAY_C_CONTIGUOUS);
+
     int ndim_noise = PyArray_NDIM((PyArrayObject *)(noise_array));
     npy_intp *noise_shape = PyArray_SHAPE((PyArrayObject *)(noise_array));
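Note: this hunk makes the noise map argument optional ("n|O") and only calls PyArray_FROM_OTF when a noise object was actually passed. The validation that follows is cut off by the hunk; purely as a hedged illustration of the usual NumPy C API pattern (variable names mirror the diff, but the exact checks in the real file may differ), the conversion result would typically be checked along these lines:

    // Sketch only, not the file's actual code: validate the converted noise
    // array and fetch its dimensions and data pointer.
    if (noise_array == NULL)
        return NULL; // conversion failed; numpy has already set an exception
    if (PyArray_NDIM((PyArrayObject *)noise_array) != 2) {
        PyErr_SetString(PyExc_ValueError, "noise map must be a 2D array");
        Py_DECREF(noise_array);
        return NULL;
    }
    nx = (int)PyArray_DIM((PyArrayObject *)noise_array, 0);
    ny = (int)PyArray_DIM((PyArrayObject *)noise_array, 1);
    noise_map = (double *)PyArray_DATA((PyArrayObject *)noise_array);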
@@ -126,10 +134,10 @@ static PyObject *ClusterFileReader_read(ClusterFileReader *self,
   // Here goes the looping, removing frame numbers etc.
+  int n_read = 0;
   if (noise_map)
-    read_clusters_with_cut(self->fp, size, buf, &self->n_left, noise_map,
+    n_read = read_clusters_with_cut(self->fp, size, buf, &self->n_left, noise_map,
                            nx, ny);
   else
-    read_clusters(self->fp, size, buf, &self->n_left);
+    n_read = read_clusters(self->fp, size, buf, &self->n_left);
 
+  if (n_read != size) {
+    // resize the array to match the number of read photons
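This second hunk is the actual size-read fix: the return values of read_clusters and read_clusters_with_cut are now captured in n_read, and when fewer clusters than requested were read the output array must be shrunk to match. The resize itself is not visible in this excerpt; as a minimal sketch only (the name "clusters" is a placeholder for the output array object, not necessarily the name used in this file), shrinking an over-allocated 1-D NumPy array in the C API typically looks like this:

    if (n_read != size) {
        // Hypothetical illustration: shrink the over-allocated 1-D array so
        // its length matches the number of clusters actually read.
        npy_intp new_dims[] = {n_read};
        PyArray_Dims new_shape = {new_dims, 1};
        PyObject *ok = PyArray_Resize((PyArrayObject *)clusters, &new_shape,
                                      1 /* refcheck */, NPY_ANYORDER);
        if (ok == NULL)
            return NULL; // resize failed; exception already set
        Py_DECREF(ok);   // PyArray_Resize returns None on success
    }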