Streak Finder algorithm for CBD experiment #2

Merged
augustin_s merged 46 commits from ext-dorofe_e/dap:chapman into main 2025-07-14 11:18:07 +02:00
7 changed files with 7 additions and 150 deletions
Showing only changes of commit 76b8d957dd - Show all commits

View File

@@ -105,20 +105,6 @@ options:
* `'spot_x/spot_y/spot_intensity': 3*list[float]` - Provides coordinates and intensity of the identified peaks within the frame.
* `'is_hit_frame': True/False` - Marks whether a frame qualifies as a hit based on the number of identified peaks exceeding the defined threshold.
* **White field correction Algorithm**
Performs the **in-place** white field correction of the image.
Input parameters:
* `'do_whitefield_correction': 1/0` - Specifies whether to do in-place white field correction.
* `'wf_data_file': str` - Path to the hdf5 file with corrected white field image.
* `'wf_dataset': str` [Optional] - Name of the dataset containing white field image in the hdf5 file, default is `"data/data"`.
* `'wf_method': 'div'|'sub'` - Method of white field correction - either division or subtraction is supported.
Algorithm Output:
* `'white_field_correction_applied': 1/0` - Indicates whether the algorithm ran successfully.
* Image is changed **in-place**.
* **streakfinder Algorithm**
This algorithm uses the [streak-finder package](https://github.com/simply-nicky/streak_finder) - a connection-based streak finding algorithm for convergent beam diffraction patterns.
@@ -234,18 +220,6 @@ options:
Use the `'apply_additional_mask': 0/1` - Input flag to enable this functionality.
* **Additional Mask from file**
Alternative to previous additional masking, mask data is read from specified file. NumPy and HDF5 formats are supported.
Input parameters:
* `'apply_additional_mask_from_file': 1/0` - Input flag to enable this functionality.
* `'mask_file': str` - Path to the hdf5 file with mask data.
* `'mask_ds': str` [Optional] - Name of the dataset containing mask in the hdf5 file, default is `"data/data"`.
Algorithm Output:
* `'mask_from_file_applied': 1/0` - Indicates whether the algorithm ran successfully.
* **Filter based on pulse picker information**
If the event propagation capability is accessible for the detector and the pulse picker information is correctly configured for propagation, the filtration based on pulse picker information becomes feasible by using the

View File

@@ -1,6 +1,5 @@
from .addmask import calc_apply_additional_mask
from .addmaskfile import calc_apply_additional_mask_from_file
from .aggregation import calc_apply_aggregation
from .jfdata import JFData
from .mask import calc_mask_pixels
@@ -9,7 +8,6 @@ from .radprof import calc_radial_integration
from .roi import calc_roi
from .spiana import calc_spi_analysis
from .streakfind import calc_streakfinder_analysis
from .whitefield_correction import calc_apply_whitefield_correction
from .thresh import calc_apply_threshold

View File

@@ -1,35 +0,0 @@
import h5py
import numpy as np
def calc_apply_additional_mask_from_file(results, pixel_mask_pf):
    """
    Apply an additional mask loaded from a file to `pixel_mask_pf` **in place**.

    Supported formats: NumPy (path ending in ".npy") and HDF5 (anything else).

    Reads from `results`:
      - "apply_additional_mask_from_file": flag enabling this step.
      - "mask_file": path to the mask file.
      - "mask_ds": HDF5 dataset name, default "data/data".

    Writes to `results`:
      - "mask_from_file_applied": 1 on success, 0 otherwise
        (only set when the step is enabled).
    """
    apply_additional_mask = results.get("apply_additional_mask_from_file", False)
    if not apply_additional_mask:
        return

    results["mask_from_file_applied"] = 0

    mask_file = results.get("mask_file", None)
    if not mask_file:
        return

    mask_dataset = results.get("mask_ds", "data/data")

    # Support for hdf5 and npy
    if mask_file.endswith(".npy"):
        try:
            mask = np.asarray(np.load(mask_file), dtype=bool)
        except Exception as error:
            print(f"Error loading mask data from NumPy file {mask_file}:\n{error}")
            return
    else:
        try:
            # Bind the h5py handle to its own name: the original rebound
            # "mask_file" here, so the error messages below printed the
            # (closed) File object instead of the file path.
            with h5py.File(mask_file, "r") as h5file:
                mask = np.asarray(h5file[mask_dataset], dtype=bool)
        except Exception as error:
            print(f"Error loading mask from hdf5 file {mask_file}:\n{error}")
            return

    try:
        # Logical AND of the two masks, written back into pixel_mask_pf.
        np.multiply(pixel_mask_pf, mask, out=pixel_mask_pf)
    except Exception as error:
        print(f"Error applying additional mask from file {mask_file}:\n{error}")
    else:
        results["mask_from_file_applied"] = 1

View File

@@ -3,7 +3,6 @@ import numpy as np
import jungfrau_utils as ju
from .addmask import calc_apply_additional_mask
from .addmaskfile import calc_apply_additional_mask_from_file
class JFData:
@@ -59,7 +58,6 @@ class JFData:
pixel_mask_pf = np.ascontiguousarray(pixel_mask_corrected)
calc_apply_additional_mask(results, pixel_mask_pf) # changes pixel_mask_pf in place
calc_apply_additional_mask_from_file(results, pixel_mask_pf) # changes pixel_mask_pf in place
self.id_pixel_mask_corrected = new_id_pixel_mask_corrected
self.pixel_mask_pf = pixel_mask_pf

View File

@@ -100,14 +100,11 @@ def _calc_streakfinder_analysis(results, cryst_data: CrystData):
"sf_npts",
"sf_xtol",
"sf_nfa",
"sf_num_threads",
# "beam_center_x",
# "beam_center_y"
]
if not all([param in results.keys() for param in params_required]):
print(f"ERROR: Not enough parameters for streak finder analysis. Skipping\n"
print(f"ERROR: Not enough parameters for streak finder analysis. Skipping.\n"
f"{params_required=}")
return

View File

@@ -1,74 +0,0 @@
import numpy as np
import h5py
def _div(image, whitefield):
np.divide(
image,
whitefield,
out=image,
where=whitefield != 0
)
def _sub(image, whitefield):
np.subtract(
image,
whitefield,
out=image,
)
WF_METHODS = {
"div": _div,
"sub": _sub
}
def calc_apply_whitefield_correction(results, data):
    """
    In-place white field correction of the detector data.

    Reads from `results`:
      - "do_whitefield_correction": flag enabling this step.
      - "wf_data_file": path to the hdf5 file with the white field image.
      - "wf_method": correction method, a key of WF_METHODS ("div" or "sub").
      - "wf_dataset": hdf5 dataset name, default "data/data".

    Writes to `results`:
      - "white_field_correction_applied": 1 on success, 0 otherwise
        (only set when the step is enabled).

    Returns the loaded white field image on success, None otherwise
    (early exits return None implicitly).
    """
    do_whitefield_correction = results.get("do_whitefield_correction", False)
    if not do_whitefield_correction:
        return

    results["white_field_correction_applied"] = 0

    params_required = [
        "wf_data_file",
        "wf_method",
    ]

    if not all(param in results for param in params_required):
        print(f"ERROR: Not enough parameters for whitefield correction. Skipping\n"
              f"{params_required=}")
        return

    wf_data_file = results["wf_data_file"]
    wf_method = results["wf_method"]

    if wf_method not in WF_METHODS:
        # The original message printed params_required here (copy-paste slip);
        # the useful diagnostic is the set of supported methods.
        print(f"ERROR: Unknown whitefield correction method {wf_method}. Skipping\n"
              f"supported methods: {sorted(WF_METHODS)}")
        return

    wf_dataset = results.get("wf_dataset", "data/data")

    # TODO: cache white field data, only reload if file changed
    # maybe store checksum in results as "_checksum"
    try:
        with h5py.File(wf_data_file, "r") as wfile:
            whitefield_image = np.asarray(wfile[wf_dataset])
    except Exception as error:
        print(f"ERROR: Can't read whitefield from file {wf_data_file}. Skipping\n"
              f"{error=}")
        return

    try:
        WF_METHODS[wf_method](data, whitefield_image)
    except Exception as error:
        print(f"ERROR: White field correction failed.\n"
              f"{error=}")
    else:
        results["white_field_correction_applied"] = 1
    return whitefield_image

View File

@@ -2,8 +2,9 @@ import argparse
import numpy as np
from algos import (calc_apply_aggregation, calc_apply_threshold, calc_mask_pixels, calc_peakfinder_analysis, calc_radial_integration, calc_roi, calc_spi_analysis,
calc_apply_whitefield_correction, calc_streakfinder_analysis, JFData)
from algos import (calc_apply_aggregation, calc_apply_threshold, calc_mask_pixels, calc_peakfinder_analysis,
calc_radial_integration, calc_roi, calc_spi_analysis,
calc_streakfinder_analysis, JFData)
from utils import Aggregator, BufferedJSON, randskip, read_bit
from zmqsocks import ZMQSockets
@@ -117,11 +118,9 @@ def work(backend_address, accumulator_host, accumulator_port, visualisation_host
# ???
# White-field correction and streak finder processing for convergent-beam diffraction
print(f"Applying whitefield correction")
calc_apply_whitefield_correction(results, image) # changes image in place
print(f"Searching streaks")
image = calc_streakfinder_analysis(results, image, pixel_mask_pf) # changes image in place is do_snr=True
# Streak finder processing for convergent-beam diffraction experiments
# changes image and mask in place if do_snr=True in parameters file
image = calc_streakfinder_analysis(results, image, pixel_mask_pf)
print(f"Done\n{results=}")
image, aggregation_is_ready = calc_apply_aggregation(results, image, pixel_mask_pf, aggregator)