Added streak finding and white field correction for convergent-beam diffraction experiments;

TODO: cleanup, document streakfinder installation or add to setup
This commit is contained in:
2025-06-24 08:13:28 +02:00
parent f47e1bbb88
commit cb2c6d5ab2
5 changed files with 172 additions and 1 deletions

View File

@@ -105,6 +105,40 @@ options:
* `'spot_x/spot_y/spot_intensity': 3*list[float]` - Provides coordinates and intensity of the identified peaks within the frame.
* `'is_hit_frame': True/False` - Marks whether a frame qualifies as a hit based on the number of identified peaks exceeding the defined threshold.
* **White field correction Algorithm**
Performs the white field correction of the image **in place**.
Input parameters:
* `'do_whitefield_correction': 1/0` - Specifies whether to do in-place white field correction.
* `'wf_data_file': str` - Path to the hdf5 file with corrected white field image.
* `'wf_dataset': str` - Name of the dataset containing white field image in the hdf5 file.
* `'wf_method': 'div'|'sub'` - Method of white field correction - either division or subtraction is supported.
Algorithm Output:
* `'is_white_field_corrected': bool` - Indicates whether white field correction took place.
* Image is changed **in-place**.
* **streakfinder Algorithm**
This algorithm uses the [streak-finder package](https://github.com/simply-nicky/streak_finder) - a connection-based streak finding algorithm for convergent beam diffraction patterns.
Input parameters:
* `'do_streakfinder_analysis': 1/0` - Specifies whether to execute the streak-finder algorithm.
* `'sf_structure_radius': int` - Connectivity structure radius.
* `'sf_structure_rank': int` - Connectivity structure rank.
* `'sf_min_size': float` - Minimum number of linelets required in a detected streak.
* `'sf_vmin': float` - Peak threshold. All peaks with values lower than ``sf_vmin`` are discarded.
* `'sf_npts': int` - Support size threshold. The support structure is a connected set of pixels whose
values are above the threshold ``sf_vmin``. A peak is discarded if the size of its support
set is lower than ``sf_npts``.
* `'sf_xtol': float` - Distance threshold. A new linelet is added to a streak if its distance to the
streak is no more than ``sf_xtol``.
Algorithm Output:
* `'number_of_streaks': int` - Indicates the count of identified streaks.
* `'streaks': 4*list[float]` - Provides coordinates of the identified streaks: x0, y0, x1, y1
* **Radial Profile Integration**
This algorithm integrates pixel intensities radially based on defined parameters.
@@ -242,3 +276,4 @@ Algorithms use input parameters specified in a JSON file provided to worker.py (
Special thanks to Valerio Mariani for providing the cython implementation of peakfinder8.
Special thanks to Nikolai Ivanov for providing the cython implementation of streak-finder.

View File

@@ -7,6 +7,8 @@ from .peakfind import calc_peakfinder_analysis
from .radprof import calc_radial_integration
from .roi import calc_roi
from .spiana import calc_spi_analysis
from .streakfind import calc_streakfinder_analysis
from .whitefield_correction import calc_apply_whitefield_correction
from .thresh import calc_apply_threshold

63
dap/algos/streakfind.py Normal file
View File

@@ -0,0 +1,63 @@
"""
Streak Finder algorithm implemented by CFEL Chapman group
Requires Convergent beam streak finder package installed:
https://github.com/simply-nicky/streak_finder
(note g++ 11 required for building)
"""
from streak_finder import PatternStreakFinder
from streak_finder.label import Structure2D
def calc_streakfinder_analysis(results, data, pixel_mask_sf):
    """
    Run the connection-based streak finder on a detector frame.

    Reads the ``sf_*`` parameters from ``results`` and, on success, writes:
      - ``results["number_of_streaks"]``: int, number of detected streaks.
      - ``results["streaks"]``: list of 4 lists of floats — the x0, y0, x1, y1
        coordinates of the detected streaks (one list per coordinate).

    Parameters:
      results: dict of input parameters; also receives the outputs.
      data: 2D detector image to analyse.
      pixel_mask_sf: pixel mask passed to the streak finder.

    Skips silently (with a printed message) when disabled or when any
    required parameter is missing.
    """
    do_streakfinder_analysis = results.get("do_streakfinder_analysis", False)
    if not do_streakfinder_analysis:
        print("No streak finder analysis")
        return

    params_required = [
        "sf_structure_radius",
        "sf_structure_rank",
        "sf_min_size",
        "sf_vmin",
        "sf_npts",
        "sf_xtol"
    ]

    # Generator form avoids building a throwaway list just for the check.
    if not all(param in results for param in params_required):
        print(f"ERROR: Not enough parameters for streak finder analysis. Skipping\n"
              f"{params_required=}")
        return

    radius = results["sf_structure_radius"]
    rank = results["sf_structure_rank"]
    min_size = results["sf_min_size"]
    vmin = results["sf_vmin"]
    npts = results["sf_npts"]
    xtol = results["sf_xtol"]

    struct = Structure2D(radius, rank)
    psf = PatternStreakFinder(
        data=data,
        mask=pixel_mask_sf,
        structure=struct,
        min_size=min_size
    )

    # Find peaks in a pattern. Returns a sparse set of peaks which values are above a threshold
    # ``vmin`` that have a supporing set of a size larger than ``npts``. The minimal distance
    # between peaks is ``2 * structure.radius``
    peaks = psf.detect_peaks(vmin=vmin, npts=npts)

    # Streak finding algorithm. Starting from the set of seed peaks, the lines are iteratively
    # extended with a connectivity structure.
    streaks = psf.detect_streaks(peaks=peaks, xtol=xtol, vmin=vmin)

    streak_lines = streaks.to_lines()
    # streak_lines has shape (4, n_lines): rows are x0, y0, x1, y1.
    _, number_of_streaks = streak_lines.shape
    print(f"Found {number_of_streaks} streaks")

    list_result = [line.tolist() for line in streak_lines]

    results.update({"number_of_streaks": number_of_streaks})
    results.update({"streaks": list_result})

View File

@@ -0,0 +1,62 @@
import numpy as np
import h5py
def _div(image, whitefield):
image = np.divide(
image,
whitefield,
out=np.zeros_like(image),
where=whitefield != 0
)
return image
def _sub(image, whitefield):
image -= whitefield
return image
# Dispatch table mapping the ``wf_method`` input parameter to the in-place
# correction callable ("div" = division, "sub" = subtraction).
WF_METHODS = {
    "div": _div,
    "sub": _sub
}
def calc_apply_whitefield_correction(results, data):
    """
    In-place white field correction of the detector data.

    Parameters read from ``results``:
      - ``do_whitefield_correction``: 1/0 — whether to run the correction.
      - ``wf_data_file``: str — path to the HDF5 file with the white field image.
      - ``wf_dataset``: str — dataset name inside the HDF5 file
        (optional; defaults to ``"data/data"`` for backward compatibility).
      - ``wf_method``: ``"div"`` or ``"sub"`` — correction method.

    Output:
      - ``results["is_white_field_corrected"]``: bool — whether the
        correction was applied.
      - ``data`` is modified **in place** on success.

    Skips (with a printed message) when disabled, when required parameters
    are missing, when the method is unknown, or when the file is unreadable.
    """
    results["is_white_field_corrected"] = False

    if not results.get("do_whitefield_correction", False):
        print("No whitefield correction")
        return

    params_required = [
        "wf_data_file",
        "wf_method",
    ]

    if not all(param in results for param in params_required):
        print(f"ERROR: Not enough parameters for whitefield correction. Skipping\n"
              f"{params_required=}")
        return

    wf_data_file = results["wf_data_file"]
    wf_method = results["wf_method"]
    # The docs describe a 'wf_dataset' parameter, but the code used to
    # hard-code "data/data"; honor the parameter with the old value as default.
    wf_dataset = results.get("wf_dataset", "data/data")

    if wf_method not in WF_METHODS:
        print(f"ERROR: Unknown whitefield correction method {wf_method}. Skipping\n"
              f"{params_required=}")
        return

    # TODO: cache white field data, only reload if file changed
    # maybe store checksum in results as "_checksum"
    try:
        with h5py.File(wf_data_file, "r") as wfile:
            whitefield_image = np.asarray(wfile[wf_dataset])
    except Exception as error:
        print(f"ERROR: Can't read whitefield from file {wf_data_file}. Skipping\n"
              f"{error=}")
        return

    results["is_white_field_corrected"] = True
    # The correction callables modify ``data`` in place.
    WF_METHODS[wf_method](data, whitefield_image)

View File

@@ -2,7 +2,8 @@ import argparse
import numpy as np
from algos import calc_apply_aggregation, calc_apply_threshold, calc_mask_pixels, calc_peakfinder_analysis, calc_radial_integration, calc_roi, calc_spi_analysis, JFData
from algos import (calc_apply_aggregation, calc_apply_threshold, calc_mask_pixels, calc_peakfinder_analysis, calc_radial_integration, calc_roi, calc_spi_analysis,
calc_streakfinder_analysis, calc_apply_whitefield_correction, JFData)
from utils import Aggregator, BufferedJSON, randskip, read_bit
from zmqsocks import ZMQSockets
@@ -115,6 +116,14 @@ def work(backend_address, accumulator_host, accumulator_port, visualisation_host
calc_peakfinder_analysis(results, pfimage, pixel_mask_pf)
# ???
# White-field correction and streak finder processing for convergent-beam diffraction
print(f"Applying whitefield correction")
calc_apply_whitefield_correction(results, image) # changes image in place
print(f"Searching streaks")
calc_streakfinder_analysis(results, image, pixel_mask_pf)
print(f"Done\n{results=}")
image, aggregation_is_ready = calc_apply_aggregation(results, image, pixel_mask_pf, aggregator)
results["type"] = str(image.dtype)