From cb2c6d5ab213d4a8249aaf7e314a4736cd9725fc Mon Sep 17 00:00:00 2001 From: Lisa Dorofeeva Date: Tue, 24 Jun 2025 08:13:28 +0200 Subject: [PATCH 01/43] Added streak finding and white field correction for convergent-beam diffraction experiments; TODO: cleanup, document streakfinder installation or add to setup --- README.md | 35 +++++++++++++++++ dap/algos/__init__.py | 2 + dap/algos/streakfind.py | 63 ++++++++++++++++++++++++++++++ dap/algos/whitefield_correction.py | 62 +++++++++++++++++++++++++++++ dap/worker.py | 11 +++++- 5 files changed, 172 insertions(+), 1 deletion(-) create mode 100644 dap/algos/streakfind.py create mode 100644 dap/algos/whitefield_correction.py diff --git a/README.md b/README.md index 794bc42..bfe38e4 100644 --- a/README.md +++ b/README.md @@ -105,6 +105,40 @@ options: * `'spot_x/spot_y/spot_intensity': 3*list[float]` - Provides coordinates and intensity of the identified peaks within the frame. * `'is_hit_frame': True/False` - Marks whether a frame qualifies as a hit based on the number of identified peaks exceeding the defined threshold. + * **White field correction Algorithm** + + Does the IN PLACE white field correction of the image + + Input parameters: + * `'do_whitefield_correction': 1/0` - Specifies whether to do in-place white field correction. + * `'wf_data_file': str` - Path to the hdf5 file with corrected white field image. + * `'wf_dataset': str` - Name of the dataset containing white field image in the hdf5 file. + * `'wf_method': 'div'|'sub'` - Method of white field correction - either division or subtraction is supported. + + Algorithm Output: + * `'is_white_field_corrected': bool` - Indicates whether white field correction took place. + * Image is changed **in-place**. + + * **streakfinder Algorithm** + + This algorithm is using [streak-finder package](https://github.com/simply-nicky/streak_finder) - a connection-based streak finding algorithm for convergent beam diffraction patterns. 
+ + Input parameters: + * `'do_streakfinder_analysis': 1/0` - Specifies whether to execute the streak-finder algorithm. + * `'sf_structure_radius': int` - Connectivity structure radius. + * `'sf_structure_rank': int` - Connectivity structure rank. + * `'sf_min_size': float` - Minimum number of linelets required in a detected streak. + * `'sf_vmin': float` - Peak threshold. All peaks with values lower than ``sf_vmin`` are discarded. + * `'sf_npts': int` - Support size threshold. The support structure is a connected set of pixels which + value is above the threshold ``sf_vmin``. A peak is discarded is the size of support + set is lower than ``sf_npts``. + * `'sf_xtol': float` - Distance threshold. A new linelet is added to a streak if it's distance to the + streak is no more than ``sf_xtol``. + + Algorithm Output: + * `'number_of_streaks': int` - Indicates the count of identified peaks. + * `'streaks': 4*list[float]` - Provides coordinates of the identified streaks: x0, y0, x1, y1 + * **Radial Profile Integration** This algorithm integrates pixel intensities radially based on defined parameters. @@ -242,3 +276,4 @@ Algorithms use input parameters specified in a JSON file provided to worker.py ( Special thanks to Valerio Mariani for providing the cython implementation of peakfinder8. +Special thanks to Nikolai Ivanov for providing the cython implementation of streak-finder. 
diff --git a/dap/algos/__init__.py b/dap/algos/__init__.py index 2b5f92e..27bcf04 100644 --- a/dap/algos/__init__.py +++ b/dap/algos/__init__.py @@ -7,6 +7,8 @@ from .peakfind import calc_peakfinder_analysis from .radprof import calc_radial_integration from .roi import calc_roi from .spiana import calc_spi_analysis +from .streakfind import calc_streakfinder_analysis +from .whitefield_correction import calc_apply_whitefield_correction from .thresh import calc_apply_threshold diff --git a/dap/algos/streakfind.py b/dap/algos/streakfind.py new file mode 100644 index 0000000..af7d0e6 --- /dev/null +++ b/dap/algos/streakfind.py @@ -0,0 +1,63 @@ +""" +Streak Finder algorithm implemented by CFEL Chapman group + +Requires Convergent beam streak finder package installed: + +https://github.com/simply-nicky/streak_finder +(note g++ 11 required for building) +""" + +from streak_finder import PatternStreakFinder +from streak_finder.label import Structure2D + + +def calc_streakfinder_analysis(results, data, pixel_mask_sf): + do_streakfinder_analysis = results.get("do_streakfinder_analysis", False) + if not do_streakfinder_analysis: + print(f"No streak finder analysis") + return + + params_required = [ + "sf_structure_radius", + "sf_structure_rank", + "sf_min_size", + "sf_vmin", + "sf_npts", + "sf_xtol" + ] + + if not all([param in results.keys() for param in params_required]): + print(f"ERROR: Not enough parameters for streak finder analysis. Skipping\n" + f"{params_required=}") + return + + radius = results["sf_structure_radius"] + rank = results["sf_structure_rank"] + min_size = results["sf_min_size"] + vmin = results["sf_vmin"] + npts = results["sf_npts"] + xtol = results["sf_xtol"] + + struct = Structure2D(radius, rank) + psf = PatternStreakFinder( + data=data, + mask=pixel_mask_sf, + structure=struct, + min_size=min_size + ) + # Find peaks in a pattern. 
Returns a sparse set of peaks which values are above a threshold + # ``vmin`` that have a supporing set of a size larger than ``npts``. The minimal distance + # between peaks is ``2 * structure.radius`` + peaks = psf.detect_peaks(vmin=vmin, npts=npts) + + # Streak finding algorithm. Starting from the set of seed peaks, the lines are iteratively + # extended with a connectivity structure. + streaks = psf.detect_streaks(peaks=peaks, xtol=xtol, vmin=vmin) + streak_lines = streaks.to_lines() + _, number_of_streaks = streak_lines.shape + print(f"Found {number_of_streaks} streaks") + list_result = [] + for line in streak_lines: # arr(4, n_lines); 0coord x0, y0, x1, y1 + list_result.append(line.tolist()) + results.update({"number_of_streaks": number_of_streaks}) + results.update({"streaks": list_result}) diff --git a/dap/algos/whitefield_correction.py b/dap/algos/whitefield_correction.py new file mode 100644 index 0000000..c6eeed0 --- /dev/null +++ b/dap/algos/whitefield_correction.py @@ -0,0 +1,62 @@ +import numpy as np +import h5py + +def _div(image, whitefield): + image = np.divide( + image, + whitefield, + out=np.zeros_like(image), + where=whitefield != 0 + ) + return image + +def _sub(image, whitefield): + image -= whitefield + return image + +WF_METHODS = { + "div": _div, + "sub": _sub +} + + +def calc_apply_whitefield_correction(results, data): + """ + In-place white field correction of the detector data + """ + results["is_white_field_corrected"] = False + do_whitefield_correction = results.get("do_whitefield_correction", False) + if not do_whitefield_correction: + print(f"No whitefield correction") + return + + params_required = [ + "wf_data_file", + "wf_method", + ] + + if not all([param in results.keys() for param in params_required]): + print(f"ERROR: Not enough parameters for whitefield correction. 
Skipping\n" + f"{params_required=}") + return + + wf_data_file = results["wf_data_file"] + wf_method = results["wf_method"] + + if wf_method not in WF_METHODS.keys(): + print(f"ERROR: Unknown whitefield correction method {wf_method}. Skipping\n" + f"{params_required=}") + return + + # TODO: cache white field data, only reload if file changed + # maybe store checksum in results as "_checksum" + try: + with h5py.File(wf_data_file, "r") as wfile: + whitefield_image = np.asarray(wfile["data/data"]) + except Exception as error: + print(f"ERROR: Can't read whitefield from file {wf_data_file}. Skipping\n" + f"{error=}") + return + + results["is_white_field_corrected"] = True + WF_METHODS[wf_method](data, whitefield_image) diff --git a/dap/worker.py b/dap/worker.py index 63fdca3..c176d6f 100644 --- a/dap/worker.py +++ b/dap/worker.py @@ -2,7 +2,8 @@ import argparse import numpy as np -from algos import calc_apply_aggregation, calc_apply_threshold, calc_mask_pixels, calc_peakfinder_analysis, calc_radial_integration, calc_roi, calc_spi_analysis, JFData +from algos import (calc_apply_aggregation, calc_apply_threshold, calc_mask_pixels, calc_peakfinder_analysis, calc_radial_integration, calc_roi, calc_spi_analysis, + calc_streakfinder_analysis, calc_apply_whitefield_correction, JFData) from utils import Aggregator, BufferedJSON, randskip, read_bit from zmqsocks import ZMQSockets @@ -115,6 +116,14 @@ def work(backend_address, accumulator_host, accumulator_port, visualisation_host calc_peakfinder_analysis(results, pfimage, pixel_mask_pf) # ??? 
+ + # White-field correction and streak finder processing for convergent-beam diffraction + print(f"Applying whitefield correction") + calc_apply_whitefield_correction(results, image) # changes image in place + print(f"Searching streaks") + calc_streakfinder_analysis(results, image, pixel_mask_pf) + print(f"Done\n{results=}") + image, aggregation_is_ready = calc_apply_aggregation(results, image, pixel_mask_pf, aggregator) results["type"] = str(image.dtype) -- 2.49.1 From 68b108655bcc04791325e022469b4d56adbb6117 Mon Sep 17 00:00:00 2001 From: "Dorofeeva Elizaveta (EXT)" Date: Tue, 24 Jun 2025 10:09:04 +0200 Subject: [PATCH 02/43] Ensure wf correctrion to be in-place --- dap/algos/whitefield_correction.py | 16 +++++++++++----- 1 file changed, 11 insertions(+), 5 deletions(-) diff --git a/dap/algos/whitefield_correction.py b/dap/algos/whitefield_correction.py index c6eeed0..3b85fe4 100644 --- a/dap/algos/whitefield_correction.py +++ b/dap/algos/whitefield_correction.py @@ -1,18 +1,23 @@ import numpy as np import h5py + def _div(image, whitefield): - image = np.divide( + np.divide( image, whitefield, - out=np.zeros_like(image), + out=image, where=whitefield != 0 ) - return image + def _sub(image, whitefield): - image -= whitefield - return image + np.subtract( + image, + whitefield, + out=image, + ) + WF_METHODS = { "div": _div, @@ -60,3 +65,4 @@ def calc_apply_whitefield_correction(results, data): results["is_white_field_corrected"] = True WF_METHODS[wf_method](data, whitefield_image) + -- 2.49.1 From 12104e72ef99bf442f0dd0e9b67c193cdd144bd6 Mon Sep 17 00:00:00 2001 From: Lisa Dorofeeva Date: Wed, 25 Jun 2025 08:53:25 +0200 Subject: [PATCH 03/43] CBD streak finder: Correct shape of streaks data --- dap/algos/streakfind.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/dap/algos/streakfind.py b/dap/algos/streakfind.py index af7d0e6..14d7cdc 100644 --- a/dap/algos/streakfind.py +++ b/dap/algos/streakfind.py @@ -53,7 +53,7 @@ def 
calc_streakfinder_analysis(results, data, pixel_mask_sf): # Streak finding algorithm. Starting from the set of seed peaks, the lines are iteratively # extended with a connectivity structure. streaks = psf.detect_streaks(peaks=peaks, xtol=xtol, vmin=vmin) - streak_lines = streaks.to_lines() + streak_lines = streaks.to_lines().T _, number_of_streaks = streak_lines.shape print(f"Found {number_of_streaks} streaks") list_result = [] -- 2.49.1 From 02e15090cde649ba53a76c5a013c8495d3061dcd Mon Sep 17 00:00:00 2001 From: Lisa Dorofeeva Date: Wed, 25 Jun 2025 09:22:14 +0200 Subject: [PATCH 04/43] Add example parameters for streak finder and whitefield correction to README --- README.md | 13 ++++++++++++- 1 file changed, 12 insertions(+), 1 deletion(-) diff --git a/README.md b/README.md index bfe38e4..9d6b375 100644 --- a/README.md +++ b/README.md @@ -268,7 +268,18 @@ Algorithms use input parameters specified in a JSON file provided to worker.py ( "roi_x1": [], "roi_y1": [], "roi_x2": [], - "roi_y2": [] + "roi_y2": [], + "do_whitefield_correction": 1, + "wf_data_file": "/das/work/p22/p22263/whitefield/wf_div.h5", + "wf_method": "div", + "do_streakfinder_analysis": 1, + "sf_structure_radius": 10, + "sf_structure_rank": 2, + "sf_min_size": 7, + "sf_vmin": 75, + "sf_npts": 100, + "sf_xtol": 1.5 + } ``` -- 2.49.1 From cca5decacc9ecfd4ef8538d280715d5367917dff Mon Sep 17 00:00:00 2001 From: Lisa Dorofeeva Date: Wed, 25 Jun 2025 10:21:56 +0200 Subject: [PATCH 05/43] Add whitefield correction error handling --- dap/algos/whitefield_correction.py | 10 +++++++--- 1 file changed, 7 insertions(+), 3 deletions(-) diff --git a/dap/algos/whitefield_correction.py b/dap/algos/whitefield_correction.py index 3b85fe4..b66eddd 100644 --- a/dap/algos/whitefield_correction.py +++ b/dap/algos/whitefield_correction.py @@ -63,6 +63,10 @@ def calc_apply_whitefield_correction(results, data): f"{error=}") return - results["is_white_field_corrected"] = True - WF_METHODS[wf_method](data, whitefield_image) 
- + try: + WF_METHODS[wf_method](data, whitefield_image) + except Exception as error: + print(f"ERROR: White field correction failed.\n" + f"{error=}") + else: + results["is_white_field_corrected"] = True -- 2.49.1 From a10c70028fa224177032e43376ce6ebe51b259c7 Mon Sep 17 00:00:00 2001 From: Lisa Dorofeeva Date: Wed, 25 Jun 2025 11:05:21 +0200 Subject: [PATCH 06/43] For white field correction, allow to specify dataset to read white field from --- README.md | 2 +- dap/algos/whitefield_correction.py | 3 ++- 2 files changed, 3 insertions(+), 2 deletions(-) diff --git a/README.md b/README.md index 9d6b375..372d1bf 100644 --- a/README.md +++ b/README.md @@ -112,7 +112,7 @@ options: Input parameters: * `'do_whitefield_correction': 1/0` - Specifies whether to do in-place white field correction. * `'wf_data_file': str` - Path to the hdf5 file with corrected white field image. - * `'wf_dataset': str` - Name of the dataset containing white field image in the hdf5 file. + * `'wf_dataset': str` [Optional] - Name of the dataset containing white field image in the hdf5 file, default is `"data/data"`. * `'wf_method': 'div'|'sub'` - Method of white field correction - either division or subtraction is supported. Algorithm Output: diff --git a/dap/algos/whitefield_correction.py b/dap/algos/whitefield_correction.py index b66eddd..ad8e07c 100644 --- a/dap/algos/whitefield_correction.py +++ b/dap/algos/whitefield_correction.py @@ -53,11 +53,12 @@ def calc_apply_whitefield_correction(results, data): f"{params_required=}") return + wf_dataset = results.get("wf_dataset", "data/data") # TODO: cache white field data, only reload if file changed # maybe store checksum in results as "_checksum" try: with h5py.File(wf_data_file, "r") as wfile: - whitefield_image = np.asarray(wfile["data/data"]) + whitefield_image = np.asarray(wfile[wf_dataset]) except Exception as error: print(f"ERROR: Can't read whitefield from file {wf_data_file}. 
Skipping\n" f"{error=}") -- 2.49.1 From 84486c6ff950a48802f26d43392fcfb206911985 Mon Sep 17 00:00:00 2001 From: Lisa Dorofeeva Date: Wed, 25 Jun 2025 11:15:43 +0200 Subject: [PATCH 07/43] Allow to apply additional mask that is read out of file - NumPy or hdf5 --- README.md | 14 +++++++++++- dap/algos/__init__.py | 1 + dap/algos/addmaskfile.py | 35 ++++++++++++++++++++++++++++++ dap/algos/jfdata.py | 2 ++ dap/algos/whitefield_correction.py | 5 ++--- 5 files changed, 53 insertions(+), 4 deletions(-) create mode 100644 dap/algos/addmaskfile.py diff --git a/README.md b/README.md index 372d1bf..60ab466 100644 --- a/README.md +++ b/README.md @@ -116,7 +116,7 @@ options: * `'wf_method': 'div'|'sub'` - Method of white field correction - either division or subtraction is supported. Algorithm Output: - * `'is_white_field_corrected': bool` - Indicates whether white field correction took place. + * `'white_field_correction_applied': 1/0` - Indicates whether the algorithm ran successfully. * Image is changed **in-place**. * **streakfinder Algorithm** @@ -231,6 +231,18 @@ options: Use the `'apply_additional_mask': 0/1` - Input flag to enable this functionality. + * **Additional Mask from file** + + Alternative to previous additional masking, mask data is read from specified file. NumPy and HDF5 formats are supported. + + Input parameters: + * `'apply_additional_mask': 1/0` - Input flag to enable this functionality. + * `'mask_file': str` - Path to the hdf5 file with mask data. + * `'mask_ds': str` [Optional] - Name of the dataset containing mask in the hdf5 file, default is `"data/data"`. + + Algorithm Output: + * `'mask_from_file_applied': 1/0` - Indicates whether the algorithm ran successfully. 
+ * **Filter based on pulse picker information** If the event propagation capability is accessible for the detector and the pulse picker information is correctly configured for propagation, the filtration based on pulse picker information becomes feasible by using the diff --git a/dap/algos/__init__.py b/dap/algos/__init__.py index 27bcf04..b9bf2a5 100644 --- a/dap/algos/__init__.py +++ b/dap/algos/__init__.py @@ -1,5 +1,6 @@ from .addmask import calc_apply_additional_mask +from .addmaskfile import calc_apply_additional_mask_from_file from .aggregation import calc_apply_aggregation from .jfdata import JFData from .mask import calc_mask_pixels diff --git a/dap/algos/addmaskfile.py b/dap/algos/addmaskfile.py new file mode 100644 index 0000000..7787195 --- /dev/null +++ b/dap/algos/addmaskfile.py @@ -0,0 +1,35 @@ +import h5py +import numpy as np + + +def calc_apply_additional_mask_from_file(results, pixel_mask_pf): + apply_additional_mask = results.get("apply_additional_mask_from_file", False) + if not apply_additional_mask: + return + results["mask_from_file_applied"] = 0 + mask_file = results.get("mask_file", None) + if not mask_file: + return + mask_dataset = results.get("mask_ds", "data/data") + + # Support for hdf5 and npy + if mask_file.endswith(".npy"): + try: + mask = np.load(mask_file) + except Exception as error: + print(f"Error loading mask data from NumPy file {mask_file}:\n{error}") + return + else: + try: + with h5py.File(mask_file, "r") as mask_file: + mask = np.asarray(mask_file[mask_dataset]) + except Exception as error: + print(f"Error loading mask from hdf5 file {mask_file}:\n{error}") + return + + try: + np.multiply(pixel_mask_pf, mask, out=pixel_mask_pf) + except Exception as error: + print(f"Error applying additional mask from file {mask_file}:\n{error}") + else: + results["mask_from_file_applied"] = 1 diff --git a/dap/algos/jfdata.py b/dap/algos/jfdata.py index bd2fa1e..f204de8 100644 --- a/dap/algos/jfdata.py +++ b/dap/algos/jfdata.py @@ -3,6 
+3,7 @@ import numpy as np import jungfrau_utils as ju from .addmask import calc_apply_additional_mask +from .addmaskfile import calc_apply_additional_mask_from_file class JFData: @@ -58,6 +59,7 @@ class JFData: pixel_mask_pf = np.ascontiguousarray(pixel_mask_corrected) calc_apply_additional_mask(results, pixel_mask_pf) # changes pixel_mask_pf in place + calc_apply_additional_mask_from_file(results, pixel_mask_pf) # changes pixel_mask_pf in place self.id_pixel_mask_corrected = new_id_pixel_mask_corrected self.pixel_mask_pf = pixel_mask_pf diff --git a/dap/algos/whitefield_correction.py b/dap/algos/whitefield_correction.py index ad8e07c..37785e3 100644 --- a/dap/algos/whitefield_correction.py +++ b/dap/algos/whitefield_correction.py @@ -29,12 +29,11 @@ def calc_apply_whitefield_correction(results, data): """ In-place white field correction of the detector data """ - results["is_white_field_corrected"] = False do_whitefield_correction = results.get("do_whitefield_correction", False) if not do_whitefield_correction: - print(f"No whitefield correction") return + results["white_field_correction_applied"] = 0 params_required = [ "wf_data_file", "wf_method", @@ -70,4 +69,4 @@ def calc_apply_whitefield_correction(results, data): print(f"ERROR: White field correction failed.\n" f"{error=}") else: - results["is_white_field_corrected"] = True + results["white_field_correction_applied"] = 1 -- 2.49.1 From a69498ce7e7c089422be8c3a6a58153f5ef80b64 Mon Sep 17 00:00:00 2001 From: Lisa Dorofeeva Date: Wed, 25 Jun 2025 11:30:33 +0200 Subject: [PATCH 08/43] Correct parameter name in README for mask from file --- README.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/README.md b/README.md index 60ab466..c2bc953 100644 --- a/README.md +++ b/README.md @@ -236,7 +236,7 @@ options: Alternative to previous additional masking, mask data is read from specified file. NumPy and HDF5 formats are supported. 
Input parameters: - * `'apply_additional_mask': 1/0` - Input flag to enable this functionality. + * `'apply_additional_mask_from_file': 1/0` - Input flag to enable this functionality. * `'mask_file': str` - Path to the hdf5 file with mask data. * `'mask_ds': str` [Optional] - Name of the dataset containing mask in the hdf5 file, default is `"data/data"`. -- 2.49.1 From 3c48072d8428835471566dfb3c254007ba7177c2 Mon Sep 17 00:00:00 2001 From: "Dorofeeva Elizaveta (EXT)" Date: Wed, 25 Jun 2025 15:32:55 +0200 Subject: [PATCH 09/43] Pixel mask that is read from file is converted to bool --- dap/algos/addmaskfile.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/dap/algos/addmaskfile.py b/dap/algos/addmaskfile.py index 7787195..e62e94d 100644 --- a/dap/algos/addmaskfile.py +++ b/dap/algos/addmaskfile.py @@ -15,14 +15,14 @@ def calc_apply_additional_mask_from_file(results, pixel_mask_pf): # Support for hdf5 and npy if mask_file.endswith(".npy"): try: - mask = np.load(mask_file) + mask = np.asarray(np.load(mask_file), dtype=bool) except Exception as error: print(f"Error loading mask data from NumPy file {mask_file}:\n{error}") return else: try: with h5py.File(mask_file, "r") as mask_file: - mask = np.asarray(mask_file[mask_dataset]) + mask = np.asarray(mask_file[mask_dataset], dtype=bool) except Exception as error: print(f"Error loading mask from hdf5 file {mask_file}:\n{error}") return -- 2.49.1 From 8f66420538dcc88bfb5d33704819768fc433d803 Mon Sep 17 00:00:00 2001 From: Lisa Dorofeeva Date: Mon, 30 Jun 2025 09:13:52 +0200 Subject: [PATCH 10/43] Updated README - update json settings file example --- README.md | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/README.md b/README.md index c2bc953..97f8438 100644 --- a/README.md +++ b/README.md @@ -282,8 +282,10 @@ Algorithms use input parameters specified in a JSON file provided to worker.py ( "roi_x2": [], "roi_y2": [], "do_whitefield_correction": 1, - "wf_data_file": 
"/das/work/p22/p22263/whitefield/wf_div.h5", + "wf_data_file": "/sf/bernina/exp/00m_musterman/res/whitefield/JF07T32V01_wf_div.h5", "wf_method": "div", + "apply_additional_mask_from_file": 1, + "mask_file": "/sf/bernina/exp/00m_musterman/res/mask/JF07T32V01_mask.h5", "do_streakfinder_analysis": 1, "sf_structure_radius": 10, "sf_structure_rank": 2, -- 2.49.1 From 0962f4834941e840539e17779c02a8082cde294e Mon Sep 17 00:00:00 2001 From: Lisa Dorofeeva Date: Mon, 30 Jun 2025 13:26:24 +0200 Subject: [PATCH 11/43] More parameters returned by streak finder: lengths and Bragg counts of all identified streaks --- README.md | 4 +++- dap/algos/streakfind.py | 15 +++++++++++++-- 2 files changed, 16 insertions(+), 3 deletions(-) diff --git a/README.md b/README.md index 97f8438..f3ee580 100644 --- a/README.md +++ b/README.md @@ -136,7 +136,9 @@ options: streak is no more than ``sf_xtol``. Algorithm Output: - * `'number_of_streaks': int` - Indicates the count of identified peaks. + * `'number_of_streaks': int` - Indicates the count of identified streaks. + * `'streak_lengths': list[float]` - Provides the lengths of identified streaks. + * `'bragg_counts': list[float]` - Provides the intensity sum within identified streaks. 
* `'streaks': 4*list[float]` - Provides coordinates of the identified streaks: x0, y0, x1, y1 * **Radial Profile Integration** diff --git a/dap/algos/streakfind.py b/dap/algos/streakfind.py index 14d7cdc..9b66bfb 100644 --- a/dap/algos/streakfind.py +++ b/dap/algos/streakfind.py @@ -6,10 +6,13 @@ Requires Convergent beam streak finder package installed: https://github.com/simply-nicky/streak_finder (note g++ 11 required for building) """ +from math import sqrt, pow from streak_finder import PatternStreakFinder from streak_finder.label import Structure2D +from skimage.measure import profile_line + def calc_streakfinder_analysis(results, data, pixel_mask_sf): do_streakfinder_analysis = results.get("do_streakfinder_analysis", False) @@ -52,8 +55,14 @@ def calc_streakfinder_analysis(results, data, pixel_mask_sf): # Streak finding algorithm. Starting from the set of seed peaks, the lines are iteratively # extended with a connectivity structure. - streaks = psf.detect_streaks(peaks=peaks, xtol=xtol, vmin=vmin) - streak_lines = streaks.to_lines().T + streaks = psf.detect_streaks(peaks=peaks, xtol=xtol, vmin=vmin).to_lines() + streak_lengths = [] + bragg_counts = [] + for streak in streaks: + x0, y0, x1, y1 = streak + streak_lengths.append(sqrt(pow((x1 - x0), 2) + pow((y1 - y0), 2))) + bragg_counts.append(profile_line(data, (x0, y0), (x1, y1))) + streak_lines = streaks.T _, number_of_streaks = streak_lines.shape print(f"Found {number_of_streaks} streaks") list_result = [] @@ -61,3 +70,5 @@ def calc_streakfinder_analysis(results, data, pixel_mask_sf): list_result.append(line.tolist()) results.update({"number_of_streaks": number_of_streaks}) results.update({"streaks": list_result}) + results.update({"streak_lengths": streak_lengths}) + results.update({"bragg_counts": bragg_counts}) -- 2.49.1 From 3e9da9aae51c2cf937c72833ce2a5ce2b08d370b Mon Sep 17 00:00:00 2001 From: "Dorofeeva Elizaveta (EXT)" Date: Mon, 30 Jun 2025 13:47:44 +0200 Subject: [PATCH 12/43] Bug fix in streak 
finder; Mark frames with streaks as a hit --- dap/algos/streakfind.py | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/dap/algos/streakfind.py b/dap/algos/streakfind.py index 9b66bfb..757a40e 100644 --- a/dap/algos/streakfind.py +++ b/dap/algos/streakfind.py @@ -6,6 +6,7 @@ Requires Convergent beam streak finder package installed: https://github.com/simply-nicky/streak_finder (note g++ 11 required for building) """ +import numpy as np from math import sqrt, pow from streak_finder import PatternStreakFinder @@ -61,7 +62,7 @@ def calc_streakfinder_analysis(results, data, pixel_mask_sf): for streak in streaks: x0, y0, x1, y1 = streak streak_lengths.append(sqrt(pow((x1 - x0), 2) + pow((y1 - y0), 2))) - bragg_counts.append(profile_line(data, (x0, y0), (x1, y1))) + bragg_counts.append(float(np.sum(profile_line(data, (x0, y0), (x1, y1))))) streak_lines = streaks.T _, number_of_streaks = streak_lines.shape print(f"Found {number_of_streaks} streaks") @@ -69,6 +70,7 @@ def calc_streakfinder_analysis(results, data, pixel_mask_sf): for line in streak_lines: # arr(4, n_lines); 0coord x0, y0, x1, y1 list_result.append(line.tolist()) results.update({"number_of_streaks": number_of_streaks}) + results.update({"is_hit_frame": number_of_streaks > 0}) results.update({"streaks": list_result}) results.update({"streak_lengths": streak_lengths}) results.update({"bragg_counts": bragg_counts}) -- 2.49.1 From fe23223887708cdb3b3717b1490560c87abc9bb7 Mon Sep 17 00:00:00 2001 From: Lisa Dorofeeva Date: Mon, 30 Jun 2025 13:51:07 +0200 Subject: [PATCH 13/43] Update readme - streak finder now marks frames as hits --- README.md | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/README.md b/README.md index f3ee580..e792a3a 100644 --- a/README.md +++ b/README.md @@ -139,7 +139,8 @@ options: * `'number_of_streaks': int` - Indicates the count of identified streaks. * `'streak_lengths': list[float]` - Provides the lengths of identified streaks. 
* `'bragg_counts': list[float]` - Provides the intensity sum within identified streaks. - * `'streaks': 4*list[float]` - Provides coordinates of the identified streaks: x0, y0, x1, y1 + * `'streaks': 4*list[float]` - Provides coordinates of the identified streaks: x0, y0, x1, y1. + * `'is_hit_frame': True/False` - Marks whether a frame qualifies as a hit based on the number of identified streaks. * **Radial Profile Integration** -- 2.49.1 From 6e695a67346c254bd1291e9d0bbfe4e5cc05b914 Mon Sep 17 00:00:00 2001 From: Lisa Dorofeeva Date: Thu, 3 Jul 2025 15:22:34 +0200 Subject: [PATCH 14/43] New streak finder --- dap/algos/__init__.py | 3 +- dap/algos/streakfind.py | 160 +++++++++++++++++++++-------- dap/algos/whitefield_correction.py | 2 + dap/worker.py | 9 +- 4 files changed, 125 insertions(+), 49 deletions(-) diff --git a/dap/algos/__init__.py b/dap/algos/__init__.py index b9bf2a5..7f0ff39 100644 --- a/dap/algos/__init__.py +++ b/dap/algos/__init__.py @@ -8,8 +8,7 @@ from .peakfind import calc_peakfinder_analysis from .radprof import calc_radial_integration from .roi import calc_roi from .spiana import calc_spi_analysis -from .streakfind import calc_streakfinder_analysis -from .whitefield_correction import calc_apply_whitefield_correction +from .streakfind import calc_cbd_analysis from .thresh import calc_apply_threshold diff --git a/dap/algos/streakfind.py b/dap/algos/streakfind.py index 757a40e..51e1026 100644 --- a/dap/algos/streakfind.py +++ b/dap/algos/streakfind.py @@ -3,31 +3,99 @@ Streak Finder algorithm implemented by CFEL Chapman group Requires Convergent beam streak finder package installed: -https://github.com/simply-nicky/streak_finder -(note g++ 11 required for building) +https://github.com/simply-nicky/streak_finder/tree/swiss_fel +(note g++ 11 required for building, numpy 2+ required) """ +import h5py import numpy as np -from math import sqrt, pow - -from streak_finder import PatternStreakFinder +from streak_finder import CrystData from 
streak_finder.label import Structure2D -from skimage.measure import profile_line +DEFAULT_NUM_THREADS = 16 +def calc_cbd_analysis(results, data, pf_pixel_mask): + try: + cryst_data = _generate_cryst_data(results, data, pf_pixel_mask) + except Exception as error: # Broad exception - we don't want to break anything here + print(f"Error processing CBD data:\n{error}") + results["cbd_error"] = f"Error processing CBD data:\n{error}" + return data -def calc_streakfinder_analysis(results, data, pixel_mask_sf): - do_streakfinder_analysis = results.get("do_streakfinder_analysis", False) - if not do_streakfinder_analysis: - print(f"No streak finder analysis") + try: + _calc_streakfinder_analysis(results, cryst_data) + except Exception as error: # Broad exception - we don't want to break anything here + print(f"Error processing CBD data:\n{error}") + results["cbd_error"] = f"Error processing CBD data:\n{error}" + return cryst_data.snr + +def _generate_cryst_data(results, data, pf_pixel_mask) -> CrystData: + do_snr = results.get("do_snr", False) + if not do_snr: return params_required = [ - "sf_structure_radius", - "sf_structure_rank", + "whitefield_data_file", + "mask_data_file", + "std_data_file", + "scale_whitefield", # Bool + ] + + if not all([param in results.keys() for param in params_required]): + raise ValueError(f"ERROR: Not enough parameters for CBD correction. 
Skipping\n" + f"{params_required=}") + + whitefield_data_file = results["whitefield_data_file"] + mask_data_file = results["mask_data_file"] + std_data_file = results["std_data_file"] + scale_whitefield = results["scale_whitefield"] + + # Using CXI Store specification as default + whitefield_dataset = results.get("whitefield_dataset", "entry/crystallography/whitefield") + mask_dataset = results.get("mask_dataset", "entry/instrument/detector/mask") + std_dataset = results.get("std_dataset", "entry/crystallography/std") + + num_threads = results.get("num_threads", DEFAULT_NUM_THREADS) + + with h5py.File(whitefield_data_file, "r") as hf: + whitefield = np.asarray(hf[whitefield_dataset]) + + with h5py.File(mask_data_file, "r") as hf: + mask = np.asarray(hf[mask_dataset]) + + with h5py.File(std_data_file, "r") as hf: + std = np.asarray(hf[std_dataset]) + + data = CrystData( + data=data.reshape((-1,) + data.shape[-2:]), + mask=mask*pf_pixel_mask, + std=std, + whitefield=whitefield + ) + if scale_whitefield: + data = data.scale_whitefield(method='median', num_threads=num_threads) + + return data + +def _calc_streakfinder_analysis(results, cryst_data: CrystData): + do_streakfinder_analysis = results.get("do_streakfinder_analysis", False) + if not do_streakfinder_analysis: + return + + params_required = [ + "sf_peak_structure_radius", + "sf_peak_structure_rank", + "sf_streak_structure_radius", + "sf_streak_structure_rank", + "sf_peak_vmin", + "sf_streak_vmin", "sf_min_size", - "sf_vmin", "sf_npts", - "sf_xtol" + "sf_xtol", + "sf_nfa", + + "sf_num_threads", + # "beam_center_x", + # "beam_center_y" ] if not all([param in results.keys() for param in params_required]): @@ -35,40 +103,50 @@ def calc_streakfinder_analysis(results, data, pixel_mask_sf): f"{params_required=}") return - radius = results["sf_structure_radius"] - rank = results["sf_structure_rank"] + peak_structure_radius = results["sf_peak_structure_radius"] # peak + peak_structure_rank = 
results["sf_peak_structure_rank"] + streak_structure_radius = results["sf_streak_structure_radius"] # streak + streak_structure_rank = results["sf_streak_structure_rank"] + peak_vmin = results["sf_peak_vmin"] # peak + streak_vmin = results["sf_streak_vmin"] # streak min_size = results["sf_min_size"] - vmin = results["sf_vmin"] npts = results["sf_npts"] xtol = results["sf_xtol"] + nfa = results["sf_nfa"] + num_threads = results["sf_num_threads"] - struct = Structure2D(radius, rank) - psf = PatternStreakFinder( - data=data, - mask=pixel_mask_sf, - structure=struct, - min_size=min_size - ) - # Find peaks in a pattern. Returns a sparse set of peaks which values are above a threshold - # ``vmin`` that have a supporing set of a size larger than ``npts``. The minimal distance - # between peaks is ``2 * structure.radius`` - peaks = psf.detect_peaks(vmin=vmin, npts=npts) + x_center = results.get("beam_center_x", None) + y_center = results.get("beam_center_y", None) - # Streak finding algorithm. Starting from the set of seed peaks, the lines are iteratively - # extended with a connectivity structure. 
- streaks = psf.detect_streaks(peaks=peaks, xtol=xtol, vmin=vmin).to_lines() - streak_lengths = [] - bragg_counts = [] - for streak in streaks: - x0, y0, x1, y1 = streak - streak_lengths.append(sqrt(pow((x1 - x0), 2) + pow((y1 - y0), 2))) - bragg_counts.append(float(np.sum(profile_line(data, (x0, y0), (x1, y1))))) - streak_lines = streaks.T + peaks_structure = Structure2D(peak_structure_radius, peak_structure_rank) + streaks_structure = Structure2D(streak_structure_radius, streak_structure_rank) + + + det_obj = cryst_data.streak_detector(streaks_structure) + peaks = det_obj.detect_peaks(peak_vmin, npts, peaks_structure, num_threads) + detected = det_obj.detect_streaks(peaks, xtol, streak_vmin, min_size, nfa=nfa, + num_threads=num_threads) + if isinstance(detected, list): + detected = detected[0] + + streaks = det_obj.to_streaks(detected) + + if x_center is not None and y_center is not None: + streaks = streaks.concentric_only(x_center, y_center) + + streak_lines = streaks.lines + streak_lengths = np.sqrt( + np.pow((streak_lines[..., 2] - streak_lines[..., 0]), 2) + + np.pow((streak_lines[..., 2] - streak_lines[..., 0]), 2) + ).tolist() + + streak_lines = streak_lines.T _, number_of_streaks = streak_lines.shape print(f"Found {number_of_streaks} streaks") - list_result = [] - for line in streak_lines: # arr(4, n_lines); 0coord x0, y0, x1, y1 - list_result.append(line.tolist()) + + list_result = [line.tolist() for line in streak_lines] # arr(4, n_lines); 0coord x0, y0, x1, y1 + bragg_counts = [streak.total_mass() for streak in detected.streaks.values()] + results.update({"number_of_streaks": number_of_streaks}) results.update({"is_hit_frame": number_of_streaks > 0}) results.update({"streaks": list_result}) diff --git a/dap/algos/whitefield_correction.py b/dap/algos/whitefield_correction.py index 37785e3..cae9c14 100644 --- a/dap/algos/whitefield_correction.py +++ b/dap/algos/whitefield_correction.py @@ -70,3 +70,5 @@ def calc_apply_whitefield_correction(results, 
data): f"{error=}") else: results["white_field_correction_applied"] = 1 + + return whitefield_image \ No newline at end of file diff --git a/dap/worker.py b/dap/worker.py index c176d6f..30b31eb 100644 --- a/dap/worker.py +++ b/dap/worker.py @@ -3,7 +3,7 @@ import argparse import numpy as np from algos import (calc_apply_aggregation, calc_apply_threshold, calc_mask_pixels, calc_peakfinder_analysis, calc_radial_integration, calc_roi, calc_spi_analysis, - calc_streakfinder_analysis, calc_apply_whitefield_correction, JFData) + calc_cbd_analysis, JFData) from utils import Aggregator, BufferedJSON, randskip, read_bit from zmqsocks import ZMQSockets @@ -117,11 +117,8 @@ def work(backend_address, accumulator_host, accumulator_port, visualisation_host # ??? - # White-field correction and streak finder processing for convergent-beam diffraction - print(f"Applying whitefield correction") - calc_apply_whitefield_correction(results, image) # changes image in place - print(f"Searching streaks") - calc_streakfinder_analysis(results, image, pixel_mask_pf) + # Correction and streak finder processing for convergent-beam diffraction + image = calc_cbd_analysis(results, image, pixel_mask_pf) print(f"Done\n{results=}") image, aggregation_is_ready = calc_apply_aggregation(results, image, pixel_mask_pf, aggregator) -- 2.49.1 From 2852a5320af7d2421ab686dac27b01ac3047e8d8 Mon Sep 17 00:00:00 2001 From: "Dorofeeva Elizaveta (EXT)" Date: Fri, 4 Jul 2025 08:43:28 +0200 Subject: [PATCH 15/43] Bug fixes in new streak finder --- dap/algos/streakfind.py | 14 ++++++++------ 1 file changed, 8 insertions(+), 6 deletions(-) diff --git a/dap/algos/streakfind.py b/dap/algos/streakfind.py index 51e1026..67fd5c3 100644 --- a/dap/algos/streakfind.py +++ b/dap/algos/streakfind.py @@ -14,6 +14,10 @@ from streak_finder.label import Structure2D DEFAULT_NUM_THREADS = 16 def calc_cbd_analysis(results, data, pf_pixel_mask): + do_snr = results.get("do_snr", False) + if not do_snr: + return data + try: cryst_data 
= _generate_cryst_data(results, data, pf_pixel_mask) except Exception as error: # Broad exception - we don't want to break anything here @@ -26,13 +30,10 @@ def calc_cbd_analysis(results, data, pf_pixel_mask): except Exception as error: # Broad exception - we don't want to break anything here print(f"Error processing CBD data:\n{error}") results["cbd_error"] = f"Error processing CBD data:\n{error}" - return cryst_data.snr + print(f"Returning data shape {cryst_data.snr[0].shape}") + return cryst_data.snr[0] def _generate_cryst_data(results, data, pf_pixel_mask) -> CrystData: - do_snr = results.get("do_snr", False) - if not do_snr: - return - params_required = [ "whitefield_data_file", "mask_data_file", @@ -73,7 +74,8 @@ def _generate_cryst_data(results, data, pf_pixel_mask) -> CrystData: ) if scale_whitefield: data = data.scale_whitefield(method='median', num_threads=num_threads) - + + data = data.update_snr() return data def _calc_streakfinder_analysis(results, cryst_data: CrystData): -- 2.49.1 From 6ff5e54be770ef9ab781d1aaca413dc6f6398c73 Mon Sep 17 00:00:00 2001 From: Lisa Dorofeeva Date: Fri, 4 Jul 2025 08:55:20 +0200 Subject: [PATCH 16/43] Return early if no streaks detected --- dap/algos/streakfind.py | 8 ++++++++ 1 file changed, 8 insertions(+) diff --git a/dap/algos/streakfind.py b/dap/algos/streakfind.py index 67fd5c3..04b7372 100644 --- a/dap/algos/streakfind.py +++ b/dap/algos/streakfind.py @@ -128,6 +128,14 @@ def _calc_streakfinder_analysis(results, cryst_data: CrystData): peaks = det_obj.detect_peaks(peak_vmin, npts, peaks_structure, num_threads) detected = det_obj.detect_streaks(peaks, xtol, streak_vmin, min_size, nfa=nfa, num_threads=num_threads) + if not detected.streaks: + results.update({"number_of_streaks": 0}) + results.update({"is_hit_frame": False}) + results.update({"streaks": []}) + results.update({"streak_lengths": []}) + results.update({"bragg_counts": []}) + return + if isinstance(detected, list): detected = detected[0] -- 2.49.1 From 
a87b84e6c288c693dc6ffa3eb363b92a26408bc0 Mon Sep 17 00:00:00 2001 From: Lisa Dorofeeva Date: Fri, 4 Jul 2025 08:56:45 +0200 Subject: [PATCH 17/43] Correct order of calls in streakfind --- dap/algos/streakfind.py | 7 ++++--- 1 file changed, 4 insertions(+), 3 deletions(-) diff --git a/dap/algos/streakfind.py b/dap/algos/streakfind.py index 04b7372..98ca4e2 100644 --- a/dap/algos/streakfind.py +++ b/dap/algos/streakfind.py @@ -128,6 +128,10 @@ def _calc_streakfinder_analysis(results, cryst_data: CrystData): peaks = det_obj.detect_peaks(peak_vmin, npts, peaks_structure, num_threads) detected = det_obj.detect_streaks(peaks, xtol, streak_vmin, min_size, nfa=nfa, num_threads=num_threads) + + if isinstance(detected, list): + detected = detected[0] + if not detected.streaks: results.update({"number_of_streaks": 0}) results.update({"is_hit_frame": False}) @@ -136,9 +140,6 @@ def _calc_streakfinder_analysis(results, cryst_data: CrystData): results.update({"bragg_counts": []}) return - if isinstance(detected, list): - detected = detected[0] - streaks = det_obj.to_streaks(detected) if x_center is not None and y_center is not None: -- 2.49.1 From 2e066ef3a7d8d2fd2a68e1359bea452a050ef2b9 Mon Sep 17 00:00:00 2001 From: Lisa Dorofeeva Date: Fri, 4 Jul 2025 10:55:04 +0200 Subject: [PATCH 18/43] Have mask and whitefield also available as it was outside of streak finder --- dap/algos/__init__.py | 3 ++- dap/algos/streakfind.py | 11 ++++++++--- dap/worker.py | 9 ++++++--- 3 files changed, 16 insertions(+), 7 deletions(-) diff --git a/dap/algos/__init__.py b/dap/algos/__init__.py index 7f0ff39..b9bf2a5 100644 --- a/dap/algos/__init__.py +++ b/dap/algos/__init__.py @@ -8,7 +8,8 @@ from .peakfind import calc_peakfinder_analysis from .radprof import calc_radial_integration from .roi import calc_roi from .spiana import calc_spi_analysis -from .streakfind import calc_cbd_analysis +from .streakfind import calc_streakfinder_analysis +from .whitefield_correction import 
calc_apply_whitefield_correction from .thresh import calc_apply_threshold diff --git a/dap/algos/streakfind.py b/dap/algos/streakfind.py index 98ca4e2..ee47f1f 100644 --- a/dap/algos/streakfind.py +++ b/dap/algos/streakfind.py @@ -3,7 +3,7 @@ Streak Finder algorithm implemented by CFEL Chapman group Requires Convergent beam streak finder package installed: -https://github.com/simply-nicky/streak_finder/tree/swiss_fel +https://github.com/simply-nicky/streak_finder/ (note g++ 11 required for building, numpy 2+ required) """ import h5py @@ -13,9 +13,10 @@ from streak_finder.label import Structure2D DEFAULT_NUM_THREADS = 16 -def calc_cbd_analysis(results, data, pf_pixel_mask): +def calc_streakfinder_analysis(results, data, pf_pixel_mask): do_snr = results.get("do_snr", False) - if not do_snr: + do_streakfinder_analysis = results.get("do_streakfinder_analysis", False) + if not do_snr and not do_streakfinder_analysis: return data try: @@ -30,6 +31,10 @@ def calc_cbd_analysis(results, data, pf_pixel_mask): except Exception as error: # Broad exception - we don't want to break anything here print(f"Error processing CBD data:\n{error}") results["cbd_error"] = f"Error processing CBD data:\n{error}" + + if not do_snr: + return data + print(f"Returning data shape {cryst_data.snr[0].shape}") return cryst_data.snr[0] diff --git a/dap/worker.py b/dap/worker.py index 30b31eb..8ea7e66 100644 --- a/dap/worker.py +++ b/dap/worker.py @@ -3,7 +3,7 @@ import argparse import numpy as np from algos import (calc_apply_aggregation, calc_apply_threshold, calc_mask_pixels, calc_peakfinder_analysis, calc_radial_integration, calc_roi, calc_spi_analysis, - calc_cbd_analysis, JFData) + calc_apply_whitefield_correction, calc_streakfinder_analysis, JFData) from utils import Aggregator, BufferedJSON, randskip, read_bit from zmqsocks import ZMQSockets @@ -117,8 +117,11 @@ def work(backend_address, accumulator_host, accumulator_port, visualisation_host # ??? 
- # Correction and streak finder processing for convergent-beam diffraction - image = calc_cbd_analysis(results, image, pixel_mask_pf) + # White-field correction and streak finder processing for convergent-beam diffraction + print(f"Applying whitefield correction") + calc_apply_whitefield_correction(results, image) # changes image in place + print(f"Searching streaks") + calc_streakfinder_analysis(results, image, pixel_mask_pf) print(f"Done\n{results=}") image, aggregation_is_ready = calc_apply_aggregation(results, image, pixel_mask_pf, aggregator) -- 2.49.1 From ff532af0d6624d713bc5f461cf7c33f8a16c2a2e Mon Sep 17 00:00:00 2001 From: Lisa Dorofeeva Date: Fri, 4 Jul 2025 11:14:32 +0200 Subject: [PATCH 19/43] Chane image in-place if snr selected; Use concentric mask --- dap/algos/streakfind.py | 7 ++++--- dap/worker.py | 2 +- 2 files changed, 5 insertions(+), 4 deletions(-) diff --git a/dap/algos/streakfind.py b/dap/algos/streakfind.py index ee47f1f..8e99e07 100644 --- a/dap/algos/streakfind.py +++ b/dap/algos/streakfind.py @@ -148,9 +148,9 @@ def _calc_streakfinder_analysis(results, cryst_data: CrystData): streaks = det_obj.to_streaks(detected) if x_center is not None and y_center is not None: - streaks = streaks.concentric_only(x_center, y_center) + streaks_mask = streaks.concentric_only(x_center, y_center) - streak_lines = streaks.lines + streak_lines = streaks.lines[streaks_mask] streak_lengths = np.sqrt( np.pow((streak_lines[..., 2] - streak_lines[..., 0]), 2) + np.pow((streak_lines[..., 2] - streak_lines[..., 0]), 2) @@ -161,7 +161,8 @@ def _calc_streakfinder_analysis(results, cryst_data: CrystData): print(f"Found {number_of_streaks} streaks") list_result = [line.tolist() for line in streak_lines] # arr(4, n_lines); 0coord x0, y0, x1, y1 - bragg_counts = [streak.total_mass() for streak in detected.streaks.values()] + detected_streaks = np.asarray(detected.streaks.values())[streaks_mask] + bragg_counts = [streak.total_mass() for streak in detected_streaks] 
results.update({"number_of_streaks": number_of_streaks}) results.update({"is_hit_frame": number_of_streaks > 0}) diff --git a/dap/worker.py b/dap/worker.py index 8ea7e66..02f0c0b 100644 --- a/dap/worker.py +++ b/dap/worker.py @@ -121,7 +121,7 @@ def work(backend_address, accumulator_host, accumulator_port, visualisation_host print(f"Applying whitefield correction") calc_apply_whitefield_correction(results, image) # changes image in place print(f"Searching streaks") - calc_streakfinder_analysis(results, image, pixel_mask_pf) + image = calc_streakfinder_analysis(results, image, pixel_mask_pf) # changes image in place is do_snr=True print(f"Done\n{results=}") image, aggregation_is_ready = calc_apply_aggregation(results, image, pixel_mask_pf, aggregator) -- 2.49.1 From ca6ef2453ab83d7b506ed2c82ea7cbebbdb474d6 Mon Sep 17 00:00:00 2001 From: "Dorofeeva Elizaveta (EXT)" Date: Fri, 4 Jul 2025 11:57:08 +0200 Subject: [PATCH 20/43] Bug fix in streak finder; TODO: mask from file is not refreshed on file change --- dap/algos/streakfind.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/dap/algos/streakfind.py b/dap/algos/streakfind.py index 8e99e07..087f74d 100644 --- a/dap/algos/streakfind.py +++ b/dap/algos/streakfind.py @@ -161,7 +161,7 @@ def _calc_streakfinder_analysis(results, cryst_data: CrystData): print(f"Found {number_of_streaks} streaks") list_result = [line.tolist() for line in streak_lines] # arr(4, n_lines); 0coord x0, y0, x1, y1 - detected_streaks = np.asarray(detected.streaks.values())[streaks_mask] + detected_streaks = np.asarray(list(detected.streaks.values()))[streaks_mask] bragg_counts = [streak.total_mass() for streak in detected_streaks] results.update({"number_of_streaks": number_of_streaks}) -- 2.49.1 From c6d10b2faed077dfe2bdffd45b4e20948ffdd4a0 Mon Sep 17 00:00:00 2001 From: Lisa Dorofeeva Date: Mon, 7 Jul 2025 09:00:57 +0200 Subject: [PATCH 21/43] Bugfixes, add mask rois to streak finder; Allows masking out bad rois or a part of 
sensor to speed up --- dap/algos/streakfind.py | 19 +++++++++++++------ 1 file changed, 13 insertions(+), 6 deletions(-) diff --git a/dap/algos/streakfind.py b/dap/algos/streakfind.py index 8e99e07..1b32854 100644 --- a/dap/algos/streakfind.py +++ b/dap/algos/streakfind.py @@ -26,17 +26,18 @@ def calc_streakfinder_analysis(results, data, pf_pixel_mask): results["cbd_error"] = f"Error processing CBD data:\n{error}" return data + if do_snr: + # Changes data and mask in-place + data = cryst_data.snr[0].copy() + np.multiply(pf_pixel_mask, cryst_data.mask, out=pf_pixel_mask) + try: _calc_streakfinder_analysis(results, cryst_data) except Exception as error: # Broad exception - we don't want to break anything here print(f"Error processing CBD data:\n{error}") results["cbd_error"] = f"Error processing CBD data:\n{error}" - if not do_snr: - return data - - print(f"Returning data shape {cryst_data.snr[0].shape}") - return cryst_data.snr[0] + return data def _generate_cryst_data(results, data, pf_pixel_mask) -> CrystData: params_required = [ @@ -66,7 +67,7 @@ def _generate_cryst_data(results, data, pf_pixel_mask) -> CrystData: whitefield = np.asarray(hf[whitefield_dataset]) with h5py.File(mask_data_file, "r") as hf: - mask = np.asarray(hf[mask_dataset]) + mask = np.asarray(hf[mask_dataset], dtype=np.bool) with h5py.File(std_data_file, "r") as hf: std = np.asarray(hf[std_dataset]) @@ -125,6 +126,11 @@ def _calc_streakfinder_analysis(results, cryst_data: CrystData): x_center = results.get("beam_center_x", None) y_center = results.get("beam_center_y", None) + mask_rois = results.get("sf_mask_rois", []) # list of [y_min, y_max, x_min, x_max] + + for mask_roi in mask_rois: + cryst_data = cryst_data.mask_region(mask_roi) + peaks_structure = Structure2D(peak_structure_radius, peak_structure_rank) streaks_structure = Structure2D(streak_structure_radius, streak_structure_rank) @@ -161,6 +167,7 @@ def _calc_streakfinder_analysis(results, cryst_data: CrystData): print(f"Found 
{number_of_streaks} streaks") list_result = [line.tolist() for line in streak_lines] # arr(4, n_lines); 0coord x0, y0, x1, y1 + detected_streaks = np.asarray(detected.streaks.values())[streaks_mask] bragg_counts = [streak.total_mass() for streak in detected_streaks] -- 2.49.1 From 76b8d957dde75e907c39f730afeec797615790a6 Mon Sep 17 00:00:00 2001 From: Lisa Dorofeeva Date: Mon, 7 Jul 2025 09:28:41 +0200 Subject: [PATCH 22/43] Code cleanup; TODO: update README --- README.md | 26 ----------- dap/algos/__init__.py | 2 - dap/algos/addmaskfile.py | 35 -------------- dap/algos/jfdata.py | 2 - dap/algos/streakfind.py | 5 +- dap/algos/whitefield_correction.py | 74 ------------------------------ dap/worker.py | 13 +++--- 7 files changed, 7 insertions(+), 150 deletions(-) delete mode 100644 dap/algos/addmaskfile.py delete mode 100644 dap/algos/whitefield_correction.py diff --git a/README.md b/README.md index e792a3a..fe1b7a7 100644 --- a/README.md +++ b/README.md @@ -105,20 +105,6 @@ options: * `'spot_x/spot_y/spot_intensity': 3*list[float]` - Provides coordinates and intensity of the identified peaks within the frame. * `'is_hit_frame': True/False` - Marks whether a frame qualifies as a hit based on the number of identified peaks exceeding the defined threshold. - * **White field correction Algorithm** - - Does the IN PLACE white field correction of the image - - Input parameters: - * `'do_whitefield_correction': 1/0` - Specifies whether to do in-place white field correction. - * `'wf_data_file': str` - Path to the hdf5 file with corrected white field image. - * `'wf_dataset': str` [Optional] - Name of the dataset containing white field image in the hdf5 file, default is `"data/data"`. - * `'wf_method': 'div'|'sub'` - Method of white field correction - either division or subtraction is supported. - - Algorithm Output: - * `'white_field_correction_applied': 1/0` - Indicates whether the algorithm ran successfully. - * Image is changed **in-place**. 
- * **streakfinder Algorithm** This algorithm is using [streak-finder package](https://github.com/simply-nicky/streak_finder) - a connection-based streak finding algorithm for convergent beam diffraction patterns. @@ -234,18 +220,6 @@ options: Use the `'apply_additional_mask': 0/1` - Input flag to enable this functionality. - * **Additional Mask from file** - - Alternative to previous additional masking, mask data is read from specified file. NumPy and HDF5 formats are supported. - - Input parameters: - * `'apply_additional_mask_from_file': 1/0` - Input flag to enable this functionality. - * `'mask_file': str` - Path to the hdf5 file with mask data. - * `'mask_ds': str` [Optional] - Name of the dataset containing mask in the hdf5 file, default is `"data/data"`. - - Algorithm Output: - * `'mask_from_file_applied': 1/0` - Indicates whether the algorithm ran successfully. - * **Filter based on pulse picker information** If the event propagation capability is accessible for the detector and the pulse picker information is correctly configured for propagation, the filtration based on pulse picker information becomes feasible by using the diff --git a/dap/algos/__init__.py b/dap/algos/__init__.py index b9bf2a5..501d88d 100644 --- a/dap/algos/__init__.py +++ b/dap/algos/__init__.py @@ -1,6 +1,5 @@ from .addmask import calc_apply_additional_mask -from .addmaskfile import calc_apply_additional_mask_from_file from .aggregation import calc_apply_aggregation from .jfdata import JFData from .mask import calc_mask_pixels @@ -9,7 +8,6 @@ from .radprof import calc_radial_integration from .roi import calc_roi from .spiana import calc_spi_analysis from .streakfind import calc_streakfinder_analysis -from .whitefield_correction import calc_apply_whitefield_correction from .thresh import calc_apply_threshold diff --git a/dap/algos/addmaskfile.py b/dap/algos/addmaskfile.py deleted file mode 100644 index e62e94d..0000000 --- a/dap/algos/addmaskfile.py +++ /dev/null @@ -1,35 +0,0 @@ 
-import h5py -import numpy as np - - -def calc_apply_additional_mask_from_file(results, pixel_mask_pf): - apply_additional_mask = results.get("apply_additional_mask_from_file", False) - if not apply_additional_mask: - return - results["mask_from_file_applied"] = 0 - mask_file = results.get("mask_file", None) - if not mask_file: - return - mask_dataset = results.get("mask_ds", "data/data") - - # Support for hdf5 and npy - if mask_file.endswith(".npy"): - try: - mask = np.asarray(np.load(mask_file), dtype=bool) - except Exception as error: - print(f"Error loading mask data from NumPy file {mask_file}:\n{error}") - return - else: - try: - with h5py.File(mask_file, "r") as mask_file: - mask = np.asarray(mask_file[mask_dataset], dtype=bool) - except Exception as error: - print(f"Error loading mask from hdf5 file {mask_file}:\n{error}") - return - - try: - np.multiply(pixel_mask_pf, mask, out=pixel_mask_pf) - except Exception as error: - print(f"Error applying additional mask from file {mask_file}:\n{error}") - else: - results["mask_from_file_applied"] = 1 diff --git a/dap/algos/jfdata.py b/dap/algos/jfdata.py index f204de8..bd2fa1e 100644 --- a/dap/algos/jfdata.py +++ b/dap/algos/jfdata.py @@ -3,7 +3,6 @@ import numpy as np import jungfrau_utils as ju from .addmask import calc_apply_additional_mask -from .addmaskfile import calc_apply_additional_mask_from_file class JFData: @@ -59,7 +58,6 @@ class JFData: pixel_mask_pf = np.ascontiguousarray(pixel_mask_corrected) calc_apply_additional_mask(results, pixel_mask_pf) # changes pixel_mask_pf in place - calc_apply_additional_mask_from_file(results, pixel_mask_pf) # changes pixel_mask_pf in place self.id_pixel_mask_corrected = new_id_pixel_mask_corrected self.pixel_mask_pf = pixel_mask_pf diff --git a/dap/algos/streakfind.py b/dap/algos/streakfind.py index 7584a23..f4c652f 100644 --- a/dap/algos/streakfind.py +++ b/dap/algos/streakfind.py @@ -100,14 +100,11 @@ def _calc_streakfinder_analysis(results, cryst_data: CrystData): 
"sf_npts", "sf_xtol", "sf_nfa", - "sf_num_threads", - # "beam_center_x", - # "beam_center_y" ] if not all([param in results.keys() for param in params_required]): - print(f"ERROR: Not enough parameters for streak finder analysis. Skipping\n" + print(f"ERROR: Not enough parameters for streak finder analysis. Skipping.\n" f"{params_required=}") return diff --git a/dap/algos/whitefield_correction.py b/dap/algos/whitefield_correction.py deleted file mode 100644 index cae9c14..0000000 --- a/dap/algos/whitefield_correction.py +++ /dev/null @@ -1,74 +0,0 @@ -import numpy as np -import h5py - - -def _div(image, whitefield): - np.divide( - image, - whitefield, - out=image, - where=whitefield != 0 - ) - - -def _sub(image, whitefield): - np.subtract( - image, - whitefield, - out=image, - ) - - -WF_METHODS = { - "div": _div, - "sub": _sub -} - - -def calc_apply_whitefield_correction(results, data): - """ - In-place white field correction of the detector data - """ - do_whitefield_correction = results.get("do_whitefield_correction", False) - if not do_whitefield_correction: - return - - results["white_field_correction_applied"] = 0 - params_required = [ - "wf_data_file", - "wf_method", - ] - - if not all([param in results.keys() for param in params_required]): - print(f"ERROR: Not enough parameters for whitefield correction. Skipping\n" - f"{params_required=}") - return - - wf_data_file = results["wf_data_file"] - wf_method = results["wf_method"] - - if wf_method not in WF_METHODS.keys(): - print(f"ERROR: Unknown whitefield correction method {wf_method}. Skipping\n" - f"{params_required=}") - return - - wf_dataset = results.get("wf_dataset", "data/data") - # TODO: cache white field data, only reload if file changed - # maybe store checksum in results as "_checksum" - try: - with h5py.File(wf_data_file, "r") as wfile: - whitefield_image = np.asarray(wfile[wf_dataset]) - except Exception as error: - print(f"ERROR: Can't read whitefield from file {wf_data_file}. 
Skipping\n" - f"{error=}") - return - - try: - WF_METHODS[wf_method](data, whitefield_image) - except Exception as error: - print(f"ERROR: White field correction failed.\n" - f"{error=}") - else: - results["white_field_correction_applied"] = 1 - - return whitefield_image \ No newline at end of file diff --git a/dap/worker.py b/dap/worker.py index 02f0c0b..42f2e13 100644 --- a/dap/worker.py +++ b/dap/worker.py @@ -2,8 +2,9 @@ import argparse import numpy as np -from algos import (calc_apply_aggregation, calc_apply_threshold, calc_mask_pixels, calc_peakfinder_analysis, calc_radial_integration, calc_roi, calc_spi_analysis, - calc_apply_whitefield_correction, calc_streakfinder_analysis, JFData) +from algos import (calc_apply_aggregation, calc_apply_threshold, calc_mask_pixels, calc_peakfinder_analysis, + calc_radial_integration, calc_roi, calc_spi_analysis, + calc_streakfinder_analysis, JFData) from utils import Aggregator, BufferedJSON, randskip, read_bit from zmqsocks import ZMQSockets @@ -117,11 +118,9 @@ def work(backend_address, accumulator_host, accumulator_port, visualisation_host # ??? 
- # White-field correction and streak finder processing for convergent-beam diffraction - print(f"Applying whitefield correction") - calc_apply_whitefield_correction(results, image) # changes image in place - print(f"Searching streaks") - image = calc_streakfinder_analysis(results, image, pixel_mask_pf) # changes image in place is do_snr=True + # Streak finder processing for convergent-beam diffraction experiments + # changes image and mask in place if do_snr=True in parameters file + image = calc_streakfinder_analysis(results, image, pixel_mask_pf) print(f"Done\n{results=}") image, aggregation_is_ready = calc_apply_aggregation(results, image, pixel_mask_pf, aggregator) -- 2.49.1 From 8e4301c443f40e90256cd667d7ccb96b5c7e36ff Mon Sep 17 00:00:00 2001 From: Lisa Dorofeeva Date: Mon, 7 Jul 2025 09:56:25 +0200 Subject: [PATCH 23/43] Update README with details of streak finder input and output parameters as well as example settings json --- README.md | 85 +++++++++++++++++++++++++++++++++++++++++++------------ 1 file changed, 67 insertions(+), 18 deletions(-) diff --git a/README.md b/README.md index fe1b7a7..a177cf5 100644 --- a/README.md +++ b/README.md @@ -110,16 +110,31 @@ options: This algorithm is using [streak-finder package](https://github.com/simply-nicky/streak_finder) - a connection-based streak finding algorithm for convergent beam diffraction patterns. Input parameters: + * `'do_snr': 1/0` - Specifies whether to do whitefield and std correction, if selected - changes image and mask in place. + * `'std_data_file': str` - hdf5 data file containing pre-calculated std. + * `'std_dataset': str` - [optional] dataset containing pre-calculated std, defaults to `'entry/crystallography/std'`. + * `'whitefield_data_file': str` - hdf5 data file containing pre-calculated white field. + * `'whitefield_dataset': str` - [optional] dataset containing pre-calculated white field, defaults to `'entry/crystallography/whitefield'`. 
+ * `'mask_data_file': str` - hdf5 data file containing pre-calculated mask. + * `'mask_dataset': str` - [optional] dataset containing pre-calculated mask, defaults to `'entry/instrument/detector/mask'`. + * `'scale_whitefield': 1/0` - Specifies whether to scale whitefield to signal, useful if intensity jumps. + * `'do_streakfinder_analysis': 1/0` - Specifies whether to execute the streak-finder algorithm. - * `'sf_structure_radius': int` - Connectivity structure radius. - * `'sf_structure_rank': int` - Connectivity structure rank. - * `'sf_min_size': float` - Minimum number of linelets required in a detected streak. - * `'sf_vmin': float` - Peak threshold. All peaks with values lower than ``sf_vmin`` are discarded. + * `'sf_peak_structure_radius': int` - Connectivity structure radius for *peaks* detection. + * `'sf_peak_structure_rank': int` - Connectivity structure rank for *peaks* detection. + * `'sf_peak_vmin': float` - Peak threshold. All peaks with values lower than ``sf_peak_vmin`` are discarded. * `'sf_npts': int` - Support size threshold. The support structure is a connected set of pixels which - value is above the threshold ``sf_vmin``. A peak is discarded is the size of support + value is above the threshold ``sf_peak_vmin``. A peak is discarded is the size of support set is lower than ``sf_npts``. + * `'sf_streak_structure_radius': int` - Connectivity structure radius for *streaks* detection. + * `'sf_streak_structure_rank': int` - Connectivity structure rank for *streaks* detection. + * `'sf_streak_vmin': float` - Streak threshold. All streaks with values lower than ``sf_vmin`` are discarded. + * `'sf_min_size': float` - Minimum number of linelets required in a detected streak. * `'sf_xtol': float` - Distance threshold. A new linelet is added to a streak if it's distance to the streak is no more than ``sf_xtol``. + * `'sf_nfa': 1` - Number of false alarms, allowed number of unaligned points in a streak. 
+ * `'sf_num_threads': int` - Number of threads to use for peak finder algorithm + * `'sf_mask_rois': list[(int, int, int, int)]` - [optional] list of `(y_min, y_max, x_min, x_max)` coordinates of ROIs to mask out during peak finding; can be used to mask out bad regions as well as to reduce data size to e.g. one quadrant for the purpose of speedup. Algorithm Output: * `'number_of_streaks': int` - Indicates the count of identified streaks. @@ -258,22 +273,56 @@ Algorithms use input parameters specified in a JSON file provided to worker.py ( "roi_y1": [], "roi_x2": [], "roi_y2": [], - "do_whitefield_correction": 1, - "wf_data_file": "/sf/bernina/exp/00m_musterman/res/whitefield/JF07T32V01_wf_div.h5", - "wf_method": "div", - "apply_additional_mask_from_file": 1, - "mask_file": "/sf/bernina/exp/00m_musterman/res/mask/JF07T32V01_mask.h5", - "do_streakfinder_analysis": 1, - "sf_structure_radius": 10, - "sf_structure_rank": 2, - "sf_min_size": 7, - "sf_vmin": 75, - "sf_npts": 100, - "sf_xtol": 1.5 - } ``` +Example JSON for Convergent-Beam Diffraction Streak-Finder: + ```json + { + "beam_center_x": 1119.0, + "beam_center_y": 1068.0, + "detector_distance": 0.092, + "do_peakfinder_analysis": 0, + "beam_energy": 11993.610318642704, + "apply_threshold": 0, + "threshold_min": 0, + "threshold_max": 35, + "apply_aggregation": 0, + "aggregation_max": 2, + "double_pixels": "mask", + "detector_rate": 100, + "do_radial_integration": 0, + "do_spi_analysis": 0, + "threshold_value": "NaN", + "select_only_ppicker_events": 0, + "disabled_modules": [], + "roi_x1": [], + "roi_y1": [], + "roi_x2": [], + "roi_y2": [], + "do_snr": 0, + "std_data_file": "/sf/instrument/exp/00m_mustermann/res/aux_data/streakfinder_metadata.h5", + "std_dataset": "entry/crystallography/std", + "whitefield_data_file": "/sf/bernina/exp/00m_mustermann/res/aux_data/streakfinder_metadata.h5", + "whitefield_dataset": "entry/crystallography/whitefield", + "mask_data_file": 
"/sf/bernina/exp/00m_mustermann/res/aux_data/JF_mask.h5", + "mask_dataset": "mask_data", + "scale_whitefield": 0, + "do_streakfinder_analysis": 1, + "sf_peak_structure_radius": 2, + "sf_peak_structure_rank": 2, + "sf_peak_vmin": 50, + "sf_npts": 10, + "sf_streak_structure_radius": 6, + "sf_streak_structure_rank": 4, + "sf_xtol": 2.0, + "sf_streak_vmin": 30, + "sf_min_size": 25, + "sf_nfa": 1, + "sf_num_threads": 32, + "sf_mask_rois": [[0, 2216, 2107, 4215]] +} + ``` # Acknowledgment Special thanks to Valerio Mariani for providing the cython implementation of peakfinder8. -- 2.49.1 From d9ef09a69e3309e2e40fc25da91f8ef54da03c99 Mon Sep 17 00:00:00 2001 From: Lisa Dorofeeva Date: Mon, 7 Jul 2025 10:05:29 +0200 Subject: [PATCH 24/43] Clean up debug prints; minor cleanup in README --- README.md | 3 ++- dap/algos/streakfind.py | 4 +--- dap/worker.py | 1 - 3 files changed, 3 insertions(+), 5 deletions(-) diff --git a/README.md b/README.md index a177cf5..a162d72 100644 --- a/README.md +++ b/README.md @@ -142,6 +142,7 @@ options: * `'bragg_counts': list[float]` - Provides the intensity sum within identified streaks. * `'streaks': 4*list[float]` - Provides coordinates of the identified streaks: x0, y0, x1, y1. * `'is_hit_frame': True/False` - Marks whether a frame qualifies as a hit based on the number of identified streaks. + * `'cbd_error': str` - An error message in case the streak finder failed on one of it's stages. 
* **Radial Profile Integration** @@ -272,7 +273,7 @@ Algorithms use input parameters specified in a JSON file provided to worker.py ( "roi_x1": [], "roi_y1": [], "roi_x2": [], - "roi_y2": [], + "roi_y2": [] } ``` diff --git a/dap/algos/streakfind.py b/dap/algos/streakfind.py index f4c652f..6b467b5 100644 --- a/dap/algos/streakfind.py +++ b/dap/algos/streakfind.py @@ -13,6 +13,7 @@ from streak_finder.label import Structure2D DEFAULT_NUM_THREADS = 16 + def calc_streakfinder_analysis(results, data, pf_pixel_mask): do_snr = results.get("do_snr", False) do_streakfinder_analysis = results.get("do_streakfinder_analysis", False) @@ -22,7 +23,6 @@ def calc_streakfinder_analysis(results, data, pf_pixel_mask): try: cryst_data = _generate_cryst_data(results, data, pf_pixel_mask) except Exception as error: # Broad exception - we don't want to break anything here - print(f"Error processing CBD data:\n{error}") results["cbd_error"] = f"Error processing CBD data:\n{error}" return data @@ -34,7 +34,6 @@ def calc_streakfinder_analysis(results, data, pf_pixel_mask): try: _calc_streakfinder_analysis(results, cryst_data) except Exception as error: # Broad exception - we don't want to break anything here - print(f"Error processing CBD data:\n{error}") results["cbd_error"] = f"Error processing CBD data:\n{error}" return data @@ -164,7 +163,6 @@ def _calc_streakfinder_analysis(results, cryst_data: CrystData): streak_lines = streak_lines.T _, number_of_streaks = streak_lines.shape - print(f"Found {number_of_streaks} streaks") list_result = [line.tolist() for line in streak_lines] # arr(4, n_lines); 0coord x0, y0, x1, y1 diff --git a/dap/worker.py b/dap/worker.py index 42f2e13..18ba946 100644 --- a/dap/worker.py +++ b/dap/worker.py @@ -121,7 +121,6 @@ def work(backend_address, accumulator_host, accumulator_port, visualisation_host # Streak finder processing for convergent-beam diffraction experiments # changes image and mask in place if do_snr=True in parameters file image = 
calc_streakfinder_analysis(results, image, pixel_mask_pf) - print(f"Done\n{results=}") image, aggregation_is_ready = calc_apply_aggregation(results, image, pixel_mask_pf, aggregator) -- 2.49.1 From 969ac8ffcf605ba8e7c0fc4e1d3c0d125f457d17 Mon Sep 17 00:00:00 2001 From: "Dorofeeva Elizaveta (EXT)" Date: Thu, 10 Jul 2025 09:26:17 +0200 Subject: [PATCH 25/43] First step towards running algorithm on cropped image --- dap/algos/streakfind.py | 10 ++++++++-- 1 file changed, 8 insertions(+), 2 deletions(-) diff --git a/dap/algos/streakfind.py b/dap/algos/streakfind.py index 6b467b5..5f9aa73 100644 --- a/dap/algos/streakfind.py +++ b/dap/algos/streakfind.py @@ -19,7 +19,8 @@ def calc_streakfinder_analysis(results, data, pf_pixel_mask): do_streakfinder_analysis = results.get("do_streakfinder_analysis", False) if not do_snr and not do_streakfinder_analysis: return data - + # Shift to min=0 + data = data - np.min(data) try: cryst_data = _generate_cryst_data(results, data, pf_pixel_mask) except Exception as error: # Broad exception - we don't want to break anything here @@ -127,6 +128,11 @@ def _calc_streakfinder_analysis(results, cryst_data: CrystData): for mask_roi in mask_rois: cryst_data = cryst_data.mask_region(mask_roi) + crop_roi = results.get("sf_crop_roi", None) # [y_min, y_max, x_min, x_max] + + if crop_roi is not None: + cryst_data = cryst_data.crop(crop_roi) + peaks_structure = Structure2D(peak_structure_radius, peak_structure_rank) streaks_structure = Structure2D(streak_structure_radius, streak_structure_rank) @@ -148,7 +154,7 @@ def _calc_streakfinder_analysis(results, cryst_data: CrystData): return streaks = det_obj.to_streaks(detected) - detected_streaks = np.asarray(list(detected.streaks.values())) + detected_streaks = np.asarray(detected.streaks) streak_lines = streaks.lines if x_center is not None and y_center is not None: -- 2.49.1 From ea28335ae02c75f39502e9f9a8a5a0c6dfb95831 Mon Sep 17 00:00:00 2001 From: Lisa Dorofeeva Date: Thu, 10 Jul 2025 09:33:36 
+0200 Subject: [PATCH 26/43] For final streak finder result, take into account crop region shift --- dap/algos/streakfind.py | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/dap/algos/streakfind.py b/dap/algos/streakfind.py index 5f9aa73..5676dd9 100644 --- a/dap/algos/streakfind.py +++ b/dap/algos/streakfind.py @@ -157,6 +157,11 @@ def _calc_streakfinder_analysis(results, cryst_data: CrystData): detected_streaks = np.asarray(detected.streaks) streak_lines = streaks.lines + # Adjust to crop region + if crop_roi is not None: + shift = [crop_roi[0], crop_roi[2], crop_roi[0], crop_roi[2]] + streak_lines = streak_lines + shift + if x_center is not None and y_center is not None: streaks_mask = streaks.concentric_only(x_center, y_center) streak_lines = streak_lines[streaks_mask] -- 2.49.1 From 84b6ef3c52e2858360597d9847e28233b2d44569 Mon Sep 17 00:00:00 2001 From: Lisa Dorofeeva Date: Thu, 10 Jul 2025 10:07:19 +0200 Subject: [PATCH 27/43] For concentric streaks detection, take into account crop region shift --- dap/algos/streakfind.py | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/dap/algos/streakfind.py b/dap/algos/streakfind.py index 5676dd9..16af5cd 100644 --- a/dap/algos/streakfind.py +++ b/dap/algos/streakfind.py @@ -131,6 +131,7 @@ def _calc_streakfinder_analysis(results, cryst_data: CrystData): crop_roi = results.get("sf_crop_roi", None) # [y_min, y_max, x_min, x_max] if crop_roi is not None: + cryst_data = cryst_data.crop(crop_roi) peaks_structure = Structure2D(peak_structure_radius, peak_structure_rank) @@ -159,10 +160,13 @@ def _calc_streakfinder_analysis(results, cryst_data: CrystData): # Adjust to crop region if crop_roi is not None: - shift = [crop_roi[0], crop_roi[2], crop_roi[0], crop_roi[2]] + shift = [crop_roi[2], crop_roi[0], crop_roi[0], crop_roi[2]] streak_lines = streak_lines + shift if x_center is not None and y_center is not None: + if crop_roi is not None: + x_center -= crop_roi[0] + y_center -= crop_roi[2] 
streaks_mask = streaks.concentric_only(x_center, y_center) streak_lines = streak_lines[streaks_mask] detected_streaks = detected_streaks[streaks_mask] -- 2.49.1 From b3ec993f2f565f90b2914c7a402321aad75b0472 Mon Sep 17 00:00:00 2001 From: Lisa Dorofeeva Date: Thu, 10 Jul 2025 10:08:37 +0200 Subject: [PATCH 28/43] Swap x/y for crop roi for cryst data --- dap/algos/streakfind.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/dap/algos/streakfind.py b/dap/algos/streakfind.py index 16af5cd..200b568 100644 --- a/dap/algos/streakfind.py +++ b/dap/algos/streakfind.py @@ -131,8 +131,8 @@ def _calc_streakfinder_analysis(results, cryst_data: CrystData): crop_roi = results.get("sf_crop_roi", None) # [y_min, y_max, x_min, x_max] if crop_roi is not None: - - cryst_data = cryst_data.crop(crop_roi) + crop_roi_t = [crop_roi[2], crop_roi[3], crop_roi[0], crop_roi[1]]# y0, y1, x0, x1 + cryst_data = cryst_data.crop(crop_roi_t) peaks_structure = Structure2D(peak_structure_radius, peak_structure_rank) streaks_structure = Structure2D(streak_structure_radius, streak_structure_rank) -- 2.49.1 From 2bd3e875e34ad23cd8b7d3866bf01433546a6f2a Mon Sep 17 00:00:00 2001 From: "Dorofeeva Elizaveta (EXT)" Date: Thu, 10 Jul 2025 10:26:38 +0200 Subject: [PATCH 29/43] Bug fix in streaks coordinates shift --- dap/algos/streakfind.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/dap/algos/streakfind.py b/dap/algos/streakfind.py index 200b568..05ed131 100644 --- a/dap/algos/streakfind.py +++ b/dap/algos/streakfind.py @@ -160,7 +160,7 @@ def _calc_streakfinder_analysis(results, cryst_data: CrystData): # Adjust to crop region if crop_roi is not None: - shift = [crop_roi[2], crop_roi[0], crop_roi[0], crop_roi[2]] + shift = [crop_roi[0], crop_roi[2], crop_roi[0], crop_roi[2]] streak_lines = streak_lines + shift if x_center is not None and y_center is not None: -- 2.49.1 From a304ff1d9db6baae008471ab3d03cca8fa74607a Mon Sep 17 00:00:00 2001 From: Lisa Dorofeeva Date: 
Thu, 10 Jul 2025 10:33:38 +0200 Subject: [PATCH 30/43] Added crop roi to readme --- README.md | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/README.md b/README.md index a162d72..ac8310d 100644 --- a/README.md +++ b/README.md @@ -134,7 +134,8 @@ options: streak is no more than ``sf_xtol``. * `'sf_nfa': 1` - Number of false alarms, allowed number of unaligned points in a streak. * `'sf_num_threads': int` - Number of threads to use for peak finder algorithm - * `'sf_mask_rois': list[(int, int, int, int)]` - [optional] list of `(y_min, y_max, x_min, x_max)` coordinates of ROIs to mask out during peak finding; can be used to mask out bad regions as well as to reduce data size to e.g. one quadrant for the purpose of speedup. + * `'sf_mask_rois': list[(int, int, int, int)]` - [optional] list of `(y_min, y_max, x_min, x_max)` coordinates of ROIs to mask out during peak finding. + * `'sf_crop_roi': [int, int, int, int]` - [optional] run streak finder on a cropped image, e.g. one quadrant, for purpose of spedup. Algorithm Output: * `'number_of_streaks': int` - Indicates the count of identified streaks. 
@@ -321,7 +322,7 @@ Example JSON for Convergent-Beam Diffraction Streak-Finder: "sf_min_size": 25, "sf_nfa": 1, "sf_num_threads": 32, - "sf_mask_rois": [[0, 2216, 2107, 4215]] + "sf_crop_roi": [0, 2107, 0, 2216] } ``` # Acknowledgment -- 2.49.1 From 094371a63b14d52705538bc048ae7c6a2d8d4956 Mon Sep 17 00:00:00 2001 From: Lisa Dorofeeva Date: Fri, 11 Jul 2025 07:59:19 +0200 Subject: [PATCH 31/43] Simplify converting frame to stack for CrystData --- dap/algos/streakfind.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/dap/algos/streakfind.py b/dap/algos/streakfind.py index 05ed131..d4260a3 100644 --- a/dap/algos/streakfind.py +++ b/dap/algos/streakfind.py @@ -73,7 +73,7 @@ def _generate_cryst_data(results, data, pf_pixel_mask) -> CrystData: std = np.asarray(hf[std_dataset]) data = CrystData( - data=data.reshape((-1,) + data.shape[-2:]), + data=data[np.newaxis, :], mask=mask*pf_pixel_mask, std=std, whitefield=whitefield -- 2.49.1 From 152b2342d7b43e93a82e5772b2c2e45744f00dea Mon Sep 17 00:00:00 2001 From: Lisa Dorofeeva Date: Fri, 11 Jul 2025 08:06:29 +0200 Subject: [PATCH 32/43] Simplify results dictionary updates in streak finder --- dap/algos/streakfind.py | 20 ++++++++++---------- 1 file changed, 10 insertions(+), 10 deletions(-) diff --git a/dap/algos/streakfind.py b/dap/algos/streakfind.py index d4260a3..cdca48f 100644 --- a/dap/algos/streakfind.py +++ b/dap/algos/streakfind.py @@ -147,11 +147,11 @@ def _calc_streakfinder_analysis(results, cryst_data: CrystData): detected = detected[0] if not detected.streaks: - results.update({"number_of_streaks": 0}) - results.update({"is_hit_frame": False}) - results.update({"streaks": []}) - results.update({"streak_lengths": []}) - results.update({"bragg_counts": []}) + results["number_of_streaks"] = 0 + results["is_hit_frame"] = False + results["streaks"] = [] + results["streak_lengths"] = [] + results["bragg_counts"] = [] return streaks = det_obj.to_streaks(detected) @@ -183,8 +183,8 @@ def 
_calc_streakfinder_analysis(results, cryst_data: CrystData): bragg_counts = [streak.total_mass() for streak in detected_streaks] - results.update({"number_of_streaks": number_of_streaks}) - results.update({"is_hit_frame": number_of_streaks > 0}) - results.update({"streaks": list_result}) - results.update({"streak_lengths": streak_lengths}) - results.update({"bragg_counts": bragg_counts}) + results["number_of_streaks"] = number_of_streaks + results["is_hit_frame"] = (number_of_streaks > 5) + results["streaks"] = list_result + results["streak_lengths"] = streak_lengths + results["bragg_counts"] = bragg_counts -- 2.49.1 From c5ba140c719dfc042f023f2287a8ca8d91841233 Mon Sep 17 00:00:00 2001 From: Lisa Dorofeeva Date: Fri, 11 Jul 2025 08:08:22 +0200 Subject: [PATCH 33/43] Format import --- dap/worker.py | 7 ++++--- 1 file changed, 4 insertions(+), 3 deletions(-) diff --git a/dap/worker.py b/dap/worker.py index b7f9f4a..23a4de5 100644 --- a/dap/worker.py +++ b/dap/worker.py @@ -2,9 +2,10 @@ import argparse import numpy as np -from algos import (calc_apply_aggregation, calc_apply_threshold, calc_mask_pixels, calc_peakfinder_analysis, - calc_radial_integration, calc_roi, calc_spi_analysis, - calc_streakfinder_analysis, JFData) +from algos import ( + calc_apply_aggregation, calc_apply_threshold, calc_mask_pixels, calc_peakfinder_analysis, + calc_radial_integration, calc_roi, calc_spi_analysis, calc_streakfinder_analysis, JFData +) from utils import Aggregator, BufferedJSON, randskip, read_bit from zmqsocks import ZMQSockets -- 2.49.1 From 1ef801c37ebfa12e119804dacb7b56fea1199d47 Mon Sep 17 00:00:00 2001 From: Lisa Dorofeeva Date: Fri, 11 Jul 2025 08:11:42 +0200 Subject: [PATCH 34/43] Add parameter sf_min_hit_streaks to streak finder - Minimum number of discovered streaks to categorize frame as a hit --- README.md | 1 + dap/algos/streakfind.py | 5 ++++- 2 files changed, 5 insertions(+), 1 deletion(-) diff --git a/README.md b/README.md index 701eea5..a3442d7 100644 --- 
a/README.md +++ b/README.md @@ -134,6 +134,7 @@ options: streak is no more than ``sf_xtol``. * `'sf_nfa': 1` - Number of false alarms, allowed number of unaligned points in a streak. * `'sf_num_threads': int` - Number of threads to use for peak finder algorithm + * `'sf_min_hit_streaks': int` - Minimum number of discovered streaks to categorize frame as a hit. * `'sf_mask_rois': list[(int, int, int, int)]` - [optional] list of `(y_min, y_max, x_min, x_max)` coordinates of ROIs to mask out during peak finding. * `'sf_crop_roi': [int, int, int, int]` - [optional] run streak finder on a cropped image, e.g. one quadrant, for purpose of spedup. diff --git a/dap/algos/streakfind.py b/dap/algos/streakfind.py index cdca48f..37ad48e 100644 --- a/dap/algos/streakfind.py +++ b/dap/algos/streakfind.py @@ -12,6 +12,7 @@ from streak_finder import CrystData from streak_finder.label import Structure2D DEFAULT_NUM_THREADS = 16 +DEFAULT_MIN_HIT_STREAKS = 5 def calc_streakfinder_analysis(results, data, pf_pixel_mask): @@ -120,6 +121,8 @@ def _calc_streakfinder_analysis(results, cryst_data: CrystData): nfa = results["sf_nfa"] num_threads = results["sf_num_threads"] + min_hit_streaks = results.get("sf_min_hit_streaks", DEFAULT_MIN_HIT_STREAKS) + x_center = results.get("beam_center_x", None) y_center = results.get("beam_center_y", None) @@ -184,7 +187,7 @@ def _calc_streakfinder_analysis(results, cryst_data: CrystData): bragg_counts = [streak.total_mass() for streak in detected_streaks] results["number_of_streaks"] = number_of_streaks - results["is_hit_frame"] = (number_of_streaks > 5) + results["is_hit_frame"] = (number_of_streaks > min_hit_streaks) results["streaks"] = list_result results["streak_lengths"] = streak_lengths results["bragg_counts"] = bragg_counts -- 2.49.1 From b6df4986deb8d236f8c8cbe9dfe45fb87a679328 Mon Sep 17 00:00:00 2001 From: Lisa Dorofeeva Date: Fri, 11 Jul 2025 08:19:10 +0200 Subject: [PATCH 35/43] Further simplify, get rid of redundant brackets --- 
dap/algos/streakfind.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/dap/algos/streakfind.py b/dap/algos/streakfind.py index 37ad48e..215fe89 100644 --- a/dap/algos/streakfind.py +++ b/dap/algos/streakfind.py @@ -48,7 +48,7 @@ def _generate_cryst_data(results, data, pf_pixel_mask) -> CrystData: "scale_whitefield", # Bool ] - if not all([param in results.keys() for param in params_required]): + if not all(param in results.keys() for param in params_required): raise ValueError(f"ERROR: Not enough parameters for CBD correction. Skipping\n" f"{params_required=}") @@ -104,7 +104,7 @@ def _calc_streakfinder_analysis(results, cryst_data: CrystData): "sf_num_threads", ] - if not all([param in results.keys() for param in params_required]): + if not all(param in results.keys() for param in params_required): print(f"ERROR: Not enough parameters for streak finder analysis. Skipping.\n" f"{params_required=}") return @@ -182,7 +182,7 @@ def _calc_streakfinder_analysis(results, cryst_data: CrystData): streak_lines = streak_lines.T _, number_of_streaks = streak_lines.shape - list_result = [line.tolist() for line in streak_lines] # arr(4, n_lines); 0coord x0, y0, x1, y1 + list_result = streak_lines.tolist() # arr(4, n_lines); 0coord x0, y0, x1, y1 bragg_counts = [streak.total_mass() for streak in detected_streaks] -- 2.49.1 From 2b17a8bd3a0e333ac9f3e392da6a908b9e2b3c21 Mon Sep 17 00:00:00 2001 From: Lisa Dorofeeva Date: Fri, 11 Jul 2025 08:53:18 +0200 Subject: [PATCH 36/43] Configurable negative values handler for streak finder --- README.md | 6 ++++-- dap/algos/streakfind.py | 17 +++++++++++++++-- 2 files changed, 19 insertions(+), 4 deletions(-) diff --git a/README.md b/README.md index a3442d7..56ab102 100644 --- a/README.md +++ b/README.md @@ -133,8 +133,9 @@ options: * `'sf_xtol': float` - Distance threshold. A new linelet is added to a streak if it's distance to the streak is no more than ``sf_xtol``. 
* `'sf_nfa': 1` - Number of false alarms, allowed number of unaligned points in a streak. - * `'sf_num_threads': int` - Number of threads to use for peak finder algorithm - * `'sf_min_hit_streaks': int` - Minimum number of discovered streaks to categorize frame as a hit. + * `'sf_num_threads': int` - Number of threads to use for peak finder algorithm. + * `'sf_negative_handler': 'mask'/'zero'/'shift'/''` - [optional] Method to handle negative values in converted frames, defaults to `''` (do not handle). + * `'sf_min_hit_streaks': int` - [optional] Minimum number of discovered streaks to categorize frame as a hit, defaults to 5. * `'sf_mask_rois': list[(int, int, int, int)]` - [optional] list of `(y_min, y_max, x_min, x_max)` coordinates of ROIs to mask out during peak finding. * `'sf_crop_roi': [int, int, int, int]` - [optional] run streak finder on a cropped image, e.g. one quadrant, for purpose of spedup. @@ -312,6 +313,7 @@ Example JSON for Convergent-Beam Diffraction Streak-Finder: "mask_dataset": "mask_data", "scale_whitefield": 0, "do_streakfinder_analysis": 1, + "sf_negative_handler": "zero", "sf_peak_structure_radius": 2, "sf_peak_structure_rank": 2, "sf_peak_vmin": 50, diff --git a/dap/algos/streakfind.py b/dap/algos/streakfind.py index 215fe89..01e5ce7 100644 --- a/dap/algos/streakfind.py +++ b/dap/algos/streakfind.py @@ -15,13 +15,26 @@ DEFAULT_NUM_THREADS = 16 DEFAULT_MIN_HIT_STREAKS = 5 +def _handle_negative_values(data, handler: str): + if not handler or np.all(data>=0): + return + if handler == "shift": + # Shift to min=0 + data -= np.min(data) + elif handler == "mask": + data[data<0] = np.nan + elif handler == "zero": + data[data<0] = 0 + def calc_streakfinder_analysis(results, data, pf_pixel_mask): do_snr = results.get("do_snr", False) do_streakfinder_analysis = results.get("do_streakfinder_analysis", False) if not do_snr and not do_streakfinder_analysis: return data - # Shift to min=0 - data = data - np.min(data) + + negative_val_handler = 
results.get("sf_negative_handler", "") + _handle_negative_values(data, negative_val_handler) + try: cryst_data = _generate_cryst_data(results, data, pf_pixel_mask) except Exception as error: # Broad exception - we don't want to break anything here -- 2.49.1 From e756b70655f471966875f646317880e15975a5bc Mon Sep 17 00:00:00 2001 From: Lisa Dorofeeva Date: Fri, 11 Jul 2025 08:57:24 +0200 Subject: [PATCH 37/43] Credits --- README.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/README.md b/README.md index 56ab102..a8d23ed 100644 --- a/README.md +++ b/README.md @@ -332,4 +332,4 @@ Example JSON for Convergent-Beam Diffraction Streak-Finder: Special thanks to Valerio Mariani for providing the cython implementation of peakfinder8. -Special thanks to Nikolai Ivanov for providing the cython implementation of streak-finder. +Special thanks to Nikolai Ivanov for providing the C++ implementation of streak-finder as well as Lisa Dorofeeva for integrating it. -- 2.49.1 From d8e38df39a2a1779203e04d7e2b9a17c11d35054 Mon Sep 17 00:00:00 2001 From: Lisa Dorofeeva Date: Fri, 11 Jul 2025 10:16:05 +0200 Subject: [PATCH 38/43] Option to read additional mask from file; Applied on startup only --- README.md | 10 ++++++++++ dap/algos/__init__.py | 1 + dap/algos/addmaskfile.py | 36 ++++++++++++++++++++++++++++++++++++ dap/algos/jfdata.py | 2 ++ 4 files changed, 49 insertions(+) create mode 100644 dap/algos/addmaskfile.py diff --git a/README.md b/README.md index a8d23ed..73d2c0b 100644 --- a/README.md +++ b/README.md @@ -239,6 +239,16 @@ options: Use the `'apply_additional_mask': 0/1` - Input flag to enable this functionality. + * **Additional Mask from file** + + Alternative to previous additional masking, mask data is read from specified file. NumPy and HDF5 formats are supported. + Input parameters: + * `'apply_additional_mask': 1/0` - Input flag to enable this functionality. + * `'mask_file': str` - Path to the hdf5 or npy file with mask data. 
+ * `'mask_ds': str` [Optional] - Name of the dataset containing mask in the hdf5 file, default is `"mask_data"`. + Algorithm Output: + * `'mask_from_file_applied': 1/0` - Indicates whether the algorithm ran successfully. + * **Filter based on pulse picker information** If the event propagation capability is accessible for the detector and the pulse picker information is correctly configured for propagation, the filtration based on pulse picker information becomes feasible by using the diff --git a/dap/algos/__init__.py b/dap/algos/__init__.py index 501d88d..d5e8f0b 100644 --- a/dap/algos/__init__.py +++ b/dap/algos/__init__.py @@ -1,5 +1,6 @@ from .addmask import calc_apply_additional_mask +from .addmaskfile import calc_apply_additional_mask_from_file from .aggregation import calc_apply_aggregation from .jfdata import JFData from .mask import calc_mask_pixels diff --git a/dap/algos/addmaskfile.py b/dap/algos/addmaskfile.py new file mode 100644 index 0000000..772900a --- /dev/null +++ b/dap/algos/addmaskfile.py @@ -0,0 +1,36 @@ +import h5py +import numpy as np + + +DEFAULT_MASK_DATASET = "mask_data" + + +def calc_apply_additional_mask_from_file(results, pixel_mask_pf): + apply_additional_mask = results.get("apply_additional_mask_from_file", False) + if not apply_additional_mask: + return + + mask_file = results.get("mask_file", None) + if not mask_file: + return + mask_dataset = results.get("mask_ds", DEFAULT_MASK_DATASET) + + # Support for hdf5 and npy + if mask_file.endswith(".npy"): + try: + mask = np.load(mask_file) + except Exception as error: + results["mask_error"] = f"Error loading mask data from NumPy file {mask_file}:\n{error}" + return + else: + try: + with h5py.File(mask_file, "r") as h5f: + mask = np.asarray(h5f[mask_dataset], dtype=np.bool) + except Exception as error: + results["mask_error"] = f"Error loading mask from hdf5 file {mask_file}:\n{error}" + return + + try: + np.multiply(pixel_mask_pf, mask, out=pixel_mask_pf) + except Exception as error: 
+ results["mask_error"] = f"Error applying additional mask from file {mask_file}:\n{error}" diff --git a/dap/algos/jfdata.py b/dap/algos/jfdata.py index bd2fa1e..1a578bf 100644 --- a/dap/algos/jfdata.py +++ b/dap/algos/jfdata.py @@ -3,6 +3,7 @@ import numpy as np import jungfrau_utils as ju from .addmask import calc_apply_additional_mask +from .addmaskfile import calc_apply_additional_mask_from_file class JFData: @@ -58,6 +59,7 @@ class JFData: pixel_mask_pf = np.ascontiguousarray(pixel_mask_corrected) calc_apply_additional_mask(results, pixel_mask_pf) # changes pixel_mask_pf in place + calc_apply_additional_mask_from_file(results, pixel_mask_pf) # further changes pixel_mask_pf in place self.id_pixel_mask_corrected = new_id_pixel_mask_corrected self.pixel_mask_pf = pixel_mask_pf -- 2.49.1 From e3e701a4daa9c2f588eaca07c45095c1fdc86b09 Mon Sep 17 00:00:00 2001 From: "Dorofeeva Elizaveta (EXT)" Date: Fri, 11 Jul 2025 11:09:17 +0200 Subject: [PATCH 39/43] Masking negative values for streak finder should be reflected in mask, not in data --- dap/algos/streakfind.py | 7 ++++--- 1 file changed, 4 insertions(+), 3 deletions(-) diff --git a/dap/algos/streakfind.py b/dap/algos/streakfind.py index 01e5ce7..1409f61 100644 --- a/dap/algos/streakfind.py +++ b/dap/algos/streakfind.py @@ -15,17 +15,18 @@ DEFAULT_NUM_THREADS = 16 DEFAULT_MIN_HIT_STREAKS = 5 -def _handle_negative_values(data, handler: str): +def _handle_negative_values(data, mask, handler: str): if not handler or np.all(data>=0): return if handler == "shift": # Shift to min=0 data -= np.min(data) elif handler == "mask": - data[data<0] = np.nan + mask[data<0] = np.nan elif handler == "zero": data[data<0] = 0 + def calc_streakfinder_analysis(results, data, pf_pixel_mask): do_snr = results.get("do_snr", False) do_streakfinder_analysis = results.get("do_streakfinder_analysis", False) @@ -33,7 +34,7 @@ def calc_streakfinder_analysis(results, data, pf_pixel_mask): return data negative_val_handler = 
results.get("sf_negative_handler", "") - _handle_negative_values(data, negative_val_handler) + _handle_negative_values(data, pf_pixel_mask, negative_val_handler) try: cryst_data = _generate_cryst_data(results, data, pf_pixel_mask) -- 2.49.1 From 7169ce5acb64d52fff3dcb06aafa146caf118837 Mon Sep 17 00:00:00 2001 From: "Dorofeeva Elizaveta (EXT)" Date: Fri, 11 Jul 2025 11:22:45 +0200 Subject: [PATCH 40/43] We know that detected streaks are list, use np.array --- dap/algos/streakfind.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/dap/algos/streakfind.py b/dap/algos/streakfind.py index 1409f61..b55dd4e 100644 --- a/dap/algos/streakfind.py +++ b/dap/algos/streakfind.py @@ -172,7 +172,7 @@ def _calc_streakfinder_analysis(results, cryst_data: CrystData): return streaks = det_obj.to_streaks(detected) - detected_streaks = np.asarray(detected.streaks) + detected_streaks = np.array(detected.streaks) streak_lines = streaks.lines # Adjust to crop region -- 2.49.1 From 4eb4bb76dcfd8aaab858e062b23e09faa9b4eab3 Mon Sep 17 00:00:00 2001 From: Lisa Dorofeeva Date: Fri, 11 Jul 2025 14:47:20 +0200 Subject: [PATCH 41/43] Ensure single read and no copy for hdf5 data --- dap/algos/addmaskfile.py | 4 ++-- dap/algos/streakfind.py | 6 +++--- 2 files changed, 5 insertions(+), 5 deletions(-) diff --git a/dap/algos/addmaskfile.py b/dap/algos/addmaskfile.py index 772900a..9c9ce0e 100644 --- a/dap/algos/addmaskfile.py +++ b/dap/algos/addmaskfile.py @@ -18,14 +18,14 @@ def calc_apply_additional_mask_from_file(results, pixel_mask_pf): # Support for hdf5 and npy if mask_file.endswith(".npy"): try: - mask = np.load(mask_file) + mask = np.load(mask_file).astype(np.bool) except Exception as error: results["mask_error"] = f"Error loading mask data from NumPy file {mask_file}:\n{error}" return else: try: with h5py.File(mask_file, "r") as h5f: - mask = np.asarray(h5f[mask_dataset], dtype=np.bool) + mask = h5f[mask_dataset][:].astype(np.bool) except Exception as error: 
results["mask_error"] = f"Error loading mask from hdf5 file {mask_file}:\n{error}" return diff --git a/dap/algos/streakfind.py b/dap/algos/streakfind.py index b55dd4e..d42e50d 100644 --- a/dap/algos/streakfind.py +++ b/dap/algos/streakfind.py @@ -79,13 +79,13 @@ def _generate_cryst_data(results, data, pf_pixel_mask) -> CrystData: num_threads = results.get("num_threads", DEFAULT_NUM_THREADS) with h5py.File(whitefield_data_file, "r") as hf: - whitefield = np.asarray(hf[whitefield_dataset]) + whitefield = hf[whitefield_dataset][:] with h5py.File(mask_data_file, "r") as hf: - mask = np.asarray(hf[mask_dataset], dtype=np.bool) + mask = hf[mask_dataset][:].astype(np.bool) with h5py.File(std_data_file, "r") as hf: - std = np.asarray(hf[std_dataset]) + std = hf[std_dataset][:] data = CrystData( data=data[np.newaxis, :], -- 2.49.1 From 193f531d5cbd0f72c753a2d9a582ddddd5ab5a8d Mon Sep 17 00:00:00 2001 From: Lisa Dorofeeva Date: Fri, 11 Jul 2025 14:57:20 +0200 Subject: [PATCH 42/43] Correct setting name for mask form file dataset; prepend all strak-finder-specific settings names with sf; Make streak-finder mask optional now that we have mask from file --- README.md | 33 ++++++++++++++++++--------------- dap/algos/addmaskfile.py | 2 +- dap/algos/streakfind.py | 36 +++++++++++++++++++----------------- 3 files changed, 38 insertions(+), 33 deletions(-) diff --git a/README.md b/README.md index 73d2c0b..0f38f66 100644 --- a/README.md +++ b/README.md @@ -111,13 +111,13 @@ options: Input parameters: * `'do_snr': 1/0` - Specifies whether to do whitefield and std correction, if selected - changes image and mask in place. - * `'std_data_file': str` - hdf5 data file containing pre-calculated std. - * `'std_dataset': str` - [optional] dataset containing pre-calculated std, defaults to `'entry/crystallography/std'`. - * `'whitefield_data_file': str` - hdf5 data file containing pre-calculated white field. 
- * `'whitefield_dataset': str` - [optional] dataset containing pre-calculated white field, defaults to `'entry/crystallography/whitefield'`. - * `'mask_data_file': str` - hdf5 data file containing pre-calculated mask. - * `'mask_dataset': str` - [optional] dataset containing pre-calculated mask, defaults to `'entry/instrument/detector/mask'`. - * `'scale_whitefield': 1/0` - Specifies whether to scale whitefield to signal, useful if intensity jumps. + * `'sf_std_data_file': str` - hdf5 data file containing pre-calculated std. + * `'sf_std_dataset': str` - [optional] dataset containing pre-calculated std, defaults to `'entry/crystallography/std'`. + * `'sf_whitefield_data_file': str` - hdf5 data file containing pre-calculated white field. + * `'sf_whitefield_dataset': str` - [optional] dataset containing pre-calculated white field, defaults to `'entry/crystallography/whitefield'`. + * `'sf_mask_data_file': str` - [optional] hdf5 data file containing pre-calculated mask. + * `'sf_mask_dataset': str` - [optional] dataset containing pre-calculated mask, defaults to `'entry/instrument/detector/mask'`. + * `'sf_scale_whitefield': 1/0` - Specifies whether to scale whitefield to signal, useful if intensity jumps. * `'do_streakfinder_analysis': 1/0` - Specifies whether to execute the streak-finder algorithm. * `'sf_peak_structure_radius': int` - Connectivity structure radius for *peaks* detection. @@ -245,7 +245,7 @@ options: Input parameters: * `'apply_additional_mask': 1/0` - Input flag to enable this functionality. * `'mask_file': str` - Path to the hdf5 or npy file with mask data. - * `'mask_ds': str` [Optional] - Name of the dataset containing mask in the hdf5 file, default is `"mask_data"`. + * `'mask_dataset': str` [Optional] - Name of the dataset containing mask in the hdf5 file, default is `"mask_data"`. Algorithm Output: * `'mask_from_file_applied': 1/0` - Indicates whether the algorithm ran successfully. 
@@ -314,14 +314,17 @@ Example JSON for Convergent-Beam Diffraction Streak-Finder: "roi_y1": [], "roi_x2": [], "roi_y2": [], - "do_snr": 0, - "std_data_file": "/sf/instrument/exp/00m_mustermann/res/aux_data/streakfinder_metadata.h5", - "std_dataset": "entry/crystallography/std", - "whitefield_data_file": "/sf/bernina/exp/00m_mustermann/res/aux_data/streakfinder_metadata.h5", - "whitefield_dataset": "entry/crystallography/whitefield", - "mask_data_file": "/sf/bernina/exp/00m_mustermann/res/aux_data/JF_mask.h5", + "": 1, + "mask_file": "/sf/jungfrau/config/additional_mask/JF07T32V02.h5", "mask_dataset": "mask_data", - "scale_whitefield": 0, + "do_snr": 0, + "sf_std_data_file": "/sf/instrument/exp/00m_mustermann/res/aux_data/streakfinder_metadata.h5", + "sf_std_dataset": "entry/crystallography/std", + "sf_whitefield_data_file": "/sf/bernina/exp/00m_mustermann/res/aux_data/streakfinder_metadata.h5", + "sf_whitefield_dataset": "entry/crystallography/whitefield", + "sf_mask_data_file": "/sf/bernina/exp/00m_mustermann/res/aux_data/JF_mask.h5", + "sf_mask_dataset": "mask_data", + "sf_scale_whitefield": 0, "do_streakfinder_analysis": 1, "sf_negative_handler": "zero", "sf_peak_structure_radius": 2, diff --git a/dap/algos/addmaskfile.py b/dap/algos/addmaskfile.py index 9c9ce0e..4f82ee5 100644 --- a/dap/algos/addmaskfile.py +++ b/dap/algos/addmaskfile.py @@ -13,7 +13,7 @@ def calc_apply_additional_mask_from_file(results, pixel_mask_pf): mask_file = results.get("mask_file", None) if not mask_file: return - mask_dataset = results.get("mask_ds", DEFAULT_MASK_DATASET) + mask_dataset = results.get("mask_dataset", DEFAULT_MASK_DATASET) # Support for hdf5 and npy if mask_file.endswith(".npy"): diff --git a/dap/algos/streakfind.py b/dap/algos/streakfind.py index d42e50d..e307d71 100644 --- a/dap/algos/streakfind.py +++ b/dap/algos/streakfind.py @@ -45,7 +45,6 @@ def calc_streakfinder_analysis(results, data, pf_pixel_mask): if do_snr: # Changes data and mask in-place data = 
cryst_data.snr[0].copy() - np.multiply(pf_pixel_mask, cryst_data.mask, out=pf_pixel_mask) try: _calc_streakfinder_analysis(results, cryst_data) @@ -56,40 +55,43 @@ def calc_streakfinder_analysis(results, data, pf_pixel_mask): def _generate_cryst_data(results, data, pf_pixel_mask) -> CrystData: params_required = [ - "whitefield_data_file", - "mask_data_file", - "std_data_file", - "scale_whitefield", # Bool + "sf_whitefield_data_file", + "sf_std_data_file", + "sf_scale_whitefield", # Bool ] if not all(param in results.keys() for param in params_required): raise ValueError(f"ERROR: Not enough parameters for CBD correction. Skipping\n" f"{params_required=}") - whitefield_data_file = results["whitefield_data_file"] - mask_data_file = results["mask_data_file"] - std_data_file = results["std_data_file"] - scale_whitefield = results["scale_whitefield"] + whitefield_data_file = results["sf_whitefield_data_file"] + std_data_file = results["sf_std_data_file"] + scale_whitefield = results["sf_scale_whitefield"] # Using CXI Store specification as default - whitefield_dataset = results.get("whitefield_dataset", "entry/crystallography/whitefield") - mask_dataset = results.get("mask_dataset", "entry/instrument/detector/mask") - std_dataset = results.get("std_dataset", "entry/crystallography/std") + whitefield_dataset = results.get("sf_whitefield_dataset", "entry/crystallography/whitefield") + std_dataset = results.get("sf_std_dataset", "entry/crystallography/std") - num_threads = results.get("num_threads", DEFAULT_NUM_THREADS) + num_threads = results.get("sf_num_threads", DEFAULT_NUM_THREADS) with h5py.File(whitefield_data_file, "r") as hf: whitefield = hf[whitefield_dataset][:] - with h5py.File(mask_data_file, "r") as hf: - mask = hf[mask_dataset][:].astype(np.bool) - with h5py.File(std_data_file, "r") as hf: std = hf[std_dataset][:] + mask_data_file = results.get("sf_mask_data_file", None) + if mask_data_file is None: + mask = pf_pixel_mask + else: + mask_dataset = 
results.get("sf_mask_dataset", "entry/instrument/detector/mask") + with h5py.File(mask_data_file, "r") as hf: + mask = hf[mask_dataset][:].astype(np.bool) + mask *= pf_pixel_mask + data = CrystData( data=data[np.newaxis, :], - mask=mask*pf_pixel_mask, + mask=mask, std=std, whitefield=whitefield ) -- 2.49.1 From 1d88f33cf301c1cd59a325c30313c350f7dc6a2d Mon Sep 17 00:00:00 2001 From: Lisa Dorofeeva Date: Fri, 11 Jul 2025 15:29:51 +0200 Subject: [PATCH 43/43] Replace sf_ prefix with cbd_ for streak finder params; We are at SwissFell after all --- README.md | 90 ++++++++++++++++++++--------------------- dap/algos/streakfind.py | 74 ++++++++++++++++----------------- 2 files changed, 82 insertions(+), 82 deletions(-) diff --git a/README.md b/README.md index 0f38f66..227ba92 100644 --- a/README.md +++ b/README.md @@ -111,33 +111,33 @@ options: Input parameters: * `'do_snr': 1/0` - Specifies whether to do whitefield and std correction, if selected - changes image and mask in place. - * `'sf_std_data_file': str` - hdf5 data file containing pre-calculated std. - * `'sf_std_dataset': str` - [optional] dataset containing pre-calculated std, defaults to `'entry/crystallography/std'`. - * `'sf_whitefield_data_file': str` - hdf5 data file containing pre-calculated white field. - * `'sf_whitefield_dataset': str` - [optional] dataset containing pre-calculated white field, defaults to `'entry/crystallography/whitefield'`. - * `'sf_mask_data_file': str` - [optional] hdf5 data file containing pre-calculated mask. - * `'sf_mask_dataset': str` - [optional] dataset containing pre-calculated mask, defaults to `'entry/instrument/detector/mask'`. - * `'sf_scale_whitefield': 1/0` - Specifies whether to scale whitefield to signal, useful if intensity jumps. + * `'cbd_std_data_file': str` - hdf5 data file containing pre-calculated std. + * `'cbd_std_dataset': str` - [optional] dataset containing pre-calculated std, defaults to `'entry/crystallography/std'`. 
+ * `'cbd_whitefield_data_file': str` - hdf5 data file containing pre-calculated white field. + * `'cbd_whitefield_dataset': str` - [optional] dataset containing pre-calculated white field, defaults to `'entry/crystallography/whitefield'`. + * `'cbd_mask_data_file': str` - [optional] hdf5 data file containing pre-calculated mask. + * `'cbd_mask_dataset': str` - [optional] dataset containing pre-calculated mask, defaults to `'entry/instrument/detector/mask'`. + * `'cbd_scale_whitefield': 1/0` - Specifies whether to scale whitefield to signal, useful if intensity jumps. * `'do_streakfinder_analysis': 1/0` - Specifies whether to execute the streak-finder algorithm. - * `'sf_peak_structure_radius': int` - Connectivity structure radius for *peaks* detection. - * `'sf_peak_structure_rank': int` - Connectivity structure rank for *peaks* detection. - * `'sf_peak_vmin': float` - Peak threshold. All peaks with values lower than ``sf_peak_vmin`` are discarded. - * `'sf_npts': int` - Support size threshold. The support structure is a connected set of pixels which - value is above the threshold ``sf_peak_vmin``. A peak is discarded is the size of support - set is lower than ``sf_npts``. - * `'sf_streak_structure_radius': int` - Connectivity structure radius for *streaks* detection. - * `'sf_streak_structure_rank': int` - Connectivity structure rank for *streaks* detection. - * `'sf_streak_vmin': float` - Streak threshold. All streaks with values lower than ``sf_vmin`` are discarded. - * `'sf_min_size': float` - Minimum number of linelets required in a detected streak. - * `'sf_xtol': float` - Distance threshold. A new linelet is added to a streak if it's distance to the - streak is no more than ``sf_xtol``. - * `'sf_nfa': 1` - Number of false alarms, allowed number of unaligned points in a streak. - * `'sf_num_threads': int` - Number of threads to use for peak finder algorithm. 
- * `'sf_negative_handler': 'mask'/'zero'/'shift'/''` - [optional] Method to handle negative values in converted frames, defaults to `''` (do not handle).
- * `'sf_min_hit_streaks': int` - [optional] Minimum number of discovered streaks to categorize frame as a hit, defaults to 5.
- * `'sf_mask_rois': list[(int, int, int, int)]` - [optional] list of `(y_min, y_max, x_min, x_max)` coordinates of ROIs to mask out during peak finding.
- * `'sf_crop_roi': [int, int, int, int]` - [optional] run streak finder on a cropped image, e.g. one quadrant, for purpose of spedup.
+ * `'cbd_peak_structure_radius': int` - Connectivity structure radius for *peaks* detection.
+ * `'cbd_peak_structure_rank': int` - Connectivity structure rank for *peaks* detection.
+ * `'cbd_peak_vmin': float` - Peak threshold. All peaks with values lower than ``cbd_peak_vmin`` are discarded.
+ * `'cbd_npts': int` - Support size threshold. The support structure is a connected set of pixels which
+ value is above the threshold ``cbd_peak_vmin``. A peak is discarded if the size of support
+ set is lower than ``cbd_npts``.
+ * `'cbd_streak_structure_radius': int` - Connectivity structure radius for *streaks* detection.
+ * `'cbd_streak_structure_rank': int` - Connectivity structure rank for *streaks* detection.
+ * `'cbd_streak_vmin': float` - Streak threshold. All streaks with values lower than ``cbd_streak_vmin`` are discarded.
+ * `'cbd_min_size': float` - Minimum number of linelets required in a detected streak.
+ * `'cbd_xtol': float` - Distance threshold. A new linelet is added to a streak if its distance to the
+ streak is no more than ``cbd_xtol``.
+ * `'cbd_nfa': 1` - Number of false alarms, allowed number of unaligned points in a streak.
+ * `'cbd_num_threads': int` - Number of threads to use for peak finder algorithm.
+ * `'cbd_negative_handler': 'mask'/'zero'/'shift'/''` - [optional] Method to handle negative values in converted frames, defaults to `''` (do not handle). 
+ * `'cbd_min_hit_streaks': int` - [optional] Minimum number of discovered streaks to categorize frame as a hit, defaults to 5.
+ * `'cbd_mask_rois': list[(int, int, int, int)]` - [optional] list of `(y_min, y_max, x_min, x_max)` coordinates of ROIs to mask out during peak finding.
+ * `'cbd_crop_roi': [int, int, int, int]` - [optional] run streak finder on a cropped image, e.g. one quadrant, for purpose of speedup.
 
 Algorithm Output:
 * `'number_of_streaks': int` - Indicates the count of identified streaks.
@@ -318,27 +318,27 @@ Example JSON for Convergent-Beam Diffraction Streak-Finder:
 "mask_file": "/sf/jungfrau/config/additional_mask/JF07T32V02.h5",
 "mask_dataset": "mask_data",
 "do_snr": 0,
- "sf_std_data_file": "/sf/instrument/exp/00m_mustermann/res/aux_data/streakfinder_metadata.h5",
- "sf_std_dataset": "entry/crystallography/std",
- "sf_whitefield_data_file": "/sf/bernina/exp/00m_mustermann/res/aux_data/streakfinder_metadata.h5",
- "sf_whitefield_dataset": "entry/crystallography/whitefield",
- "sf_mask_data_file": "/sf/bernina/exp/00m_mustermann/res/aux_data/JF_mask.h5",
- "sf_mask_dataset": "mask_data",
- "sf_scale_whitefield": 0,
+ "cbd_std_data_file": "/sf/instrument/exp/00m_mustermann/res/aux_data/streakfinder_metadata.h5",
+ "cbd_std_dataset": "entry/crystallography/std",
+ "cbd_whitefield_data_file": "/sf/bernina/exp/00m_mustermann/res/aux_data/streakfinder_metadata.h5",
+ "cbd_whitefield_dataset": "entry/crystallography/whitefield",
+ "cbd_mask_data_file": "/sf/bernina/exp/00m_mustermann/res/aux_data/JF_mask.h5",
+ "cbd_mask_dataset": "mask_data",
+ "cbd_scale_whitefield": 0,
 "do_streakfinder_analysis": 1,
- "sf_negative_handler": "zero",
- "sf_peak_structure_radius": 2,
- "sf_peak_structure_rank": 2,
- "sf_peak_vmin": 50,
- "sf_npts": 10,
- "sf_streak_structure_radius": 6,
- "sf_streak_structure_rank": 4,
- "sf_xtol": 2.0,
- "sf_streak_vmin": 30,
- "sf_min_size": 25,
- "sf_nfa": 1,
- "sf_num_threads": 32,
- "sf_crop_roi": [0, 2107, 0, 2216]
+ 
"cbd_negative_handler": "zero", + "cbd_peak_structure_radius": 2, + "cbd_peak_structure_rank": 2, + "cbd_peak_vmin": 50, + "cbd_npts": 10, + "cbd_streak_structure_radius": 6, + "cbd_streak_structure_rank": 4, + "cbd_xtol": 2.0, + "cbd_streak_vmin": 30, + "cbd_min_size": 25, + "cbd_nfa": 1, + "cbd_num_threads": 32, + "cbd_crop_roi": [0, 2107, 0, 2216] } ``` # Acknowledgment diff --git a/dap/algos/streakfind.py b/dap/algos/streakfind.py index e307d71..fc91a28 100644 --- a/dap/algos/streakfind.py +++ b/dap/algos/streakfind.py @@ -33,7 +33,7 @@ def calc_streakfinder_analysis(results, data, pf_pixel_mask): if not do_snr and not do_streakfinder_analysis: return data - negative_val_handler = results.get("sf_negative_handler", "") + negative_val_handler = results.get("cbd_negative_handler", "") _handle_negative_values(data, pf_pixel_mask, negative_val_handler) try: @@ -55,24 +55,24 @@ def calc_streakfinder_analysis(results, data, pf_pixel_mask): def _generate_cryst_data(results, data, pf_pixel_mask) -> CrystData: params_required = [ - "sf_whitefield_data_file", - "sf_std_data_file", - "sf_scale_whitefield", # Bool + "cbd_whitefield_data_file", + "cbd_std_data_file", + "cbd_scale_whitefield", # Bool ] if not all(param in results.keys() for param in params_required): raise ValueError(f"ERROR: Not enough parameters for CBD correction. 
Skipping\n" f"{params_required=}") - whitefield_data_file = results["sf_whitefield_data_file"] - std_data_file = results["sf_std_data_file"] - scale_whitefield = results["sf_scale_whitefield"] + whitefield_data_file = results["cbd_whitefield_data_file"] + std_data_file = results["cbd_std_data_file"] + scale_whitefield = results["cbd_scale_whitefield"] # Using CXI Store specification as default - whitefield_dataset = results.get("sf_whitefield_dataset", "entry/crystallography/whitefield") - std_dataset = results.get("sf_std_dataset", "entry/crystallography/std") + whitefield_dataset = results.get("cbd_whitefield_dataset", "entry/crystallography/whitefield") + std_dataset = results.get("cbd_std_dataset", "entry/crystallography/std") - num_threads = results.get("sf_num_threads", DEFAULT_NUM_THREADS) + num_threads = results.get("cbd_num_threads", DEFAULT_NUM_THREADS) with h5py.File(whitefield_data_file, "r") as hf: whitefield = hf[whitefield_dataset][:] @@ -80,11 +80,11 @@ def _generate_cryst_data(results, data, pf_pixel_mask) -> CrystData: with h5py.File(std_data_file, "r") as hf: std = hf[std_dataset][:] - mask_data_file = results.get("sf_mask_data_file", None) + mask_data_file = results.get("cbd_mask_data_file", None) if mask_data_file is None: mask = pf_pixel_mask else: - mask_dataset = results.get("sf_mask_dataset", "entry/instrument/detector/mask") + mask_dataset = results.get("cbd_mask_dataset", "entry/instrument/detector/mask") with h5py.File(mask_data_file, "r") as hf: mask = hf[mask_dataset][:].astype(np.bool) mask *= pf_pixel_mask @@ -107,17 +107,17 @@ def _calc_streakfinder_analysis(results, cryst_data: CrystData): return params_required = [ - "sf_peak_structure_radius", - "sf_peak_structure_rank", - "sf_streak_structure_radius", - "sf_streak_structure_rank", - "sf_peak_vmin", - "sf_streak_vmin", - "sf_min_size", - "sf_npts", - "sf_xtol", - "sf_nfa", - "sf_num_threads", + "cbd_peak_structure_radius", + "cbd_peak_structure_rank", + 
"cbd_streak_structure_radius", + "cbd_streak_structure_rank", + "cbd_peak_vmin", + "cbd_streak_vmin", + "cbd_min_size", + "cbd_npts", + "cbd_xtol", + "cbd_nfa", + "cbd_num_threads", ] if not all(param in results.keys() for param in params_required): @@ -125,29 +125,29 @@ def _calc_streakfinder_analysis(results, cryst_data: CrystData): f"{params_required=}") return - peak_structure_radius = results["sf_peak_structure_radius"] # peak - peak_structure_rank = results["sf_peak_structure_rank"] - streak_structure_radius = results["sf_streak_structure_radius"] # streak - streak_structure_rank = results["sf_streak_structure_rank"] - peak_vmin = results["sf_peak_vmin"] # peak - streak_vmin = results["sf_streak_vmin"] # streak - min_size = results["sf_min_size"] - npts = results["sf_npts"] - xtol = results["sf_xtol"] - nfa = results["sf_nfa"] - num_threads = results["sf_num_threads"] + peak_structure_radius = results["cbd_peak_structure_radius"] # peak + peak_structure_rank = results["cbd_peak_structure_rank"] + streak_structure_radius = results["cbd_streak_structure_radius"] # streak + streak_structure_rank = results["cbd_streak_structure_rank"] + peak_vmin = results["cbd_peak_vmin"] # peak + streak_vmin = results["cbd_streak_vmin"] # streak + min_size = results["cbd_min_size"] + npts = results["cbd_npts"] + xtol = results["cbd_xtol"] + nfa = results["cbd_nfa"] + num_threads = results["cbd_num_threads"] - min_hit_streaks = results.get("sf_min_hit_streaks", DEFAULT_MIN_HIT_STREAKS) + min_hit_streaks = results.get("cbd_min_hit_streaks", DEFAULT_MIN_HIT_STREAKS) x_center = results.get("beam_center_x", None) y_center = results.get("beam_center_y", None) - mask_rois = results.get("sf_mask_rois", []) # list of [y_min, y_max, x_min, x_max] + mask_rois = results.get("cbd_mask_rois", []) # list of [y_min, y_max, x_min, x_max] for mask_roi in mask_rois: cryst_data = cryst_data.mask_region(mask_roi) - crop_roi = results.get("sf_crop_roi", None) # [y_min, y_max, x_min, x_max] + 
crop_roi = results.get("cbd_crop_roi", None) # [y_min, y_max, x_min, x_max] if crop_roi is not None: crop_roi_t = [crop_roi[2], crop_roi[3], crop_roi[0], crop_roi[1]]# y0, y1, x0, x1 -- 2.49.1