31 Commits
0.1.1 ... 0.2

Author SHA1 Message Date
2efbf3a64a fixed regression tests, file was archived 2024-03-13 17:20:01 +01:00
45b71789c8 fixed roi cutoffs 2024-03-13 16:28:36 +01:00
bdeb37b5e2 improved error handling in pulse to timestamp 2024-02-13 17:36:54 +01:00
cc893dcb22 added pulse id to datetime conversion 2024-02-01 13:36:47 +01:00
619d88bb7a added copy constructor to ROI 2023-12-07 10:05:36 +01:00
e960d2a11f toggle plot legend 2023-11-30 18:30:11 +01:00
9ff4b485ca and another fix for pgroup 2023-11-30 17:44:45 +01:00
9173e2fb6a bug fix in heuristic pgroup extraction 2023-11-30 17:42:33 +01:00
dd51585580 Update README.rst 2023-11-26 19:28:32 +01:00
1457839114 Update .gitlab-ci.yml file 2023-11-26 18:47:18 +01:00
3b49e9f2d8 Update .gitlab-ci.yml file 2023-11-26 18:34:31 +01:00
737c5aa8c0 added timestamp extraction for runs 2023-11-26 17:53:13 +01:00
2a2d449339 plot style template added 2023-11-26 02:46:24 +01:00
b3001316ef channel names and more plots 2023-11-26 01:52:09 +01:00
087b4406b8 added 1D Gauss fit with LMFIT 2023-11-22 23:08:02 +01:00
19eeee0b37 fixed stack sum threshold 2023-11-22 03:41:16 +01:00
be78d68819 added ROI calculation including tests 2023-11-14 16:20:52 +01:00
4d5263e620 modified jupyter helper for collaboration >=2.x 2023-11-14 16:20:13 +01:00
aa6190e9dd Added checker for the pid offset between two channels 2023-11-13 18:18:53 +01:00
ff87dbdd60 Removing stand table from testing, because github can not read /sf/ 2023-11-13 15:25:26 +01:00
97f683b73c Added Stand table reading in utils 2023-11-13 15:14:51 +01:00
6b58d30f76 added jupyter helper module for collaborative features 2023-09-27 17:27:34 +02:00
be01a75c68 cleaned up caching and small fixes 2023-09-14 11:29:52 +02:00
787c863cd1 added image calculation tests 2023-08-17 12:40:17 +02:00
4735301f0d added image calculation tests 2023-08-17 12:28:57 +02:00
4ff6072056 added metadata collection routine and tests 2023-08-16 18:05:43 +02:00
546295ae77 added image analysis tools and small updates throughout 2023-07-24 18:14:41 +02:00
2e17a6f8b3 Update tests/test_utils.py 2023-04-21 15:57:43 +00:00
99a3f51c57 fixed for lmfit 1.2 2023-04-21 17:55:29 +02:00
f13e36ce6d Revert "enabled xraydb tests" (reverts 53f7953d24) 2023-04-21 08:05:19 +00:00
53f7953d24 enabled xraydb tests 2023-04-21 09:33:33 +02:00
14 changed files with 1681 additions and 2509 deletions

View File

@@ -18,3 +18,18 @@ run_tests:
- coverage run -m pytest
- coverage report
coverage: '/TOTAL.*\s+(\d+\%)/'
pages:
stage: deploy
before_script:
- echo "$CI_BUILDS_DIR"
- source /home/gitlab-runner/miniconda3/bin/activate cristallina
script:
- tox -e docs
- mv docs/_build/html/ public/
artifacts:
paths:
- public
rules:
- if: $CI_COMMIT_REF_NAME == $CI_DEFAULT_BRANCH

View File

@@ -27,6 +27,14 @@
:alt: Project generated with PyScaffold
:target: https://pyscaffold.org/
.. image:: https://gitlab.psi.ch/sf_cristallina/cristallina/badges/master/pipeline.svg
:alt: pipeline status
:target: https://gitlab.psi.ch/sf_cristallina/cristallina/-/commits/master
.. image:: https://gitlab.psi.ch/sf_cristallina/cristallina/-/badges/release.svg
:alt: Latest Release
:target: https://gitlab.psi.ch/sf_cristallina/cristallina/-/releases
.. image:: https://gitlab.psi.ch/sf_cristallina/cristallina/badges/master/coverage.svg
:alt: Cristallina coverage report
:target: https://gitlab.psi.ch/sf_cristallina/cristallina/-/commits/master

File diff suppressed because one or more lines are too long

View File

@@ -19,3 +19,5 @@ finally:
from . import utils
from . import plot
from . import analysis
from . import image
from . import channels
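With the two new submodules wired into the package namespace, a minimal import sketch (assuming the package is installed as cristallina; ROI values illustrative):

import cristallina as cr

print(cr.channels.JF)  # 'JF16T03V01', the Jungfrau 1.5M detector
roi = cr.utils.ROI(left=10, right=20, bottom=100, top=200, name="sample")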

View File

@@ -1,5 +1,6 @@
import re
from collections import defaultdict
from collections import defaultdict, deque
from pathlib import Path
from typing import Optional
import numpy as np
@@ -10,14 +11,11 @@ import lmfit
from sfdata import SFDataFiles, sfdatafile, SFScanInfo
import joblib
from joblib import Parallel, delayed, Memory
from joblib import Memory
from . import utils
from .utils import ROI
memory = None
def setup_cachedirs(pgroup=None, cachedir=None):
"""
Sets the path to a persistent cache directory either from the given p-group (e.g. "p20841")
@@ -26,11 +24,10 @@ def setup_cachedirs(pgroup=None, cachedir=None):
If heuristics fail we use "/tmp" as a non-persistent alternative.
"""
global memory
if cachedir is not None:
# explicit directory given, use this choice
memory = Memory(cachedir, verbose=0, compress=2)
return
return memory
try:
if pgroup is None:
@@ -38,7 +35,15 @@ def setup_cachedirs(pgroup=None, cachedir=None):
else:
parts = re.split(r"(\d.*)", pgroup) # ['p', '2343', '']
pgroup_no = parts[-2]
cachedir = f"/das/work/units/cristallina/p{pgroup_no}/cachedir"
candidates = [f"/sf/cristallina/data/p{pgroup_no}/work",
f"/sf/cristallina/data/p{pgroup_no}/res",]
for cache_parent_dir in candidates:
if Path(cache_parent_dir).exists():
cachedir = Path(cache_parent_dir) / "cachedir"
break
except KeyError as e:
print(e)
cachedir = "/das/work/units/cristallina/p19739/cachedir"
@@ -48,14 +53,15 @@ def setup_cachedirs(pgroup=None, cachedir=None):
except PermissionError as e:
cachedir = "/tmp"
memory = Memory(cachedir, verbose=0, compress=2)
return memory
setup_cachedirs()
memory = setup_cachedirs()
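setup_cachedirs() now returns the joblib Memory object instead of only setting the module global. A short sketch of explicit use (the p-group is the docstring's example; the override directory is illustrative):

from cristallina import analysis

memory = analysis.setup_cachedirs(pgroup="p20841")        # persistent cache in the p-group's work/res dir
memory = analysis.setup_cachedirs(cachedir="/tmp/my_cachedir")  # or override explicitly
memory.clear()  # joblib.Memory: drop all cached results if needed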
@memory.cache(ignore=["batch_size"]) # we ignore batch_size for caching purposes
def perform_image_calculations(
fileset,
filesets,
channel="JF16T03V01",
alignment_channels=None,
batch_size=10,
@@ -65,7 +71,7 @@ def perform_image_calculations(
):
"""
Performs one or more calculations ("sum", "mean" or "std") for a given region of interest (roi)
for an image channel from a fileset (e.g. "run0352/data/acq0001.*.h5" or step.fnames from a SFScanInfo object).
for an image channel from a fileset (e.g. ["run0352/data/acq0001.*.h5"] or step.fnames from a SFScanInfo object).
Allows alignment, i.e. reducing only to a common subset with other channels.
@@ -83,7 +89,7 @@ def perform_image_calculations(
"std": ["mean", np.std],
}
with SFDataFiles(*fileset) as data:
with SFDataFiles(*filesets) as data:
if alignment_channels is not None:
channels = [channel] + [ch for ch in alignment_channels]
else:
@@ -122,7 +128,7 @@ def perform_image_calculations(
@memory.cache(ignore=["batch_size"]) # we ignore batch_size for caching purposes
def sum_images(
fileset,
filesets,
channel="JF16T03V01",
alignment_channels=None,
batch_size=10,
@@ -144,7 +150,7 @@ def sum_images(
"""
return perform_image_calculations(
fileset,
filesets,
channel=channel,
alignment_channels=alignment_channels,
batch_size=batch_size,
@@ -155,7 +161,7 @@ def sum_images(
def get_contrast_images(
fileset,
filesets,
channel="JF16T03V01",
alignment_channels=None,
batch_size=10,
@@ -167,7 +173,7 @@ def get_contrast_images(
"""
return perform_image_calculations(
fileset,
filesets,
channel=channel,
alignment_channels=alignment_channels,
batch_size=batch_size,
@@ -177,6 +183,143 @@ def get_contrast_images(
)
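The fileset argument is now expected as a list (or step.fnames); a hedged call sketch for the renamed parameter (run path illustrative, return key per the docstring):

from cristallina.analysis import sum_images

result = sum_images(["run0352/data/acq0001.*.h5"], channel="JF16T03V01")
intensities = result["JF16T03V01_intensity"]  # one summed value per pulse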
@memory.cache(ignore=["batch_size"]) # we ignore batch_size for caching purposes
def perform_image_stack_sum(
filesets,
channel="JF16T03V01",
alignment_channels=None,
batch_size=10,
roi: Optional[ROI] = None,
preview=False,
# operations=["sum"],
lower_cutoff_threshold=None,
):
"""
Performs one or more calculations ("sum", "mean" or "std") for a given region of interest (roi)
for an image channel from a fileset (e.g. "run0352/data/acq0001.*.h5" or step.fnames from a SFScanInfo object).
Allows alignment, i.e. reducing only to a common subset with other channels.
Calculations are performed in batches to reduce maximum memory requirements.
Preview only applies calculation to first batch and returns.
Returns a dictionary ({"JF16T03V01_intensity":[11, 18, 21, 55, ...]})
with the given channel values for each pulse and corresponding pulse id.
"""
possible_operations = {
"sum": ["intensity", np.sum],
"mean": ["mean", np.mean],
"std": ["mean", np.std],
}
with SFDataFiles(*filesets) as data:
if alignment_channels is not None:
channels = [channel] + [ch for ch in alignment_channels]
else:
channels = [channel]
subset = data[channels]
subset.drop_missing()
Images = subset[channel]
# create empty array for stack sum with right shape
im = Images[0]
if roi is None:
im_ROI = im[:]
else:
im_ROI = im[roi.rows, roi.cols]
summed = np.zeros(im_ROI.shape)
for image_slice in Images.in_batches(batch_size):
index_slice, im = image_slice
if roi is None:
im_ROI = im
else:
im_ROI = im[:, roi.rows, roi.cols]
if lower_cutoff_threshold is not None:
im_ROI = np.where(im_ROI < lower_cutoff_threshold, 0, im_ROI)
summed = summed + np.sum(im_ROI, axis=(0))
# only return first batch
if preview:
break
return summed
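A usage sketch for the new stack sum (ROI bounds, threshold, and run path are illustrative):

from cristallina.analysis import perform_image_stack_sum
from cristallina.utils import ROI

roi = ROI(left=100, right=200, bottom=50, top=150, name="signal")
summed = perform_image_stack_sum(
    ["run0352/data/acq0001.*.h5"],
    channel="JF16T03V01",
    roi=roi,
    lower_cutoff_threshold=5,  # zero out pixels below threshold before summing
)
# summed is a single 2D array with the ROI's shape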
@memory.cache(ignore=["batch_size"]) # we ignore batch_size for caching purposes
def perform_image_roi_crop(
filesets,
channel="JF16T03V01",
alignment_channels=None,
batch_size=10,
roi: Optional[ROI] = None,
preview=False,
lower_cutoff=None,
upper_cutoff=np.inf,
):
"""
Cuts out a region of interest (ROI) for an image channel
from a fileset (e.g. "run0352/data/acq0001.*.h5" or step.fnames from a SFScanInfo object).
Drops missing data from output and allows alignment,
i.e. reducing only to a common subset with other channels.
Calculations are performed in batches to reduce maximum memory requirements.
Lower and upper cutoffs allow thresholding the data.
Preview only applies the calculation to the first batch and returns.
Returns: a 1D array (along the recorded pulses, with missing data dropped) of 2D images.
Beware though: this can create a rather large array that exceeds available memory.
TODO: should we create a complete channel here instead of returning `raw` data?
"""
with SFDataFiles(*filesets) as data:
if alignment_channels is not None:
channels = [channel] + [ch for ch in alignment_channels]
else:
channels = [channel]
subset = data[channels]
subset.drop_missing()
Images = subset[channel]
rois_within_batch = list()
for image_slice in Images.in_batches(batch_size):
index_slice, im = image_slice
if roi is None:
im_ROI = im
else:
im_ROI = im[:, roi.rows, roi.cols]
if lower_cutoff is not None:
im_ROI = np.where((im_ROI < lower_cutoff) | (im_ROI > upper_cutoff), 0, im_ROI)
rois_within_batch.extend(im_ROI)
# only return first batch
if preview:
break
return np.array(rois_within_batch)
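A usage sketch for the ROI crop (values illustrative); preview=True keeps the returned stack small while inspecting the data:

from cristallina.analysis import perform_image_roi_crop
from cristallina.utils import ROI

roi = ROI(left=100, right=200, bottom=50, top=150, name="signal")
stack = perform_image_roi_crop(
    ["run0352/data/acq0001.*.h5"],
    channel="JF16T03V01",
    roi=roi,
    preview=True,  # only the first batch
)
print(stack.shape)  # (n_pulses, roi.height, roi.width)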
def fit_2d_gaussian(image, roi: Optional[ROI] = None, plot=False):
"""
2D Gaussian fit using LMFit for a given image and an optional region of interest.
@@ -204,7 +347,7 @@ def fit_2d_gaussian(image, roi: Optional[ROI] = None, plot=False):
z = im.ravel() # and this also as a 1D
model = lmfit.models.Gaussian2dModel()
params = model.guess(z, x, y)
params = model.guess(z.astype('float'), x.astype('float'), y.astype('float'))
result = model.fit(
z,
x=x,
@@ -405,3 +548,35 @@ def fit_2d_gaussian_rotated(
_plot_2d_gaussian_fit(im, z, mod, result)
return center_x, center_y, result
def fit_1d_gaussian(x, y, use_offset=True, ax=None, print_results=False):
"""
1D-Gaussian fit with optional constant offset using LMFIT.
Uses a heuristic guess for initial parameters.
Returns: lmfit.model.ModelResult
"""
peak = lmfit.models.GaussianModel()
offset = lmfit.models.ConstantModel()
model = peak + offset
if use_offset:
pars = offset.make_params(c = np.median(y))
else:
pars = offset.make_params(c=0)
pars['c'].vary = False
pars += peak.guess(y, x, amplitude=(np.max(y)-np.min(y))/2)
result = model.fit(y, pars, x=x,)
if print_results:
print(result.fit_report())
if ax is not None:
ax.plot(x, result.best_fit, label='fit')
return result
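A self-contained sketch of the new 1D fit on synthetic data (parameter names follow lmfit's GaussianModel):

import numpy as np
from cristallina.analysis import fit_1d_gaussian

x = np.linspace(-5, 5, 201)
y = 3.0 * np.exp(-0.5 * (x - 1.0) ** 2 / 0.8**2) + 0.5 + 0.05 * np.random.randn(x.size)

result = fit_1d_gaussian(x, y, use_offset=True, print_results=True)
print(result.params["center"].value, result.params["sigma"].value)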

View File

@@ -0,0 +1,15 @@
# Machine parameters
GASMONITOR = 'SARFE10-PBIG050-EVR0:CALCI'
PULSE_E_AVG = 'SARFE10-PBPG050:PHOTON-ENERGY-PER-PULSE-AVG'
# Event receiver
EVR = 'SAR-CVME-TIFALL6:EvtSet'
# Jungfrau 1.5M detector
JF = 'JF16T03V01'
# PSSS
PSSS_X = 'SARFE10-PSSS059:SPECTRUM_X'
PSSS_Y = 'SARFE10-PSSS059:SPECTRUM_Y'
PSSS_SUM = 'SARFE10-PSSS059:SPECTRUM_Y_SUM'
PSSS_COM = 'SARFE10-PSSS059:SPECT-COM'
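These constants can be used wherever a channel-name string is expected, e.g. with sfdata (run path illustrative):

from sfdata import SFDataFiles
from cristallina import channels

with SFDataFiles("run0352/data/acq0001.*.h5") as data:
    subset = data[[channels.GASMONITOR, channels.PSSS_COM]]
    subset.drop_missing()
    gas = subset[channels.GASMONITOR].data  # per-pulse gas monitor values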

View File

@@ -0,0 +1,532 @@
#### MATPLOTLIBRC FORMAT
## ***************************************************************************
## * LINES *
## ***************************************************************************
## See https://matplotlib.org/stable/api/artist_api.html#module-matplotlib.lines
## for more information on line properties.
#lines.linewidth: 1.5 # line width in points
#lines.linestyle: - # solid line
#lines.color: C0 # has no affect on plot(); see axes.prop_cycle
#lines.marker: None # the default marker
#lines.markerfacecolor: auto # the default marker face color
#lines.markeredgecolor: auto # the default marker edge color
#lines.markeredgewidth: 1.0 # the line width around the marker symbol
#lines.markersize: 6 # marker size, in points
#lines.dash_joinstyle: round # {miter, round, bevel}
#lines.dash_capstyle: butt # {butt, round, projecting}
#lines.solid_joinstyle: round # {miter, round, bevel}
#lines.solid_capstyle: projecting # {butt, round, projecting}
#lines.antialiased: True # render lines in antialiased (no jaggies)
## The three standard dash patterns. These are scaled by the linewidth.
#lines.dashed_pattern: 3.7, 1.6
#lines.dashdot_pattern: 6.4, 1.6, 1, 1.6
#lines.dotted_pattern: 1, 1.65
#lines.scale_dashes: True
#markers.fillstyle: full # {full, left, right, bottom, top, none}
#pcolor.shading: auto
#pcolormesh.snap: True # Whether to snap the mesh to pixel boundaries. This is
# provided solely to allow old test images to remain
# unchanged. Set to False to obtain the previous behavior.
## ***************************************************************************
## * PATCHES *
## ***************************************************************************
## Patches are graphical objects that fill 2D space, like polygons or circles.
## See https://matplotlib.org/stable/api/artist_api.html#module-matplotlib.patches
## for more information on patch properties.
#patch.linewidth: 1.0 # edge width in points.
#patch.facecolor: C0
#patch.edgecolor: black # if forced, or patch is not filled
#patch.force_edgecolor: False # True to always use edgecolor
#patch.antialiased: True # render patches in antialiased (no jaggies)
## ***************************************************************************
## * HATCHES *
## ***************************************************************************
#hatch.color: black
#hatch.linewidth: 1.0
## ***************************************************************************
## * FONT *
## ***************************************************************************
## The font properties used by `text.Text`.
## See https://matplotlib.org/stable/api/font_manager_api.html for more information
## on font properties. The 6 font properties used for font matching are
## given below with their default values.
##
## The font.family property can take either a single or multiple entries of any
## combination of concrete font names (not supported when rendering text with
## usetex) or the following five generic values:
## - 'serif' (e.g., Times),
## - 'sans-serif' (e.g., Helvetica),
## - 'cursive' (e.g., Zapf-Chancery),
## - 'fantasy' (e.g., Western), and
## - 'monospace' (e.g., Courier).
## Each of these values has a corresponding default list of font names
## (font.serif, etc.); the first available font in the list is used. Note that
## for font.serif, font.sans-serif, and font.monospace, the first element of
## the list (a DejaVu font) will always be used because DejaVu is shipped with
## Matplotlib and is thus guaranteed to be available; the other entries are
## left as examples of other possible values.
##
## The font.style property has three values: normal (or roman), italic
## or oblique. The oblique style will be used for italic, if it is not
## present.
##
## The font.variant property has two values: normal or small-caps. For
## TrueType fonts, which are scalable fonts, small-caps is equivalent
## to using a font size of 'smaller', or about 83 % of the current font
## size.
##
## The font.weight property has effectively 13 values: normal, bold,
## bolder, lighter, 100, 200, 300, ..., 900. Normal is the same as
## 400, and bold is 700. bolder and lighter are relative values with
## respect to the current weight.
##
## The font.stretch property has 11 values: ultra-condensed,
## extra-condensed, condensed, semi-condensed, normal, semi-expanded,
## expanded, extra-expanded, ultra-expanded, wider, and narrower. This
## property is not currently implemented.
##
## The font.size property is the default font size for text, given in points.
## 10 pt is the standard value.
##
## Note that font.size controls default text sizes. To configure
## special text sizes tick labels, axes, labels, title, etc., see the rc
## settings for axes and ticks. Special text sizes can be defined
## relative to font.size, using the following values: xx-small, x-small,
## small, medium, large, x-large, xx-large, larger, or smaller
font.family: sans-serif
#font.style: normal
#font.variant: normal
#font.weight: normal
#font.stretch: normal
#font.size: 10.0
#font.serif: DejaVu Serif, Bitstream Vera Serif, Computer Modern Roman, New Century Schoolbook, Century Schoolbook L, Utopia, ITC Bookman, Bookman, Nimbus Roman No9 L, Times New Roman, Times, Palatino, Charter, serif
font.sans-serif: Roboto, DejaVu Sans, Bitstream Vera Sans, Computer Modern Sans Serif, Lucida Grande, Verdana, Geneva, Lucid, Arial, Helvetica, Avant Garde, sans-serif
#font.cursive: Apple Chancery, Textile, Zapf Chancery, Sand, Script MT, Felipa, Comic Neue, Comic Sans MS, cursive
#font.fantasy: Chicago, Charcoal, Impact, Western, xkcd script, fantasy
#font.monospace: DejaVu Sans Mono, Bitstream Vera Sans Mono, Computer Modern Typewriter, Andale Mono, Nimbus Mono L, Courier New, Courier, Fixed, Terminal, monospace
## ***************************************************************************
## * TEXT *
## ***************************************************************************
## The text properties used by `text.Text`.
## See https://matplotlib.org/stable/api/artist_api.html#module-matplotlib.text
## for more information on text properties
#text.color: black
## FreeType hinting flag ("foo" corresponds to FT_LOAD_FOO); may be one of the
## following (Proprietary Matplotlib-specific synonyms are given in parentheses,
## but their use is discouraged):
## - default: Use the font's native hinter if possible, else FreeType's auto-hinter.
## ("either" is a synonym).
## - no_autohint: Use the font's native hinter if possible, else don't hint.
## ("native" is a synonym.)
## - force_autohint: Use FreeType's auto-hinter. ("auto" is a synonym.)
## - no_hinting: Disable hinting. ("none" is a synonym.)
#text.hinting: force_autohint
#text.hinting_factor: 8 # Specifies the amount of softness for hinting in the
# horizontal direction. A value of 1 will hint to full
# pixels. A value of 2 will hint to half pixels etc.
#text.kerning_factor: 0 # Specifies the scaling factor for kerning values. This
# is provided solely to allow old test images to remain
# unchanged. Set to 6 to obtain previous behavior.
# Values other than 0 or 6 have no defined meaning.
#text.antialiased: True # If True (default), the text will be antialiased.
# This only affects raster outputs.
#text.parse_math: True # Use mathtext if there is an even number of unescaped
# dollar signs.
## ***************************************************************************
## * LaTeX *
## ***************************************************************************
## For more information on LaTeX properties, see
## https://matplotlib.org/stable/users/explain/text/usetex.html
#text.usetex: False # use latex for all text handling. The following fonts
# are supported through the usual rc parameter settings:
# new century schoolbook, bookman, times, palatino,
# zapf chancery, charter, serif, sans-serif, helvetica,
# avant garde, courier, monospace, computer modern roman,
# computer modern sans serif, computer modern typewriter
#text.latex.preamble: # IMPROPER USE OF THIS FEATURE WILL LEAD TO LATEX FAILURES
# AND IS THEREFORE UNSUPPORTED. PLEASE DO NOT ASK FOR HELP
# IF THIS FEATURE DOES NOT DO WHAT YOU EXPECT IT TO.
# text.latex.preamble is a single line of LaTeX code that
# will be passed on to the LaTeX system. It may contain
# any code that is valid for the LaTeX "preamble", i.e.
# between the "\documentclass" and "\begin{document}"
# statements.
# Note that it has to be put on a single line, which may
# become quite long.
# The following packages are always loaded with usetex,
# so beware of package collisions:
# geometry, inputenc, type1cm.
# PostScript (PSNFSS) font packages may also be
# loaded, depending on your font settings.
## The following settings allow you to select the fonts in math mode.
#mathtext.fontset: dejavusans # Should be 'dejavusans' (default),
# 'dejavuserif', 'cm' (Computer Modern), 'stix',
# 'stixsans' or 'custom'
## "mathtext.fontset: custom" is defined by the mathtext.bf, .cal, .it, ...
## settings which map a TeX font name to a fontconfig font pattern. (These
## settings are not used for other font sets.)
#mathtext.bf: sans:bold
#mathtext.bfit: sans:italic:bold
#mathtext.cal: cursive
#mathtext.it: sans:italic
#mathtext.rm: sans
#mathtext.sf: sans
#mathtext.tt: monospace
#mathtext.fallback: cm # Select fallback font from ['cm' (Computer Modern), 'stix'
# 'stixsans'] when a symbol cannot be found in one of the
# custom math fonts. Select 'None' to not perform fallback
# and replace the missing character by a dummy symbol.
#mathtext.default: it # The default font to use for math.
# Can be any of the LaTeX font names, including
# the special name "regular" for the same font
# used in regular text.
## ***************************************************************************
## * AXES *
## ***************************************************************************
## Following are default face and edge colors, default tick sizes,
## default font sizes for tick labels, and so on. See
## https://matplotlib.org/stable/api/axes_api.html#module-matplotlib.axes
#axes.facecolor: white # axes background color
#axes.edgecolor: black # axes edge color
#axes.linewidth: 0.8 # edge line width
#axes.grid: False # display grid or not
#axes.grid.axis: both # which axis the grid should apply to
#axes.grid.which: major # grid lines at {major, minor, both} ticks
#axes.titlelocation: center # alignment of the title: {left, right, center}
#axes.titlesize: large # font size of the axes title
#axes.titleweight: normal # font weight of title
#axes.titlecolor: auto # color of the axes title, auto falls back to
# text.color as default value
#axes.titley: None # position title (axes relative units). None implies auto
#axes.titlepad: 6.0 # pad between axes and title in points
#axes.labelsize: medium # font size of the x and y labels
#axes.labelpad: 4.0 # space between label and axis
#axes.labelweight: normal # weight of the x and y labels
#axes.labelcolor: black
#axes.axisbelow: line # draw axis gridlines and ticks:
# - below patches (True)
# - above patches but below lines ('line')
# - above all (False)
#axes.formatter.limits: -5, 6 # use scientific notation if log10
# of the axis range is smaller than the
# first or larger than the second
#axes.formatter.use_locale: False # When True, format tick labels
# according to the user's locale.
# For example, use ',' as a decimal
# separator in the fr_FR locale.
#axes.formatter.use_mathtext: False # When True, use mathtext for scientific
# notation.
#axes.formatter.min_exponent: 0 # minimum exponent to format in scientific notation
#axes.formatter.useoffset: True # If True, the tick label formatter
# will default to labeling ticks relative
# to an offset when the data range is
# small compared to the minimum absolute
# value of the data.
#axes.formatter.offset_threshold: 4 # When useoffset is True, the offset
# will be used when it can remove
# at least this number of significant
# digits from tick labels.
#axes.spines.left: True # display axis spines
#axes.spines.bottom: True
#axes.spines.top: True
#axes.spines.right: True
#axes.unicode_minus: True # use Unicode for the minus symbol rather than hyphen. See
# https://en.wikipedia.org/wiki/Plus_and_minus_signs#Character_codes
#axes.prop_cycle: cycler('color', ['1f77b4', 'ff7f0e', '2ca02c', 'd62728', '9467bd', '8c564b', 'e377c2', '7f7f7f', 'bcbd22', '17becf'])
# color cycle for plot lines as list of string color specs:
# single letter, long name, or web-style hex
# As opposed to all other parameters in this file, the color
# values must be enclosed in quotes for this parameter,
# e.g. '1f77b4', instead of 1f77b4.
# See also https://matplotlib.org/stable/users/explain/artists/color_cycle.html
# for more details on prop_cycle usage.
#axes.xmargin: .05 # x margin. See `axes.Axes.margins`
#axes.ymargin: .05 # y margin. See `axes.Axes.margins`
#axes.zmargin: .05 # z margin. See `axes.Axes.margins`
#axes.autolimit_mode: data # If "data", use axes.xmargin and axes.ymargin as is.
# If "round_numbers", after application of margins, axis
# limits are further expanded to the nearest "round" number.
#polaraxes.grid: True # display grid on polar axes
#axes3d.grid: True # display grid on 3D axes
#axes3d.xaxis.panecolor: (0.95, 0.95, 0.95, 0.5) # background pane on 3D axes
#axes3d.yaxis.panecolor: (0.90, 0.90, 0.90, 0.5) # background pane on 3D axes
#axes3d.zaxis.panecolor: (0.925, 0.925, 0.925, 0.5) # background pane on 3D axes
## ***************************************************************************
## * AXIS *
## ***************************************************************************
#xaxis.labellocation: center # alignment of the xaxis label: {left, right, center}
#yaxis.labellocation: center # alignment of the yaxis label: {bottom, top, center}
## ***************************************************************************
## * DATES *
## ***************************************************************************
## These control the default format strings used in AutoDateFormatter.
## Any valid format datetime format string can be used (see the python
## `datetime` for details). For example, by using:
## - '%x' will use the locale date representation
## - '%X' will use the locale time representation
## - '%c' will use the full locale datetime representation
## These values map to the scales:
## {'year': 365, 'month': 30, 'day': 1, 'hour': 1/24, 'minute': 1 / (24 * 60)}
#date.autoformatter.year: %Y
#date.autoformatter.month: %Y-%m
#date.autoformatter.day: %Y-%m-%d
#date.autoformatter.hour: %m-%d %H
#date.autoformatter.minute: %d %H:%M
#date.autoformatter.second: %H:%M:%S
#date.autoformatter.microsecond: %M:%S.%f
## The reference date for Matplotlib's internal date representation
## See https://matplotlib.org/stable/gallery/ticks/date_precision_and_epochs.html
#date.epoch: 1970-01-01T00:00:00
## 'auto', 'concise':
#date.converter: auto
## For auto converter whether to use interval_multiples:
#date.interval_multiples: True
## ***************************************************************************
## * TICKS *
## ***************************************************************************
## See https://matplotlib.org/stable/api/axis_api.html#matplotlib.axis.Tick
#xtick.top: False # draw ticks on the top side
#xtick.bottom: True # draw ticks on the bottom side
#xtick.labeltop: True # draw label on the top
#xtick.labelbottom: True # draw label on the bottom
#xtick.major.size: 3.5 # major tick size in points
#xtick.minor.size: 2 # minor tick size in points
#xtick.major.width: 0.8 # major tick width in points
#xtick.minor.width: 0.6 # minor tick width in points
#xtick.major.pad: 3.5 # distance to major tick label in points
#xtick.minor.pad: 3.4 # distance to the minor tick label in points
#xtick.color: black # color of the ticks
#xtick.labelcolor: inherit # color of the tick labels or inherit from xtick.color
#xtick.labelsize: medium # font size of the tick labels
xtick.direction: in # direction: {in, out, inout}
#xtick.minor.visible: False # visibility of minor ticks on x-axis
xtick.major.top: True # draw x axis top major ticks
xtick.major.bottom: True # draw x axis bottom major ticks
#xtick.minor.top: True # draw x axis top minor ticks
#xtick.minor.bottom: True # draw x axis bottom minor ticks
#xtick.minor.ndivs: auto # number of minor ticks between the major ticks on x-axis
#xtick.alignment: center # alignment of xticks
ytick.left: True # draw ticks on the left side
ytick.right: True # draw ticks on the right side
#ytick.labelleft: True # draw tick labels on the left side
#ytick.labelright: False # draw tick labels on the right side
#ytick.major.size: 3.5 # major tick size in points
#ytick.minor.size: 2 # minor tick size in points
#ytick.major.width: 0.8 # major tick width in points
#ytick.minor.width: 0.6 # minor tick width in points
#ytick.major.pad: 3.5 # distance to major tick label in points
#ytick.minor.pad: 3.4 # distance to the minor tick label in points
#ytick.color: black # color of the ticks
#ytick.labelcolor: inherit # color of the tick labels or inherit from ytick.color
#ytick.labelsize: medium # font size of the tick labels
ytick.direction: in # direction: {in, out, inout}
#ytick.minor.visible: False # visibility of minor ticks on y-axis
#ytick.major.left: True # draw y axis left major ticks
#ytick.major.right: True # draw y axis right major ticks
#ytick.minor.left: True # draw y axis left minor ticks
#ytick.minor.right: True # draw y axis right minor ticks
#ytick.minor.ndivs: auto # number of minor ticks between the major ticks on y-axis
#ytick.alignment: center_baseline # alignment of yticks
## ***************************************************************************
## * GRIDS *
## ***************************************************************************
#grid.color: "#b0b0b0" # grid color
#grid.linestyle: - # solid
#grid.linewidth: 0.8 # in points
#grid.alpha: 1.0 # transparency, between 0.0 and 1.0
## ***************************************************************************
## * LEGEND *
## ***************************************************************************
#legend.loc: best
#legend.frameon: True # if True, draw the legend on a background patch
legend.framealpha: 1 # legend patch transparency
#legend.facecolor: inherit # inherit from axes.facecolor; or color spec
#legend.edgecolor: 0.8 # background patch boundary color
#legend.fancybox: True # if True, use a rounded box for the
# legend background, else a rectangle
#legend.shadow: False # if True, give background a shadow effect
#legend.numpoints: 1 # the number of marker points in the legend line
#legend.scatterpoints: 1 # number of scatter points
#legend.markerscale: 1.0 # the relative size of legend markers vs. original
legend.fontsize: small
#legend.labelcolor: None
#legend.title_fontsize: None # None sets to the same as the default axes.
## Dimensions as fraction of font size:
#legend.borderpad: 0.4 # border whitespace
#legend.labelspacing: 0.5 # the vertical space between the legend entries
#legend.handlelength: 2.0 # the length of the legend lines
#legend.handleheight: 0.7 # the height of the legend handle
#legend.handletextpad: 0.8 # the space between the legend line and legend text
#legend.borderaxespad: 0.5 # the border between the axes and legend edge
#legend.columnspacing: 2.0 # column separation
## ***************************************************************************
## * FIGURE *
## ***************************************************************************
## See https://matplotlib.org/stable/api/figure_api.html#matplotlib.figure.Figure
#figure.titlesize: large # size of the figure title (``Figure.suptitle()``)
#figure.titleweight: normal # weight of the figure title
#figure.labelsize: large # size of the figure label (``Figure.sup[x|y]label()``)
#figure.labelweight: normal # weight of the figure label
#figure.figsize: 6.4, 4.8 # figure size in inches
figure.dpi: 110 # figure dots per inch
#figure.facecolor: white # figure face color
#figure.edgecolor: white # figure edge color
#figure.frameon: True # enable figure frame
#figure.max_open_warning: 20 # The maximum number of figures to open through
# the pyplot interface before emitting a warning.
# If less than one this feature is disabled.
#figure.raise_window : True # Raise the GUI window to front when show() is called.
## The figure subplot parameters. All dimensions are a fraction of the figure width and height.
#figure.subplot.left: 0.125 # the left side of the subplots of the figure
#figure.subplot.right: 0.9 # the right side of the subplots of the figure
#figure.subplot.bottom: 0.11 # the bottom of the subplots of the figure
#figure.subplot.top: 0.88 # the top of the subplots of the figure
#figure.subplot.wspace: 0.2 # the amount of width reserved for space between subplots,
# expressed as a fraction of the average axis width
#figure.subplot.hspace: 0.2 # the amount of height reserved for space between subplots,
# expressed as a fraction of the average axis height
## Figure layout
#figure.autolayout: False # When True, automatically adjust subplot
# parameters to make the plot fit the figure
# using `tight_layout`
figure.constrained_layout.use: True # When True, automatically make plot
# elements fit on the figure. (Not
# compatible with `autolayout`, above).
## Padding (in inches) around axes; defaults to 3/72 inches, i.e. 3 points.
#figure.constrained_layout.h_pad: 0.04167
#figure.constrained_layout.w_pad: 0.04167
## Spacing between subplots, relative to the subplot sizes. Much smaller than for
## tight_layout (figure.subplot.hspace, figure.subplot.wspace) as constrained_layout
## already takes surrounding texts (titles, labels, # ticklabels) into account.
#figure.constrained_layout.hspace: 0.02
#figure.constrained_layout.wspace: 0.02
## ***************************************************************************
## * IMAGES *
## ***************************************************************************
#image.aspect: equal # {equal, auto} or a number
#image.interpolation: antialiased # see help(imshow) for options
#image.cmap: viridis # A colormap name (plasma, magma, etc.)
#image.lut: 256 # the size of the colormap lookup table
#image.origin: upper # {lower, upper}
#image.resample: True
#image.composite_image: True # When True, all the images on a set of axes are
# combined into a single composite image before
# saving a figure as a vector graphics file,
# such as a PDF.
## ***************************************************************************
## * CONTOUR PLOTS *
## ***************************************************************************
#contour.negative_linestyle: dashed # string or on-off ink sequence
#contour.corner_mask: True # {True, False}
#contour.linewidth: None # {float, None} Size of the contour line
# widths. If set to None, it falls back to
# `line.linewidth`.
#contour.algorithm: mpl2014 # {mpl2005, mpl2014, serial, threaded}
## ***************************************************************************
## * ERRORBAR PLOTS *
## ***************************************************************************
#errorbar.capsize: 0 # length of end cap on error bars in pixels
## ***************************************************************************
## * HISTOGRAM PLOTS *
## ***************************************************************************
#hist.bins: 10 # The default number of histogram bins or 'auto'.
## ***************************************************************************
## * SCATTER PLOTS *
## ***************************************************************************
#scatter.marker: o # The default marker type for scatter plots.
#scatter.edgecolors: face # The default edge colors for scatter plots.
## ***************************************************************************
## * PATHS *
## ***************************************************************************
#path.simplify: True # When True, simplify paths by removing "invisible"
# points to reduce file size and increase rendering
# speed
#path.simplify_threshold: 0.111111111111 # The threshold of similarity below
# which vertices will be removed in
# the simplification process.
#path.snap: True # When True, rectilinear axis-aligned paths will be snapped
# to the nearest pixel when certain criteria are met.
# When False, paths will never be snapped.
#path.sketch: None # May be None, or a 3-tuple of the form:
# (scale, length, randomness).
# - *scale* is the amplitude of the wiggle
# perpendicular to the line (in pixels).
# - *length* is the length of the wiggle along the
# line (in pixels).
# - *randomness* is the factor by which the length is
# randomly scaled.
#path.effects:
## ***************************************************************************
## * SAVING FIGURES *
## ***************************************************************************
## The default savefig parameters can be different from the display parameters
## e.g., you may want a higher resolution, or to make the figure
## background white
#savefig.dpi: figure # figure dots per inch or 'figure'
#savefig.facecolor: auto # figure face color when saving
#savefig.edgecolor: auto # figure edge color when saving
#savefig.format: png # {png, ps, pdf, svg}
#savefig.bbox: standard # {tight, standard}
# 'tight' is incompatible with generating frames
# for animation
#savefig.pad_inches: 0.1 # padding to be used, when bbox is set to 'tight'
#savefig.directory: ~ # default directory in savefig dialog, gets updated after
# interactive saves, unless set to the empty string (i.e.
# the current directory); use '.' to start at the current
# directory but update after interactive saves
#savefig.transparent: False # whether figures are saved with a transparent
# background by default
#savefig.orientation: portrait # orientation of saved figure, for PostScript output only
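This template is activated in cristallina.plot via the package-style lookup; a sketch of applying it manually (assumes matplotlib's package-style name resolution, >= 3.7):

import matplotlib.pyplot as plt

plt.style.use("cristallina.cristallina_style")  # as done in cristallina.plot

fig, ax = plt.subplots()
ax.plot([0, 1, 2], [0, 1, 4], label="demo")
ax.legend()  # opaque frame and small font come from the style above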

src/cristallina/image.py (new file, 46 lines)
View File

@@ -0,0 +1,46 @@
import numpy as np
import matplotlib
from matplotlib import pyplot as plt
from skimage import exposure
from skimage.filters import rank
from skimage.morphology import disk
def plot_image(image, norm='linear', cmap=plt.cm.viridis, title="Image", ax=None):
""" Plots an image (array-like or PIL image) using some default settings.
"""
if ax is None:
fig, ax = plt.subplots(constrained_layout=True, dpi=150)
ax.imshow(image, origin='lower', cmap=cmap, norm=norm)
ax.set_title(title)
def enhance_image(image, algorithm='autolevel', radius=5):
""" Enhanced a given image (2d array) using one out of
'equalize_hist'
'entropy'
'autolevel'
'global_equalize'
algorithms with a given (pixel) radius.
"""
arr_norm = np.clip(image, 0, np.max(image))
arr_norm = arr_norm / (np.max(arr_norm) - np.min(arr_norm))  # normalize the clipped array
if algorithm == 'equalize_hist':
img_algo = exposure.equalize_adapthist(arr_norm, kernel_size=radius, clip_limit=0.99)
elif algorithm == 'entropy':
img_algo = rank.entropy(arr_norm, footprint=disk(radius*2))
elif algorithm == 'autolevel':
img_algo = rank.autolevel(arr_norm, disk(radius*2))
elif algorithm == 'global_equalize':
img_algo = rank.equalize(arr_norm, footprint=disk(radius*2))
return img_algo
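A short sketch of the new helpers on synthetic data (algorithm choice and radius are illustrative):

import numpy as np
from cristallina.image import enhance_image, plot_image

rng = np.random.default_rng(0)
img = rng.poisson(lam=3.0, size=(256, 256)).astype(float)

enhanced = enhance_image(img, algorithm="equalize_hist", radius=8)
plot_image(enhanced, title="enhanced")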

View File

@@ -0,0 +1,31 @@
""" Helper module to address https://github.com/jupyterlab/jupyter-collaboration/issues/49 and similar issues.
To use:
jupyter lab --YDocExtension.ystore_class=cristallina.jupyter_helper.MySQLiteYStore
In 2.0.x this is replaced by:
# The Store class used for storing Y updates (default: SQLiteYStore).
jupyter lab --YDocExtension.ystore_class=jupyter_collaboration.stores.FileYStore
This should work on the local consoles and also on RA (not yet tested there).
"""
from packaging.version import Version
import random
import jupyter_collaboration
if Version(jupyter_collaboration.__version__) < Version("1.9"):
from ypy_websocket.ystore import SQLiteYStore
else:
# approach for >=2.0
from jupyter_collaboration.stores import SQLiteYStore
class MySQLiteYStore(SQLiteYStore):
"""
Custom SQL location for jupyter collaboration to avoid NFS locking issues.
"""
suffix = random.randint(0, 10**9)  # randint requires integer bounds
db_path = f"/tmp/ystore_{suffix}.db"
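The CLI flag from the docstring maps to a traitlet, so the same setting can be made persistent in a Jupyter config file (a sketch; the config file location depends on the Jupyter setup):

# in jupyter_lab_config.py
c.YDocExtension.ystore_class = "cristallina.jupyter_helper.MySQLiteYStore"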

View File

@@ -5,6 +5,7 @@ import matplotlib
from matplotlib import pyplot as plt
import warnings
# because of https://github.com/kornia/kornia/issues/1425
warnings.simplefilter("ignore", DeprecationWarning)
@@ -20,6 +21,8 @@ import jungfrau_utils as ju
from . import utils
from .utils import ROI
# setup style sheet
plt.style.use("cristallina.cristallina_style")
def ju_patch_less_verbose(ju_module):
"""Quick monkey patch to suppress verbose messages from gain & pedestal file searcher."""
@@ -43,6 +46,7 @@ def ju_patch_less_verbose(ju_module):
ju_patch_less_verbose(ju)
def plot_correlation(x, y, ax=None, **ax_kwargs):
"""
Plots the correlation of x and y in a normalized scatterplot.
@@ -59,7 +63,7 @@ def plot_correlation(x, y, ax=None, **ax_kwargs):
ynorm = (y - np.mean(y)) / ystd
n = len(y)
r = 1 / (n) * sum(xnorm * ynorm)
if ax is None:
@@ -73,10 +77,11 @@ def plot_correlation(x, y, ax=None, **ax_kwargs):
return ax, r
def plot_channel(data : SFDataFiles, channel_name, ax=None):
"""
Plots a given channel from an SFDataFiles object.
def plot_channel(data: SFDataFiles, channel_name, ax=None):
"""
Plots a given channel from an SFDataFiles object.
Optionally: a matplotlib axis to plot into
"""
@@ -95,7 +100,6 @@ def plot_channel(data : SFDataFiles, channel_name, ax=None):
def axis_styling(ax, channel_name, description):
ax.set_title(channel_name)
# ax.set_xlabel('x')
# ax.set_ylabel('a.u.')
@@ -110,7 +114,7 @@ def axis_styling(ax, channel_name, description):
)
def plot_1d_channel(data : SFDataFiles, channel_name, ax=None):
def plot_1d_channel(data: SFDataFiles, channel_name, ax=None):
"""
Plots channel data for a channel that contains a single numeric value per pulse.
"""
@@ -131,7 +135,7 @@ def plot_1d_channel(data : SFDataFiles, channel_name, ax=None):
axis_styling(ax, channel_name, description)
def plot_2d_channel(data : SFDataFiles, channel_name, ax=None):
def plot_2d_channel(data: SFDataFiles, channel_name, ax=None):
"""
Plots channel data for a channel that contains a 1d array of numeric values per pulse.
"""
@@ -153,25 +157,27 @@ def plot_2d_channel(data : SFDataFiles, channel_name, ax=None):
axis_styling(ax, channel_name, description)
def plot_image_channel(data : SFDataFiles, channel_name, pulse=0, ax=None, rois=None, norms=None, log_colorscale=False):
def plot_detector_image(image_data, title=None, comment=None, ax=None, rois=None, norms=None, log_colorscale=False, show_legend=True, **fig_kw):
"""
Plots a detector image (a 2D array of numeric values).
Optional:
Optional:
- rois: draw a rectangular patch for the given roi(s)
- norms: [min, max] values for colormap
- log_colorscale: True for a logarithmic colormap
- title: Title of the plot
- show_legend: True if the legend box should be drawn
"""
im = data[channel_name][pulse]
im = image_data
def log_transform(z):
return np.log(np.clip(z, 1E-12, np.max(z)))
return np.log(np.clip(z, 1e-12, np.max(z)))
if log_colorscale:
im = log_transform(im)
im = log_transform(im)
if ax is None:
fig, ax = plt.subplots(constrained_layout=True)
fig, ax = plt.subplots(constrained_layout=True, **fig_kw)
std = im.std()
mean = im.mean()
@@ -189,19 +195,45 @@ def plot_image_channel(data : SFDataFiles, channel_name, pulse=0, ax=None, rois=
for i, roi in enumerate(rois):
# Create a rectangle with ([bottom left corner coordinates], width, height)
rect = patches.Rectangle(
[roi.left, roi.bottom], roi.width, roi.height,
linewidth=3,
[roi.left, roi.bottom],
roi.width,
roi.height,
linewidth=2,
edgecolor=f"C{i}",
facecolor="none",
label=roi.name,
)
ax.add_patch(rect)
description = f"mean: {mean:.2e},\nstd: {std:.2e}"
axis_styling(ax, channel_name, description)
plt.legend(loc=4)
if comment is not None:
description = f"{comment}\nmean: {mean:.2e},\nstd: {std:.2e}"
else:
description = f"mean: {mean:.2e},\nstd: {std:.2e}"
def plot_spectrum_channel(data : SFDataFiles, channel_name_x, channel_name_y, average=True, pulse=0, ax=None):
if not show_legend:
description=""
axis_styling(ax, title, description)
def plot_image_channel(data: SFDataFiles, channel_name, pulse=0, ax=None, rois=None, norms=None, log_colorscale=False):
"""
Plots channel data for a channel that contains an image (2d array) of numeric values per pulse.
Optional:
- rois: draw a rectangular patch for the given roi(s)
- norms: [min, max] values for colormap
- log_colorscale: True for a logarithmic colormap
"""
image_data = data[channel_name][pulse]
plot_detector_image(
image_data, title=channel_name, ax=ax, rois=rois, norms=norms, log_colorscale=log_colorscale
)
def plot_spectrum_channel(data: SFDataFiles, channel_name_x, channel_name_y, average=True, pulse=0, ax=None):
"""
Plots channel data for two channels where the first is taken as the (constant) x-axis
and the second as the y-axis (here we take by default the mean over the individual pulses).
@@ -217,12 +249,11 @@ def plot_spectrum_channel(data : SFDataFiles, channel_name_x, channel_name_y, av
y_data = mean_over_frames
else:
y_data = data[channel_name_y].data[pulse]
if ax is None:
fig, ax = plt.subplots(constrained_layout=True)
ax.plot(data[channel_name_x].data[0], y_data)
description = None # f"mean: {mean:.2e},\nstd: {std:.2e}"
description = None # f"mean: {mean:.2e},\nstd: {std:.2e}"
ax.set_xlabel(channel_name_x)
axis_styling(ax, channel_name_y, description)
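A usage sketch for the refactored entry point, which now takes a plain image array (synthetic data; ROI values illustrative):

import numpy as np
from cristallina.plot import plot_detector_image
from cristallina.utils import ROI

image = np.random.poisson(lam=2.0, size=(512, 1024)).astype(float)
roi = ROI(left=100, right=300, bottom=200, top=400, name="signal")
plot_detector_image(image, title="JF16T03V01", rois=[roi], log_colorscale=True)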

View File

@@ -1,43 +1,142 @@
import yaml
import os
import json
import re
from pathlib import Path
from collections import defaultdict
import requests
import datetime
from json import JSONDecodeError
import logging
logger = logging.getLogger()
import numpy as np
import pandas as pd
import sfdata
import sfdata.errors
from sfdata import SFDataFiles, sfdatafile, SFScanInfo, SFProcFile
from xraydb import material_mu
from joblib import Parallel, delayed, cpu_count
from scipy.stats import pearsonr
import matplotlib.pyplot as plt
from scipy.optimize import curve_fit
from . import channels
def collect_runs_metadata(pgroup_data_dir: str | os.PathLike):
"""
Generates metadata overview for all runs in a given p-group data directory (e.g. "/sf/cristallina/data/p21261").
Not all available metadata is included; e.g., lists of recorded channels are skipped.
Returns: Pandas dataframe with the individual acquisitions and the respective metadata.
"""
measurements = defaultdict(list)
raw_dir = Path(pgroup_data_dir) / "raw"
run_paths = sorted([run for run in Path(raw_dir).glob("run[0-9][0-9][0-9][0-9]")])
for run_path in run_paths:
# overall data in scan.json
with open(run_path / "meta/scan.json", "r") as file:
scan_meta = json.load(file)
# individual metadata in acqXXXX.json
num_acq = len(scan_meta["scan_files"])
acq_jsons = sorted([acq for acq in Path(run_path).glob("meta/acq[0-9][0-9][0-9][0-9].json")])
for i, acq in enumerate(acq_jsons):
with open(acq) as file:
acq_meta = json.load(file)
for parameter in [
"rate_multiplicator",
"user_tag",
"request_time",
"unique_acquisition_run_number",
"start_pulseid",
"stop_pulseid",
"client_name",
]:
try:
measurements[parameter].append(acq_meta[parameter])
except KeyError:
logger.info(f"Parameter {parameter} not found in metadata, storing None.")
measurements[parameter].append(None)
for scan_parameter in ["scan_readbacks", "scan_step_info", "scan_values", "scan_readbacks_raw"]:
# in most scans we are only scanning one parameter at a time, so convert to single object here instead of list
meta_parameter = (
scan_meta[scan_parameter][i][0]
if len(scan_meta[scan_parameter][i]) == 1
else scan_meta[scan_parameter][i]
)
measurements[scan_parameter].append(meta_parameter)
# generate file pattern to cover all *.h5 of acquisition (this assumes we have some PVDATA which should almost always be the case).
measurements["files"].append(str(scan_meta["scan_files"][i][0]).replace("PVDATA", "*"))
df = pd.DataFrame(measurements)
# add run and acquisition numbers from fileset
# kind of added after the fact but now everything is in place
run_no, acq_no = [], []
for fileset in df["files"]:
pattern = r".*run(\d{4})/data/acq(\d{4})"
m = re.match(pattern, fileset)
run_no.append(int(m.groups()[0]))
acq_no.append(int(m.groups()[1]))
df["run"] = run_no
df["acq"] = acq_no
return df
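A sketch of the metadata collection (the p-group path is the docstring's example; run number illustrative):

from cristallina.utils import collect_runs_metadata

df = collect_runs_metadata("/sf/cristallina/data/p21261")
# one row per acquisition; e.g. inspect a single run:
print(df[df["run"] == 352][["acq", "user_tag", "start_pulseid", "files"]])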
def scan_info(run_number, base_path=None, small_data=True):
"""Returns SFScanInfo object for a given run number.
"""Returns SFScanInfo object for a given run number.
If there are small data channels, they will be added (small_data=False suppresses their loading).
"""
if base_path == None:
base_path = heuristic_extract_base_path()
scan = SFScanInfo(f"{base_path}run{run_number:04}/meta/scan.json")
try:
if small_data:
for i in range(len(scan.readbacks)):
sd_path_for_step=heuristic_extract_smalldata_path()+'run'+str(run_number).zfill(4)+'/acq'+str(i+1).zfill(4)+'.smalldata*.h5'
scan.info['scan_files'][i].append(sd_path_for_step)
sd_path_for_step = (
heuristic_extract_smalldata_path()
+ "run"
+ str(run_number).zfill(4)
+ "/acq"
+ str(i + 1).zfill(4)
+ ".smalldata*.h5"
)
scan.info["scan_files"][i].append(sd_path_for_step)
except KeyError:
logger.warning("Failed to load smalldata.")
logger.warning("Failed to load smalldata.")
return scan
def get_scan_from_run_number_or_scan(run_number_or_scan,small_data=True):
def get_scan_from_run_number_or_scan(run_number_or_scan, small_data=True):
"""Returns SFScanInfo object from run number or SFScanInfo object (then just passes that to output)"""
if type(run_number_or_scan) == SFScanInfo:
scan = run_number_or_scan
else:
scan = scan_info(run_number_or_scan,small_data=small_data)
scan = scan_info(run_number_or_scan, small_data=small_data)
return scan
def get_run_number_from_run_number_or_scan(run_number_or_scan,small_data=True):
def get_run_number_from_run_number_or_scan(run_number_or_scan, small_data=True):
"""Returns run number from run number or SFScanInfo object"""
if type(run_number_or_scan) == int:
rn = run_number_or_scan
@@ -47,22 +146,27 @@ def get_run_number_from_run_number_or_scan(run_number_or_scan,small_data=True):
raise ValueError("Input must be an int or SFScanInfo object")
return rn
def channel_names(run_number,verbose=False):
def channel_names(run_number, verbose=False):
"""Prints channel names for a given run_number or scan object"""
if type(run_number) == SFScanInfo:
scan = run_number
else:
scan = scan_info(run_number)
channel_list = list(scan[0].keys())
if verbose:
print(channel_list)
return channel_list
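A sketch of the run-inspection helpers (run number illustrative; the base path is resolved heuristically):

from cristallina import utils

scan = utils.scan_info(352)
print(utils.number_of_steps(scan))
chs = utils.channel_names(scan, verbose=True)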
def print_run_info(
run_number=42, print_channels=True, extra_verbose=False, base_path=None,
run_number=42,
print_channels=True,
extra_verbose=False,
base_path=None,
):
"""Prints overview of run information.
@@ -95,85 +199,159 @@ def print_run_info(
print(f"Total file size: {total_size/(1024*1024*1024):.1f} GB\n")
try:
for step in scan:
ch = step.channels
print("\n".join([str(c) for c in ch]))
# print only channels for first step
break
except sfdata.errors.NoUsableFileError:
logger.warning("Cannot access files on /sf...")
if print_channels:
try:
for step in scan:
ch = step.channels
print("\n".join([str(c) for c in ch]))
# print only channels for first step
break
except sfdata.errors.NoUsableFileError:
logger.warning("Cannot access files on /sf...")
def number_of_steps(scan_number_or_scan):
"""Returns number of steps for a scan object or run number"""
scan = get_scan_from_run_number_or_scan(scan_number_or_scan)
return len(scan.info['scan_readbacks'])
return len(scan.info["scan_readbacks"])
def process_run(run_number, rois,detector='JF16T03V01', calculate =None, only_shots=slice(None), n_jobs=cpu_count()-2):
def process_run(
run_number, rois, detector="JF16T03V01", calculate=None, only_shots=slice(None), n_jobs=cpu_count() - 2
):
"""Process rois for a given detector. Save the results small data in the res/small_data/run...
By default only sum of rois is calculated, [mean,std,img] can be added to the "calculate" optional parameter.
"""
"""
rn = get_run_number_from_run_number_or_scan(run_number)
# Load scan object with SFScanInfo
scan = scan_info(rn,small_data=False)
scan = scan_info(rn, small_data=False)
# Make the small data folder if it doesn't exist
if not os.path.exists( heuristic_extract_smalldata_path() ):
os.mkdir( heuristic_extract_smalldata_path() )
if not os.path.exists(heuristic_extract_smalldata_path()):
os.mkdir(heuristic_extract_smalldata_path())
# Set the path for later small data saving
path_with_run_folder = heuristic_extract_smalldata_path()+'run'+str(run_number).zfill(4)
path_with_run_folder = heuristic_extract_smalldata_path() + "run" + str(run_number).zfill(4)
# Make the small data run folder if it doesn't exist
if not os.path.exists( path_with_run_folder ):
os.mkdir( path_with_run_folder )
if not os.path.exists(path_with_run_folder):
os.mkdir(path_with_run_folder)
# Add scan info into the small_data_object
#for key in scan.info.keys():
# for key in scan.info.keys():
# d[key]= info.info[key]
# Check if there is only one roi. If yes, make a list of it so it can be iterated over.
if isinstance(rois, ROI):
rois = [rois]
def process_step(i):
scan = scan_info(run_number,small_data=False)
scan = scan_info(run_number, small_data=False)
step = scan[i]
with step as data:
with SFProcFile(f"{path_with_run_folder}/acq{str(i+1).zfill(4)}.smalldata.h5", mode="w") as sd:
# Calculate everything related to JF_rois
for roi in rois:
bottom, top, left, right = roi.bottom, roi.top, roi.left, roi.right
# Pulse ids for saving the new channels
det_pids = data[detector].pids
sd[roi.name] = det_pids[only_shots], data[detector][only_shots, bottom:top,left:right].sum(axis=(1, 2))
sd[roi.name] = det_pids[only_shots], data[detector][
only_shots, bottom:top, left:right
].sum(axis=(1, 2))
if calculate:
if 'means' in calculate:
sd[roi.name+"_means"] = (det_pids[only_shots], data[detector][only_shots, bottom:top,left:right].mean(axis=(1, 2)))
if 'std' in calculate:
sd[roi.name+"_std"] = (det_pids[only_shots], data[detector][only_shots, bottom:top,left:right].std(axis=(1, 2)))
if 'img' in calculate:
sd[f'{roi.name}_img'] = (det_pids[only_shots], data[detector][only_shots, bottom:top,left:right].data)
if "means" in calculate:
sd[roi.name + "_means"] = (
det_pids[only_shots],
data[detector][only_shots, bottom:top, left:right].mean(axis=(1, 2)),
)
if "std" in calculate:
sd[roi.name + "_std"] = (
det_pids[only_shots],
data[detector][only_shots, bottom:top, left:right].std(axis=(1, 2)),
)
if "img" in calculate:
sd[f"{roi.name}_img"] = (
det_pids[only_shots],
data[detector][only_shots, bottom:top, left:right].data,
)
# Currently meta files can't be read by SFData, this will be modified by Sven and then we can use it. For now saving in roi_info
#sd.meta[roi.name+"_info"] = f"roi {roi.name}: {left},{right}; {bottom},{top} (left, right, bottom, top)"
# sd.meta[roi.name+"_info"] = f"roi {roi.name}: {left},{right}; {bottom},{top} (left, right, bottom, top)"
# These channels have only one dataset per step of the scan, so we take the first pulseID
sd[roi.name + "_info"] =([det_pids[0]], [f"roi {roi.name}: {left},{right}; {bottom},{top} (left, right, bottom, top)"])
sd[roi.name + "_mean_img"] = ([det_pids[0]], [data[detector][only_shots, bottom:top,left:right].mean(axis=(0))] )
sd[roi.name + "_step_sum"] = ([det_pids[0]], [data[detector][only_shots, bottom:top,left:right].sum()] )
Parallel(n_jobs=n_jobs,verbose=10)(delayed(process_step)(i) for i in range(len(scan)))
sd[roi.name + "_info"] = (
[det_pids[0]],
[f"roi {roi.name}: {left},{right}; {bottom},{top} (left, right, bottom, top)"],
)
sd[roi.name + "_mean_img"] = (
[det_pids[0]],
[data[detector][only_shots, bottom:top, left:right].mean(axis=(0))],
)
sd[roi.name + "_step_sum"] = (
[det_pids[0]],
[data[detector][only_shots, bottom:top, left:right].sum()],
)
Parallel(n_jobs=n_jobs, verbose=10)(delayed(process_step)(i) for i in range(len(scan)))
def is_processed_JF_file(filepath, detector="JF16T03V01"):
"""Checks if a given .h5 file from a Jungfrau detector has been processed or only
contains raw adc values.
"""
import h5py
f = h5py.File(filepath)
try:
data = f["data"][detector]
except KeyError:
raise ValueError(f"{filepath} does not seem to be a Jungfrau file from the detector {detector}.")
return "meta" in f["data"][detector].keys()
def get_step_time(step: SFDataFiles):
"""Returns the start and end time as a unix timestamp (in seconds)
of a given SFDataFiles or scan step.
Assumes the gasmonitor was running and being recorded.
"""
t_start = step[channels.GASMONITOR].timestamps[0]
t_last = step[channels.GASMONITOR].timestamps[-1]
return t_start.astype(np.timedelta64) / np.timedelta64(1, "s"), t_last.astype(
np.timedelta64
) / np.timedelta64(1, "s")
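A sketch of reading a step's wall-clock window (run number illustrative; assumes the gas monitor channel was recorded, as the docstring notes):

import datetime
from cristallina import utils

scan = utils.scan_info(352)
for step in scan:
    t_start, t_last = utils.get_step_time(step)
    print(datetime.datetime.fromtimestamp(t_start))
    break  # first step is enough for a quick check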
# TODO: Clean this up
import time
import subprocess
from pathlib import Path
def wait_for_run(run_number, total_length=15):
"""Blocks until all PVDATA files of a run are present, then plays a notification sound."""
base_path = heuristic_extract_base_path()
data_path = Path(base_path) / f"run{run_number:0>4}/data"
while True:
pvfiles = data_path.glob("acq*.PVDATA.h5")
length = len(list(pvfiles))
if length == total_length:
break
else:
print(f"Waiting for run {run_number} to complete, only {length} steps so far ...")
time.sleep(5)
subprocess.run(["paplay", "/tmp/CantinaBand3.wav"])
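# Usage sketch (hypothetical run number): blocks until all 15 acq*.PVDATA.h5
# files of the run exist on disk, then plays the notification sound above.
#
#     >>> wait_for_run(205, total_length=15)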
class ROI:
"""Definition of region of interest (ROI) in image coordinates.
    Example: ROI(left=10, right=20, bottom=100, top=200).
Directions assume that lower left corner of image is at (x=0, y=0).
"""
@@ -189,10 +367,15 @@ class ROI:
width: int = None,
height: int = None,
name: str = None,
from_ROI: "ROI" = None,
):
if None not in (left, right, bottom, top):
(
self.left,
self.right,
self.bottom,
self.top,
) = (
left,
right,
bottom,
@@ -200,14 +383,20 @@ class ROI:
)
elif None not in (center_x, center_y, width, height):
self.from_centers_widths(center_x, center_y, width, height)
elif isinstance(from_ROI, ROI):
self.left = from_ROI.left
self.right = from_ROI.right
self.top = from_ROI.top
self.bottom = from_ROI.bottom
            if name is None:  # keep an explicitly passed name over the copied one
                name = from_ROI.name
else:
raise ValueError("No valid ROI definition.")
# Check that ROI has a name or generate default
if name is None:
logger.warning(f"No ROI name given, generating: {self.__repr__()}")
name = self.__repr__()
self.name = name
def from_centers_widths(self, center_x, center_y, width, height):
@@ -220,7 +409,7 @@ class ROI:
@property
def rows(self):
return slice(self.bottom, self.top)
@property
def LeftRightBottomTop(self):
return [self.left, self.right, self.bottom, self.top]
@@ -228,85 +417,166 @@ class ROI:
@property
def cols(self):
return slice(self.left, self.right)
@property
def width(self):
return self.right - self.left
@property
def height(self):
return self.top - self.bottom
def __repr__(self):
return f"ROI(bottom={self.bottom},top={self.top},left={self.left},right={self.right})"
if hasattr(self, "name"):
return f"ROI(bottom={self.bottom},top={self.top},left={self.left},right={self.right},name='{self.name}')"
else:
return f"ROI(bottom={self.bottom},top={self.top},left={self.left},right={self.right})"
def __eq__(self, other):
# we disregard the name for comparisons
return (self.left, self.right, self.bottom, self.top) == (
other.left,
other.right,
other.bottom,
other.top,
)
def __ne__(self, other):
return not self == other
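# A short sketch of the ROI semantics defined above: the copy constructor
# duplicates the coordinates, and comparisons deliberately ignore the name.
#
#     >>> a = ROI(left=10, right=20, bottom=100, top=200, name="signal")
#     >>> b = ROI(from_ROI=a)
#     >>> a == b
#     True
#     >>> a.width, a.height
#     (10, 100)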
def check_pid_offset(step, ch1, ch2, offsets_to_try=[-2, -1, 0, 1, 2], plot=False):
"""Check pid offset between two channels. Offset is the value that should be added to the second channel. Returns the best offset.
ch1 should be something that can be trusted, from experience for example the SARFE10-PBPG050:HAMP-INTENSITY-CAL is reliable.
"""
# Take the first step if scan object is given
if type(step) == sfdata.sfscaninfo.SFScanInfo:
step = step[0]
assert (
type(step) == sfdata.sfdatafiles.SFDataFiles
), "First parameter should be either a step or scan (if scan then first step is tested)"
assert type(ch1) == str, f"Channel {ch1} must be a string"
assert type(ch2) == str, f"Channel {ch2} must be a string"
corrs = []
if plot:
fig, ax = plt.subplots(1, len(offsets_to_try), figsize=(12, 3))
for i, pid_offset in enumerate(offsets_to_try):
ss = step[ch1, ch2] # Make a subset with 2 channels
ss[ch2].offset = pid_offset # Add offset to the second channel
ss.drop_missing()
x = ss[ch1].data
y = ss[ch2].data
if i == 0:
assert len(x.shape) == 1, f"Channel {ch1} has more than one dimension per pid"
assert len(y.shape) == 1, f"Channel {ch2} has more than one dimension per pid"
            assert len(x) > 2, f"Channel {ch1} has fewer than 3 data points"
            assert len(y) > 2, f"Channel {ch2} has fewer than 3 data points"
corr = pearsonr(x, y) # Calculate the correlation value
corrs.append(corr.correlation) # Save in the array
if plot:
if i == 0:
ax[i].set_ylabel(ch2)
            ax[len(offsets_to_try) // 2].set_xlabel(ch1)  # label under the middle panel
ax[i].plot(ss[ch1].data, ss[ch2].data, "ok", alpha=0.1)
ax[i].set_title(f"Corr = {np.round(corr.correlation,2)}")
    best_offset = offsets_to_try[np.argmax(corrs)]  # Best offset is where the correlation is highest
if plot:
plt.suptitle(f"Best correlation for pid offset {best_offset}")
plt.tight_layout()
return best_offset
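# Usage sketch (hypothetical p-group and run; channel names as in the tests).
# Passing a whole SFScanInfo checks the first step only:
#
#     >>> from sfdata import SFScanInfo
#     >>> scan = SFScanInfo("/sf/cristallina/data/p12345/raw/run0001/meta/scan.json")
#     >>> check_pid_offset(scan, "SARFE10-PBPG050:HAMP-INTENSITY-CAL", "SARFE10-PBIG050-EVR0:CALCI")
#     0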
######################## Setting up paths ########################
def heuristic_extract_pgroup(path=None):
""" The function tries to guess the current p-group from the
current working directory (default) or the contents of
the given path.
"""The function tries to guess the current p-group from the
current working directory (default) or the contents of
the given path.
"""
path = path or os.getcwd()
if "/p" in path:
# Find the last /p in the path
p_index = path.rfind('/p')
# Cut the string and look at the next five letters after the last /p
p_number = path[p_index+2:p_index+7]
while "/p" in path:
# Find the last /p in the path
p_index = path.rfind("/p")
if not p_number.isdigit():
raise KeyError("Automatic p-group extraction from the current working directory didn't work.")
# Cut the string and look at the next five letters after the last /p
p_number = path[p_index + 2 : p_index + 7]
if len(p_number) == 5 and p_number.isdigit():
return p_number
path = path[:p_index] # and cut the end off
else:
raise KeyError("Automatic p-group extraction from the current working directory didn't work.")
return p_number
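# Example of the heuristic on a typical raw-data path (same layout as the test data):
#
#     >>> heuristic_extract_pgroup("/sf/cristallina/data/p20841/raw")
#     '20841'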
def heuristic_extract_base_path():
""" The function tries to guess the full path where the raw data is saved."""
"""The function tries to guess the full path where the raw data is saved."""
p_number = heuristic_extract_pgroup()
base_path = f"/sf/cristallina/data/p{p_number}/raw/"
    return base_path
def heuristic_extract_smalldata_path():
""" The function tries to guess the full path where the small data is saved."""
"""The function tries to guess the full path where the small data is saved."""
p_number = heuristic_extract_pgroup()
small_data_path = f"/das/work/p{str(p_number)[0:2]}/p{p_number}/smalldata/"
    return small_data_path
def stand_table(pgroup=None):
"""Reads the stand table. If no pgroup given it tries to guess it from the current path.
For other pgrous, add pgroup= as optional parameter."""
if pgroup:
# P group might be added as 'p12345' or just 12345, so both cases should be taken care of
if "p" in str(pgroup):
            # rfind returns -1 for a bare 'p12345', so the slice below still lands on the digits
            p_index = pgroup.rfind("/p")
            p_number = pgroup[p_index + 2 : p_index + 7]
base_path = f"/sf/cristallina/data/p{p_number}/raw/"
else:
base_path = f"/sf/cristallina/data/p{pgroup}/raw/"
else:
base_path = heuristic_extract_base_path()
stand_file = base_path.replace("raw", "res") + "output.h5"
stand_table = pd.read_hdf(stand_file)
return stand_table
######################## Little useful functions ########################
def find_nearest(array, value):
    """Finds the index in an array whose value is nearest to the given number"""
array = np.asarray(array)
idx = (np.abs(array - value)).argmin()
return idx
def find_two_nearest(time_array, percentage):
    """Finds the indices of the two array values nearest to the point at the given
    fraction (percentage, 0-1) of the array's range."""
    array = np.asarray(time_array)
    value = (np.max(array) - np.min(array)) * percentage + np.min(array)
    indices = np.sort(np.argsort(np.abs(array - value))[:2])
    return indices
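# Example: at 45% of the range 0..10 the target value is 4.5, so the two
# nearest entries sit at indices 4 and 5.
#
#     >>> find_two_nearest(np.arange(11), 0.45)
#     array([4, 5])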
def gauss(x, H, A, x0, sigma):
    """Returns the value of a Gaussian function"""
    return H + A * np.exp(-((x - x0) ** 2) / (2 * sigma**2))
def gauss_fit(x, y, fit_details=None, plot=None):
    """Returns [baseline_offset, Amplitude, center, sigma, FWHM]"""
# Initial guesses
mean = sum(x * y) / sum(y)
@@ -317,34 +587,35 @@ def gauss_fit(x, y, fit_details=None, plot=None):
popt, pcov = curve_fit(gauss, x, y, p0=[min(y), max(y), mean, sigma])
    # Add FWHM to the outputs
    popt = np.append(popt, 2.35482 * popt[3])
# Print results
if fit_details:
print("The baseline offset is", popt[0])
print("The center is", popt[2])
print("The sigma of the fit is", popt[3])
print("The maximum intensity is", popt[0] + popt[1])
print("The Amplitude is", popt[1])
print("The FWHM is", 2.35482 * popt[3])
# Show plot
if plot:
plt.figure()
        plt.plot(x, y, ".k", label="data")
        plt.plot(x, gauss(x, *popt[0:4]), "--r", label="fit")  # reuse popt instead of fitting again
plt.legend()
plt.title("Gaussian fit")
plt.xlabel("X")
plt.ylabel("Y")
plt.show()
return popt
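# Usage sketch on synthetic, noise-free data (parameter values chosen arbitrarily);
# the returned vector is [baseline_offset, Amplitude, center, sigma, FWHM]:
#
#     >>> x = np.linspace(0, 20, 200)
#     >>> y = gauss(x, 0.5, 2.0, 10.0, 4.0)
#     >>> H, A, x0, sigma, fwhm = gauss_fit(x, y)
#     >>> round(x0, 3), round(sigma, 3)
#     (10.0, 4.0)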
def xray_transmission(energy, thickness, material="Si", density=[]):
    """Calculate the x-ray transmission for a given energy, thickness and material.
    The default material is Si; pass material=<element> as a string for another one.
    Density is an optional parameter."""
mu = material_mu(material, energy)
if density == []:
@@ -352,38 +623,63 @@ def xray_transmission(energy,thickness,material='Si',density=[]):
else:
        mu_array = material_mu(material, energy, density=density)
    trans = np.exp(
        -0.1 * (thickness * 1000) * mu_array
    )  # 0.1 because mu is in 1/cm and the thickness is converted from m to mm
    return trans
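# Usage sketch, matching the value checked in test_xray_transmission further down:
# 100 um of Si transmits about 34% at 9 keV.
#
#     >>> xray_transmission(9000, 100e-6, material="Si")
#     0.342385039732607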
######################## Unit conversions ########################
def joules_to_eV(joules):
"""Just a unit conversion"""
eV = joules * 6.241509e18
return eV
def eV_to_joules(eV):
"""Just a unit conversion"""
joules = eV * 1.602176565e-19
    return joules
def photon_energy_from_wavelength(wavelength):
"""Returns photon energy in eV from wavelength in meters. Source https://www.kmlabs.com/en/wavelength-to-photon-energy-calculator"""
Eph = 1239.8 / (wavelength * 1e9)
return Eph
def wavelength_from_photon_energy(Eph):
"""Returns wavelength in meters from photon energy in eV. Source https://www.kmlabs.com/en/wavelength-to-photon-energy-calculator"""
wavelength = 1239.8 / (Eph * 1e9)
return wavelength
def sigma_to_FWHM(sigma):
"""Gaussian sigma to FWHM"""
FWHM = sigma * 2.355
return FWHM
def FWHM_to_sigma(FWHM):
"""FWHM to gaussian sigma"""
sigma = FWHM / 2.355
return sigma
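# Quick sanity checks for the conversions above (round-trips and a 1 nm photon):
#
#     >>> round(joules_to_eV(eV_to_joules(1.0)), 6)
#     1.0
#     >>> round(photon_energy_from_wavelength(1e-9), 1)
#     1239.8
#     >>> round(sigma_to_FWHM(1.0), 3)
#     2.355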
def pulseid_to_timestamp(pulse_id):
""" Converts pulse id to datetime using the data-api server.
"""
r = requests.get(f"https://data-api.psi.ch/api/4/map/pulse/sf-databuffer/{pulse_id}")
try:
ns_timestamp = r.json()
        timestamp = datetime.datetime.fromtimestamp(ns_timestamp / 1e9)
except (JSONDecodeError, TypeError) as e:
raise ValueError(f"Cannot convert pulse id {pulse_id} to timestamp. Cause: {e}")
return timestamp
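# Usage sketch (pulse id taken from the metadata test below; needs network access
# to data-api.psi.ch and raises ValueError if the id cannot be mapped):
#
#     >>> pulseid_to_timestamp(17358560870)   # doctest: +SKIP
#     datetime.datetime(...)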

View File

@@ -1,66 +1,151 @@
import pytest
import numpy as np
import unittest.mock
import cristallina.analysis
import cristallina.utils
__author__ = "Alexander Steppke"
def test_joblib_memory():
"""We need joblib for fast caching of intermediate results in all cases. So we check
if the basic function caching to disk works.
"""
def calc_example(x):
return x**2
calc_cached = cristallina.analysis.memory.cache(calc_example)
assert calc_cached(8) == 64
    assert calc_cached.check_call_in_cache(8)
@unittest.mock.patch(
"jungfrau_utils.file_adapter.locate_gain_file",
lambda path, **kwargs: "tests/data/gains.h5",
)
@unittest.mock.patch(
"jungfrau_utils.file_adapter.locate_pedestal_file",
lambda path, **kwargs: "tests/data/JF16T03V01.res.h5",
)
def test_image_calculations():
res = cristallina.analysis.perform_image_calculations(["tests/data/p20841/raw/run0185/data/acq0001.*.h5"])
# these values are only correct when using the specific gain and pedestal files included in the test data
# they do not correspond to the gain and pedestal files used in the actual analysis
intensity = [
1712858.6,
693994.06,
1766390.0,
1055504.9,
1516520.9,
461969.06,
3148285.5,
934917.5,
1866691.6,
798191.2,
2250207.0,
453842.6,
]
assert np.allclose(res["JF16T03V01_intensity"], intensity)
@unittest.mock.patch(
"jungfrau_utils.file_adapter.locate_gain_file",
lambda path, **kwargs: "tests/data/gains.h5",
)
@unittest.mock.patch(
"jungfrau_utils.file_adapter.locate_pedestal_file",
lambda path, **kwargs: "tests/data/JF16T03V01.res.h5",
)
def test_roi_calculation():
roi = cristallina.utils.ROI(left=575, right=750, top=750, bottom=600)
cutouts = cristallina.analysis.perform_image_roi_crop(["tests/data/p20841/raw/run0205/data/acq0001.*.h5"], roi=roi)
sum_roi, max_roi, min_roi = (
np.sum(cutouts[11]),
np.max(cutouts[11]),
np.min(cutouts[11]),
)
# these values are only correct when using the specific gain and pedestal files included in the test data
# they do not correspond to the gain and pedestal files used in the actual analysis
assert np.allclose([3119071.8, 22381.547, -0.9425874], [sum_roi, max_roi, min_roi])
def test_minimal_2d_gaussian():
image = np.array(
[
[0, 0, 0, 0, 0],
[0, 0, 0, 0, 0],
[0, 0, 1, 0, 0],
[0, 0, 0, 0, 0],
[0, 0, 0, 0, 0],
]
)
center_x, center_y, result = cristallina.analysis.fit_2d_gaussian(image)
assert np.allclose(center_x, 2.0, rtol=1e-04)
assert np.allclose(center_y, 2.0, rtol=1e-04)
def test_2d_gaussian():
    # define normalized 2D gaussian
    def gauss2d(x=0, y=0, mx=0, my=0, sx=1, sy=1):
        return 1 / (2 * np.pi * sx * sy) * np.exp(-((x - mx) ** 2 / (2 * sx**2.0) + (y - my) ** 2 / (2 * sy**2)))
    x = np.arange(0, 150, 1)
    y = np.arange(0, 100, 1)
    x, y = np.meshgrid(x, y)
    z = gauss2d(x, y, mx=40, my=50, sx=20, sy=40)
    center_x, center_y, result = cristallina.analysis.fit_2d_gaussian(z)
    assert np.allclose(center_x, 40, rtol=1e-04)
    assert np.allclose(center_y, 50, rtol=1e-04)
def test_2d_gaussian_rotated():
    # define normalized 2D gaussian
    def gauss2d_rotated(x=0, y=0, center_x=0, center_y=0, sx=1, sy=1, rotation=0):
        sr = np.sin(rotation)
        cr = np.cos(rotation)
        center_x_rot = center_x * cr - center_y * sr
        center_y_rot = center_x * sr + center_y * cr
        x_rot = x * cr - y * sr
        y_rot = x * sr + y * cr
        return (
            1
            / (2 * np.pi * sx * sy)
            * np.exp(-((x_rot - center_x_rot) ** 2 / (2 * sx**2.0) + (y_rot - center_y_rot) ** 2 / (2 * sy**2)))
        )
    x = np.arange(0, 150, 1)
    y = np.arange(0, 100, 1)
    x, y = np.meshgrid(x, y)
    z = 100 * gauss2d_rotated(x, y, center_x=40, center_y=50, sx=10, sy=20, rotation=0.5)
    center_x, center_y, result = cristallina.analysis.fit_2d_gaussian_rotated(z, vary_rotation=True, plot=False)
    assert np.allclose(center_x, 40, rtol=1e-04)
    assert np.allclose(center_y, 50, rtol=1e-04)
    assert np.allclose(result.params["rotation"].value, 0.5, rtol=1e-02)
def test_1d_gaussian():
def gauss(x, H, A, x0, sigma):
return H + A * np.exp(-((x - x0) ** 2) / (2 * sigma**2))
x = np.linspace(0, 20)
y = gauss(x, 0.5, 2, 10, 4)
res = cristallina.analysis.fit_1d_gaussian(x, y)
assert np.allclose(res.values["center"], 10)
assert np.allclose(res.values["sigma"], 4)
assert np.allclose(res.values["height"], 2)

View File

@@ -9,8 +9,8 @@ import tracemalloc
@pytest.mark.regression
def test_JU_memory():
base_path = "/sf/cristallina/data/p19739/raw/"
run_number = 49
base_path = "/sf/cristallina/data/p19150/raw"
run_number = 146
averages = []
tracemalloc.start()

View File

@@ -1,30 +1,39 @@
import pytest
import os
import numpy as np
import cristallina.utils as utils
from sfdata import SFDataFiles
from cristallina.utils import ROI, print_run_info, heuristic_extract_pgroup, gauss, find_nearest, xray_transmission, stand_table, check_pid_offset
def test_print(capsys):
# this currently works only if the heuristic p-group extraction works
# TODO: fix this shortcoming and use base_path throughout
print_run_info(185, base_path="tests/data/p20841/raw/")
captured = capsys.readouterr()
assert "17259343145" in captured.out
assert "scan_parameters" in captured.out
def test_collect_metadata():
test_pgroup_dir = "tests/data/p20841"
df = utils.collect_runs_metadata(test_pgroup_dir)
assert df.iloc[1]["user_tag"] == "PMS, Magnet at 78K, 400V excitation"
assert df.iloc[1]["start_pulseid"] == 17358560870
def test_ROI():
"""API Tests"""
r = ROI(left=1, right=2, top=4, bottom=2)
assert r.width == 1
assert r.height == 2
assert r.name is not None
def test_extract_pgroup():
cwd = os.getcwd()
os.chdir("tests/data/p20841/raw/")
assert heuristic_extract_pgroup() == '20841'
os.chdir(cwd)
def test_gauss():
@@ -36,9 +45,17 @@ def test_find_nearest():
    a = np.linspace(0, 99, 100)
    assert find_nearest(a, 10.1) == 10
def test_xray_transmission():
    T = xray_transmission(9000, 100e-6, material="Si")
    assert T == 0.342385039732607
def test_check_pid_offset():
    with SFDataFiles("tests/data/p20841/raw/run0205/data/acq*.h5") as data:
        assert check_pid_offset(data, "SARFE10-PBPG050:HAMP-INTENSITY-CAL", "SARFE10-PBIG050-EVR0:CALCI") == 0
# This test can only be run locally (github has no access to /sf/cristallina), therefore it's commented out.
# def test_stand_table():
# # Load run from a p-group where we have saved the stand table. First run recorded was number 27, so check that.
# table = stand_table(21563)
# assert table['run'][0] == 27