Refactor handling of the result dict
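In short: pyzebra.ccl_findpeaks and pyzebra.fitccl no longer take the whole data dict plus a measurement key and return the dict; they now take a single measurement dict and write their results into it in place, so the Bokeh callbacks no longer reassign det_data. A minimal usage sketch of the calling convention before and after this commit (not runnable on its own; it assumes det_data comes from the ccl loader, and "M1" is a hypothetical measurement name):

    # before: whole dict in, whole dict out
    det_data = pyzebra.ccl_findpeaks(det_data, "M1", prominence=50)

    # after: one measurement dict is modified in place, nothing is returned
    meas = det_data["Measurements"]["M1"]
    pyzebra.ccl_findpeaks(meas, prominence=50)  # fills meas["num_of_peaks"], meas["peak_indexes"], ...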
@@ -195,10 +195,10 @@ def create():
     plot_circle_source = ColumnDataSource(dict(x=[], y=[]))
     plot.add_glyph(plot_circle_source, Circle(x="x", y="y"))

-    numfit_min_span = Span(location=None, dimension='height', line_dash='dashed')
+    numfit_min_span = Span(location=None, dimension="height", line_dash="dashed")
     plot.add_layout(numfit_min_span)

-    numfit_max_span = Span(location=None, dimension='height', line_dash='dashed')
+    numfit_max_span = Span(location=None, dimension="height", line_dash="dashed")
     plot.add_layout(numfit_max_span)

     # Measurement select

@@ -296,10 +296,8 @@ def create():
     fit_output_textinput = TextAreaInput(title="Fit results:", width=450, height=400)

     def peakfind_all_button_callback():
-        nonlocal det_data
-        for meas in det_data["Measurements"]:
-            det_data = pyzebra.ccl_findpeaks(
-                det_data,
+        for meas in det_data["Measurements"].values():
+            pyzebra.ccl_findpeaks(
                 meas,
                 int_threshold=peak_int_ratio_spinner.value,
                 prominence=peak_prominence_spinner.value,

@@ -317,12 +315,10 @@ def create():
     peakfind_all_button.on_click(peakfind_all_button_callback)

     def peakfind_button_callback():
-        nonlocal det_data
         sel_ind = meas_table_source.selected.indices[-1]
         meas = meas_table_source.data["measurement"][sel_ind]
-        det_data = pyzebra.ccl_findpeaks(
-            det_data,
-            meas,
+        pyzebra.ccl_findpeaks(
+            det_data["Measurements"][meas],
             int_threshold=peak_int_ratio_spinner.value,
             prominence=peak_prominence_spinner.value,
             smooth=smooth_toggle.active,

@@ -337,12 +333,10 @@ def create():
     peakfind_button.on_click(peakfind_button_callback)

     def fit_all_button_callback():
-        nonlocal det_data
-        for meas in det_data["Measurements"]:
-            num_of_peaks = det_data["Measurements"][meas].get("num_of_peaks")
+        for meas in det_data["Measurements"].values():
+            num_of_peaks = meas.get("num_of_peaks")
             if num_of_peaks is not None and num_of_peaks == 1:
-                det_data = pyzebra.fitccl(
-                    det_data,
+                pyzebra.fitccl(
                     meas,
                     guess=[
                         centre_guess.value,

@@ -383,15 +377,13 @@ def create():
     fit_all_button.on_click(fit_all_button_callback)

     def fit_button_callback():
-        nonlocal det_data
         sel_ind = meas_table_source.selected.indices[-1]
         meas = meas_table_source.data["measurement"][sel_ind]

         num_of_peaks = det_data["Measurements"][meas].get("num_of_peaks")
         if num_of_peaks is not None and num_of_peaks == 1:
-            det_data = pyzebra.fitccl(
-                det_data,
-                meas,
+            pyzebra.fitccl(
+                det_data["Measurements"][meas],
                 guess=[
                     centre_guess.value,
                     sigma_guess.value,
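The callback hunks above all follow the same pattern: iterating det_data["Measurements"] yields the measurement names (dict keys), which the old API needed for its lookup, while .values() yields the measurement dicts themselves, which the new in-place API takes directly, so det_data never has to be reassigned. A small standalone illustration with plain dicts (no pyzebra involved):

    measurements = {"M1": {"Counts": [1, 2, 3]}, "M2": {"Counts": [4, 5, 6]}}

    for name in measurements:           # old style: iterate keys, look each measurement up
        measurements[name]["num_of_peaks"] = 0

    for meas in measurements.values():  # new style: iterate the measurement dicts directly
        meas["num_of_peaks"] = 0        # mutates the same dict objects held by measurements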
@@ -5,11 +5,11 @@ from scipy.signal import savgol_filter


 def ccl_findpeaks(
-    data, keys, int_threshold=0.8, prominence=50, smooth=False, window_size=7, poly_order=3
+    meas, int_threshold=0.8, prominence=50, smooth=False, window_size=7, poly_order=3
 ):

     """function iterates through the dictionary created by load_cclv2 and locates peaks for each measurement
-    args: data (dictionary from load_cclv2),
+    args: meas - a single measurement,

     int_threshold - fraction of threshold_intensity/max_intensity, must be positive num between 0 and 1
     i.e. will only detect peaks above 75% of max intensity

@@ -29,11 +29,6 @@ def ccl_findpeaks(
        'peak_heights': [90.], # height of the peaks (if data vere smoothed
                                 its the heigh of the peaks in smoothed data)
     """
-    meas = data["Measurements"][keys]
-
-    if type(data) is not dict and data["file_type"] != "ccl":
-        print("Data is not a dictionary or was not made from ccl file")
-
     if not 0 <= int_threshold <= 1:
         int_threshold = 0.8
         print(

@@ -41,27 +36,27 @@ def ccl_findpeaks(
             int_threshold,
         )

-    if isinstance(window_size, int) is False or (window_size % 2) == 0 or window_size <= 1:
+    if not isinstance(window_size, int) or (window_size % 2) == 0 or window_size <= 1:
         window_size = 7
         print(
             "Invalid value for window_size, select positive odd integer, new value set to!:",
             window_size,
         )

-    if isinstance(poly_order, int) is False or window_size < poly_order:
+    if not isinstance(poly_order, int) or window_size < poly_order:
         poly_order = 3
         print(
             "Invalid value for poly_order, select positive integer smaller than window_size, new value set to:",
             poly_order,
         )

-    if isinstance(prominence, (int, float)) is False and prominence < 0:
+    if not isinstance(prominence, (int, float)) and prominence < 0:
         prominence = 50
         print("Invalid value for prominence, select positive number, new value set to:", prominence)

     omega = meas["om"]
     counts = np.array(meas["Counts"])
-    if smooth is True:
+    if smooth:
         itp = interp1d(omega, counts, kind="linear")
         absintensity = [abs(number) for number in counts]
         lowest_intensity = min(absintensity)

@@ -71,12 +66,10 @@ def ccl_findpeaks(
     else:
         smooth_peaks = counts

-    indexes = sc.signal.find_peaks(
+    peaks, properties = sc.signal.find_peaks(
         smooth_peaks, height=int_threshold * max(smooth_peaks), prominence=prominence
     )
-    meas["num_of_peaks"] = len(indexes[0])
-    meas["peak_indexes"] = indexes[0]
-    meas["peak_heights"] = indexes[1]["peak_heights"]
+    meas["num_of_peaks"] = len(peaks)
+    meas["peak_indexes"] = peaks
+    meas["peak_heights"] = properties["peak_heights"]
     meas["smooth_peaks"] = smooth_peaks  # smoothed curve
-
-    return data
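The last hunk above also unpacks the return value of scipy.signal.find_peaks by name: the function returns a tuple of peak indices and a properties dict, and the properties dict contains "peak_heights" whenever height= is passed, so peaks/properties reads better than indexes[0]/indexes[1]. A short self-contained example of that return value (synthetic counts, not pyzebra data):

    import numpy as np
    from scipy.signal import find_peaks

    counts = np.array([0, 1, 5, 1, 0, 2, 9, 2, 0])
    peaks, properties = find_peaks(counts, height=0.8 * counts.max(), prominence=2)

    print(peaks)                       # array([6]) - index of the detected peak
    print(properties["peak_heights"])  # array([9.]) - present because height= was given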
@@ -32,8 +32,7 @@ def create_uncertanities(y, y_err):


 def fitccl(
-    data,
-    keys,
+    meas,
     guess,
     vary,
     constraints_min,

@@ -43,8 +42,7 @@ def fitccl(
     binning=None,
 ):
     """Made for fitting of ccl date where 1 peak is expected. Allows for combination of gaussian and linear model combination
-    :param data: dictionary after peak fining
-    :param keys: name of the measurement in the data dict (i.e. M123)
+    :param meas: measurement in the data dict (i.e. M123)
     :param guess: initial guess for the fitting, if none, some values are added automatically in order (see below)
     :param vary: True if parameter can vary during fitting, False if it to be fixed
     :param numfit_min: minimal value on x axis for numerical integration - if none is centre of gaussian minus 3 sigma

@@ -61,8 +59,6 @@ def fitccl(
     constraints_min = [23, None, 50, 0, 0]
     constraints_min = [80, None, 1000, 0, 100]
     """
-    meas = data["Measurements"][keys]
-
     if len(meas["peak_indexes"]) > 1:
         # return in case of more than 1 peaks
         print("More than 1 peak, measurement skipped")

@@ -85,7 +81,7 @@ def fitccl(
         x = bin_data(x, binning)
         y = list(meas["Counts"])
         y_err = list(np.sqrt(y)) if meas.get("sigma", None) is None else list(meas["sigma"])
-        combined = bin_data(create_uncertanities(y,y_err), binning)
+        combined = bin_data(create_uncertanities(y, y_err), binning)
         y = [combined[i].n for i in range(len(combined))]
        y_err = [combined[i].s for i in range(len(combined))]

@@ -126,10 +122,10 @@ def fitccl(
     params = Parameters()
     params.add_many(
         ("g_cen", guess[0], bool(vary[0]), np.min(x), np.max(x), None, None),
-        ("g_width", guess[1], bool(vary[1]), constraints_min[1], constraints_max[1], None, None,),
-        ("g_amp", guess[2], bool(vary[2]), constraints_min[2], constraints_max[2], None, None,),
-        ("slope", guess[3], bool(vary[3]), constraints_min[3], constraints_max[3], None, None,),
-        ("intercept", guess[4], bool(vary[4]), constraints_min[4], constraints_max[4], None, None,),
+        ("g_width", guess[1], bool(vary[1]), constraints_min[1], constraints_max[1], None, None),
+        ("g_amp", guess[2], bool(vary[2]), constraints_min[2], constraints_max[2], None, None),
+        ("slope", guess[3], bool(vary[3]), constraints_min[3], constraints_max[3], None, None),
+        ("intercept", guess[4], bool(vary[4]), constraints_min[4], constraints_max[4], None, None),
     )
     # the weighted fit
     result = mod.fit(

@@ -225,5 +221,3 @@ def fitccl(
     d["comps"] = comps
     d["numfit"] = [numfit_min, numfit_max]
     meas["fit"] = d
-
-    return data
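With return data gone from both functions, callers read the results from the measurement they passed in: ccl_findpeaks leaves num_of_peaks, peak_indexes, peak_heights and smooth_peaks on it, and fitccl stores its result dict under the "fit" key. A hedged sketch of reading the output after this commit ("M1", guess, vary, cmin and cmax are placeholders; only the "comps" and "numfit" entries of "fit" are confirmed by the diff above):

    meas = det_data["Measurements"]["M1"]      # "M1" is a placeholder key
    pyzebra.ccl_findpeaks(meas)
    if meas["num_of_peaks"] == 1:
        pyzebra.fitccl(meas, guess=guess, vary=vary, constraints_min=cmin, constraints_max=cmax)
        print(meas["fit"]["numfit"])           # [numfit_min, numfit_max], stored in place by fitccl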