Compare commits
14 Commits (SHA1):
6bd2398f5e
768dd77ef5
cf6f7a8506
654d281c49
950f76d4be
238f3e4fbc
826363a0f5
4822121b3b
56609ad5ff
a9b0a8a01d
aa6bcb6c6b
216de442a5
9507339c2a
0c158db48f
@ -1,9 +1,9 @@
from pyzebra.anatric import *
from pyzebra.ccl_findpeaks import ccl_findpeaks
from pyzebra.ccl_io import export_comm, load_1D, parse_1D
from pyzebra.fit2 import fitccl
from pyzebra.h5 import *
from pyzebra.ccl_io import load_1D, parse_1D, export_comm
from pyzebra.merge_function import unified_merge
from pyzebra.merge_function import add_dict, unified_merge
from pyzebra.xtal import *

__version__ = "0.2.0"
__version__ = "0.2.2"
@ -502,7 +502,12 @@ def create():
for s, export in zip(scan_table_source.data["scan"], scan_table_source.data["export"]):
if not export:
del export_data["scan"][s]
pyzebra.export_comm(export_data, temp_file, lorentz=lorentz_toggle.active)
pyzebra.export_comm(
export_data,
temp_file,
lorentz=lorentz_toggle.active,
hkl_precision=int(hkl_precision_select.value),
)

with open(f"{temp_file}{ext}") as f:
preview_output_textinput.value = f.read()
@ -510,6 +515,8 @@ def create():
preview_output_button = Button(label="Preview file", default_size=220)
preview_output_button.on_click(preview_output_button_callback)

hkl_precision_select = Select(options=["2", "3", "4"], value="2", default_size=220)

def export_results(det_data):
if det_data["meta"]["indices"] == "hkl":
ext = ".comm"
@ -522,7 +529,12 @@ def create():
for s, export in zip(scan_table_source.data["scan"], scan_table_source.data["export"]):
if not export:
del export_data["scan"][s]
pyzebra.export_comm(export_data, temp_file, lorentz=lorentz_toggle.active)
pyzebra.export_comm(
export_data,
temp_file,
lorentz=lorentz_toggle.active,
hkl_precision=int(hkl_precision_select.value),
)

with open(f"{temp_file}{ext}") as f:
output_content = f.read()
@ -556,7 +568,10 @@ def create():
),
)

export_layout = column(preview_output_textinput, row(preview_output_button, save_button))
export_layout = column(
preview_output_textinput,
row(column(preview_output_button, hkl_precision_select), save_button),
)

upload_div = Div(text="Or upload .ccl file:")
append_upload_div = Div(text="append extra .ccl/.dat files:")
@ -401,9 +401,11 @@ def create():
)

async def update_config():
config.save_as("debug.xml")
with open("debug.xml") as f_config:
output_config.value = f_config.read()
with tempfile.TemporaryDirectory() as temp_dir:
temp_file = temp_dir + "/debug.xml"
config.save_as(temp_file)
with open(temp_file) as f_config:
output_config.value = f_config.read()

doc.add_periodic_callback(update_config, 1000)
@ -1,5 +1,6 @@
import base64
import io
import itertools
import os
import tempfile
import types
@ -22,6 +23,7 @@ from bokeh.models import (
Grid,
Line,
LinearAxis,
MultiLine,
MultiSelect,
NumberEditor,
Panel,
@ -35,17 +37,18 @@ from bokeh.models import (
Span,
Spinner,
TableColumn,
Tabs,
TextAreaInput,
TextInput,
Toggle,
WheelZoomTool,
Whisker,
)
from bokeh.palettes import Category10

import pyzebra
from pyzebra.ccl_io import AREA_METHODS


javaScript = """
setTimeout(function() {
const filename = 'output' + js_data.data['ext']
@ -58,10 +61,16 @@ setTimeout(function() {
link.click();
window.URL.revokeObjectURL(url);
document.body.removeChild(link);
}, 500);
}, 3000);
"""

PROPOSAL_PATH = "/afs/psi.ch/project/sinqdata/2020/zebra/"
PLOT_TYPES = ("single scan", "overview")


def color_palette(n_colors):
palette = itertools.cycle(Category10[10])
return list(itertools.islice(palette, n_colors))


def create():
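The color_palette helper added in this hunk simply cycles bokeh's 10-color Category10 palette so the new overview plot can color any number of curves. A minimal standalone sketch of its behaviour (the request for 25 colors is an arbitrary example):

import itertools

from bokeh.palettes import Category10


def color_palette(n_colors):
    # repeat the 10 Category10 colors as often as needed
    palette = itertools.cycle(Category10[10])
    return list(itertools.islice(palette, n_colors))


colors = color_palette(25)
assert len(colors) == 25
assert colors[0] == colors[10]  # the 11th curve reuses the first color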
@ -71,38 +80,68 @@ def create():
js_data = ColumnDataSource(data=dict(cont=[], ext=[]))

def proposal_textinput_callback(_attr, _old, new):
ccl_path = os.path.join(PROPOSAL_PATH, new.strip())
ccl_file_list = []
for file in os.listdir(ccl_path):
if file.endswith(".ccl"):
ccl_file_list.append((os.path.join(ccl_path, file), file))
file_select.options = ccl_file_list
file_select.value = ccl_file_list[0][0]
full_proposal_path = os.path.join(PROPOSAL_PATH, new.strip())
dat_file_list = []
for file in os.listdir(full_proposal_path):
if file.endswith(".dat"):
dat_file_list.append((os.path.join(full_proposal_path, file), file))
file_select.options = dat_file_list
file_select.value = dat_file_list[0][0]

proposal_textinput = TextInput(title="Enter proposal number:", default_size=145, disabled=True)
proposal_textinput = TextInput(title="Enter proposal number:", default_size=145)
proposal_textinput.on_change("value", proposal_textinput_callback)

def _init_datatable():
scan_list = list(det_data["scan"].keys())
file_list = []
extra_meta = det_data.get("extra_meta", {})
for scan_id in scan_list:
if scan_id in extra_meta:
f_path = extra_meta[scan_id]["original_filename"]
else:
f_path = det_data["meta"]["original_filename"]

_, f_name = os.path.split(f_path)
file_list.append(f_name)

scan_table_source.data.update(
file=list(det_data.keys()),
param=[""] * len(det_data),
peaks=[0] * len(det_data),
fit=[0] * len(det_data),
export=[True] * len(det_data),
file=file_list,
scan=scan_list,
param=[""] * len(scan_list),
peaks=[0] * len(scan_list),
fit=[0] * len(scan_list),
export=[True] * len(scan_list),
)
scan_table_source.selected.indices = []
scan_table_source.selected.indices = [0]

def file_select_callback(_attr, _old, new):
def file_select_callback(_attr, _old, _new):
pass

file_select = Select(title="Available .dat files")
file_select.on_change("value", file_select_callback)

def file_open_button_callback():
nonlocal det_data
with open(new) as file:
_, ext = os.path.splitext(new)
with open(file_select.value) as file:
_, ext = os.path.splitext(file_select.value)
det_data = pyzebra.parse_1D(file, ext)

_init_datatable()

file_select = Select(title="Available .dat files", disabled=True)
file_select.on_change("value", file_select_callback)
file_open_button = Button(label="Open", default_size=100)
file_open_button.on_click(file_open_button_callback)

def file_append_button_callback():
with open(file_select.value) as file:
_, ext = os.path.splitext(file_select.value)
append_data = pyzebra.parse_1D(file, ext)
pyzebra.add_dict(det_data, append_data)

_init_datatable()

file_append_button = Button(label="Append", default_size=100)
file_append_button.on_click(file_append_button_callback)

def upload_button_callback(_attr, _old, new):
nonlocal det_data
@ -110,7 +149,11 @@ def create():
for f_str, f_name in zip(new, upload_button.filename):
with io.StringIO(base64.b64decode(f_str).decode()) as file:
_, ext = os.path.splitext(f_name)
det_data[f_name] = pyzebra.parse_1D(file, ext)
if det_data:
append_data = pyzebra.parse_1D(file, ext)
pyzebra.add_dict(det_data, append_data)
else:
det_data = pyzebra.parse_1D(file, ext)

_init_datatable()
@ -118,11 +161,11 @@ def create():
upload_button.on_change("value", upload_button_callback)

def append_upload_button_callback(_attr, _old, new):
nonlocal det_data
for f_str, f_name in zip(new, append_upload_button.filename):
with io.StringIO(base64.b64decode(f_str).decode()) as file:
_, ext = os.path.splitext(f_name)
det_data[f_name] = pyzebra.parse_1D(file, ext)
append_data = pyzebra.parse_1D(file, ext)
pyzebra.add_dict(det_data, append_data)

_init_datatable()
@ -130,11 +173,15 @@ def create():
append_upload_button.on_change("value", append_upload_button_callback)

def _update_table():
num_of_peaks = [len(scan["scan"][1].get("peak_indexes", [])) for scan in det_data.values()]
fit_ok = [(1 if "fit" in scan["scan"][1] else 0) for scan in det_data.values()]
num_of_peaks = [len(scan.get("peak_indexes", [])) for scan in det_data["scan"].values()]
fit_ok = [(1 if "fit" in scan else 0) for scan in det_data["scan"].values()]
scan_table_source.data.update(peaks=num_of_peaks, fit=fit_ok)

def _update_plot(scan):
def _update_plot():
_update_single_scan_plot(_get_selected_scan())
_update_overview()

def _update_single_scan_plot(scan):
nonlocal peak_pos_textinput_lock
peak_pos_textinput_lock = True
@ -203,6 +250,19 @@ def create():
numfit_min_span.location = None
numfit_max_span.location = None

def _update_overview():
xs = []
ys = []
param = []
for ind, p in enumerate(scan_table_source.data["param"]):
if p:
s = scan_table_source.data["scan"][ind]
xs.append(np.array(det_data["scan"][s]["om"]))
ys.append(np.array(det_data["scan"][s]["Counts"]))
param.append(float(p))

ov_plot_mline_source.data.update(xs=xs, ys=ys, param=param, color=color_palette(len(xs)))

# Main plot
plot = Plot(x_range=DataRange1d(), y_range=DataRange1d(), plot_height=400, plot_width=700)
@ -213,32 +273,77 @@ def create():
plot.add_layout(Grid(dimension=1, ticker=BasicTicker()))

plot_scatter_source = ColumnDataSource(dict(x=[0], y=[0], y_upper=[0], y_lower=[0]))
plot.add_glyph(plot_scatter_source, Scatter(x="x", y="y", line_color="steelblue"))
plot.add_layout(Whisker(source=plot_scatter_source, base="x", upper="y_upper", lower="y_lower"))
plot.add_glyph(
plot_scatter_source, Scatter(x="x", y="y", line_color="steelblue", name="single scan")
)
plot.add_layout(
Whisker(
source=plot_scatter_source,
base="x",
upper="y_upper",
lower="y_lower",
name="single scan",
)
)

plot_line_smooth_source = ColumnDataSource(dict(x=[0], y=[0]))
plot.add_glyph(
plot_line_smooth_source, Line(x="x", y="y", line_color="steelblue", line_dash="dashed")
plot_line_smooth_source,
Line(x="x", y="y", line_color="steelblue", line_dash="dashed", name="single scan"),
)

plot_gauss_source = ColumnDataSource(dict(x=[0], y=[0]))
plot.add_glyph(plot_gauss_source, Line(x="x", y="y", line_color="red", line_dash="dashed"))
plot.add_glyph(
plot_gauss_source,
Line(x="x", y="y", line_color="red", line_dash="dashed", name="single scan"),
)

plot_bkg_source = ColumnDataSource(dict(x=[0], y=[0]))
plot.add_glyph(plot_bkg_source, Line(x="x", y="y", line_color="green", line_dash="dashed"))
plot.add_glyph(
plot_bkg_source,
Line(x="x", y="y", line_color="green", line_dash="dashed", name="single scan"),
)

plot_peak_source = ColumnDataSource(dict(x=[], y=[]))
plot.add_glyph(plot_peak_source, Asterisk(x="x", y="y", size=10, line_color="red"))
plot.add_glyph(
plot_peak_source, Asterisk(x="x", y="y", size=10, line_color="red", name="single scan")
)

numfit_min_span = Span(location=None, dimension="height", line_dash="dashed")
numfit_min_span = Span(
location=None, dimension="height", line_dash="dashed", name="single scan"
)
plot.add_layout(numfit_min_span)

numfit_max_span = Span(location=None, dimension="height", line_dash="dashed")
numfit_max_span = Span(
location=None, dimension="height", line_dash="dashed", name="single scan"
)
plot.add_layout(numfit_max_span)

plot.add_tools(PanTool(), WheelZoomTool(), ResetTool())
plot.toolbar.logo = None

# Overview multilines plot
ov_plot = Plot(x_range=DataRange1d(), y_range=DataRange1d(), plot_height=400, plot_width=700)

ov_plot.add_layout(LinearAxis(axis_label="Counts"), place="left")
ov_plot.add_layout(LinearAxis(axis_label="Omega"), place="below")

ov_plot.add_layout(Grid(dimension=0, ticker=BasicTicker()))
ov_plot.add_layout(Grid(dimension=1, ticker=BasicTicker()))

ov_plot_mline_source = ColumnDataSource(dict(xs=[], ys=[], param=[], color=[]))
ov_plot.add_glyph(
ov_plot_mline_source, MultiLine(xs="xs", ys="ys", line_color="color", name="overview")
)

ov_plot.add_tools(PanTool(), WheelZoomTool(), ResetTool())
ov_plot.toolbar.logo = None

# Plot tabs
plots = Tabs(
tabs=[Panel(child=plot, title="single scan"), Panel(child=ov_plot, title="overview")]
)

# Scan select
def scan_table_select_callback(_attr, old, new):
if not new:
@ -255,31 +360,38 @@ def create():
# skip unnecessary update caused by selection drop
return

f_name = scan_table_source.data["file"][new[0]]
_update_plot(det_data[f_name]["scan"][1])
_update_plot()

scan_table_source = ColumnDataSource(dict(file=[], param=[], peaks=[], fit=[], export=[]))
scan_table_source = ColumnDataSource(
dict(file=[], scan=[], param=[], peaks=[], fit=[], export=[])
)
scan_table = DataTable(
source=scan_table_source,
columns=[
TableColumn(field="file", title="file", width=150),
TableColumn(field="scan", title="scan", width=50),
TableColumn(field="param", title="param", width=50),
TableColumn(field="peaks", title="Peaks", width=50),
TableColumn(field="fit", title="Fit", width=50),
TableColumn(field="export", title="Export", editor=CheckboxEditor(), width=50),
],
width=350,
width=400,
index_position=None,
editable=True,
fit_columns=False,
)

def scan_table_source_callback(_attr, _old, _new):
if scan_table_source.selected.indices:
_update_plot()

scan_table_source.selected.on_change("indices", scan_table_select_callback)
scan_table_source.on_change("data", scan_table_source_callback)

def _get_selected_scan():
selected_index = scan_table_source.selected.indices[0]
selected_file_name = scan_table_source.data["file"][selected_index]
return det_data[selected_file_name]["scan"][1]
selected_scan_id = scan_table_source.data["scan"][selected_index]
return det_data["scan"][selected_scan_id]

def peak_pos_textinput_callback(_attr, _old, new):
if new is not None and not peak_pos_textinput_lock:
@ -289,7 +401,7 @@ def create():
scan["peak_indexes"] = np.array([peak_ind], dtype=np.int64)
scan["peak_heights"] = np.array([scan["smooth_peaks"][peak_ind]])
_update_table()
_update_plot(scan)
_update_plot()

peak_pos_textinput = TextInput(title="Peak position:", default_size=145)
peak_pos_textinput.on_change("value", peak_pos_textinput_callback)
@ -419,11 +531,11 @@ def create():

def peakfind_all_button_callback():
peakfind_params = _get_peakfind_params()
for dat_file in det_data.values():
pyzebra.ccl_findpeaks(dat_file["scan"][1], **peakfind_params)
for scan in det_data["scan"].values():
pyzebra.ccl_findpeaks(scan, **peakfind_params)

_update_table()
_update_plot(_get_selected_scan())
_update_plot()

peakfind_all_button = Button(label="Peak Find All", button_type="primary", default_size=145)
peakfind_all_button.on_click(peakfind_all_button_callback)
@ -433,7 +545,7 @@ def create():
pyzebra.ccl_findpeaks(scan, **_get_peakfind_params())

_update_table()
_update_plot(scan)
_update_plot()

peakfind_button = Button(label="Peak Find Current", default_size=145)
peakfind_button.on_click(peakfind_button_callback)
@ -451,11 +563,11 @@ def create():

def fit_all_button_callback():
fit_params = _get_fit_params()
for dat_file in det_data.values():
for scan in det_data["scan"].values():
# fit_params are updated inplace within `fitccl`
pyzebra.fitccl(dat_file["scan"][1], **deepcopy(fit_params))
pyzebra.fitccl(scan, **deepcopy(fit_params))

_update_plot(_get_selected_scan())
_update_plot()
_update_table()

fit_all_button = Button(label="Fit All", button_type="primary", default_size=145)
@ -465,7 +577,7 @@ def create():
scan = _get_selected_scan()
pyzebra.fitccl(scan, **_get_fit_params())

_update_plot(scan)
_update_plot()
_update_table()

fit_button = Button(label="Fit Current", default_size=145)
@ -475,17 +587,15 @@ def create():
det_data["meta"]["area_method"] = AREA_METHODS[new]

area_method_radiobutton = RadioButtonGroup(
labels=["Fit area", "Int area"], active=0, default_size=145, disabled=True
labels=["Fit area", "Int area"], active=0, default_size=145,
)
area_method_radiobutton.on_change("active", area_method_radiobutton_callback)

bin_size_spinner = Spinner(title="Bin size:", value=1, low=1, step=1, default_size=145)

lorentz_toggle = Toggle(label="Lorentz Correction", default_size=145, disabled=True)
lorentz_toggle = Toggle(label="Lorentz Correction", default_size=145)

preview_output_textinput = TextAreaInput(
title="Export file preview:", width=450, height=400, disabled=True
)
preview_output_textinput = TextAreaInput(title="Export file preview:", width=450, height=400)

def preview_output_button_callback():
if det_data["meta"]["indices"] == "hkl":
@ -504,7 +614,7 @@ def create():
with open(f"{temp_file}{ext}") as f:
preview_output_textinput.value = f.read()

preview_output_button = Button(label="Preview file", default_size=220, disabled=True)
preview_output_button = Button(label="Preview file", default_size=220)
preview_output_button.on_click(preview_output_button_callback)

def export_results(det_data):
@ -530,9 +640,7 @@ def create():
cont, ext = export_results(det_data)
js_data.data.update(cont=[cont], ext=[ext])

save_button = Button(
label="Download file", button_type="success", default_size=220, disabled=True
)
save_button = Button(label="Download file", button_type="success", default_size=220)
save_button.on_click(save_button_callback)
save_button.js_on_click(CustomJS(args={"js_data": js_data}, code=javaScript))
@ -560,14 +668,18 @@ def create():
upload_div = Div(text="Or upload .dat files:")
append_upload_div = Div(text="append extra .dat files:")
tab_layout = column(
row(proposal_textinput, file_select),
row(
proposal_textinput,
file_select,
column(Spacer(height=19), row(file_open_button, file_append_button)),
),
row(
column(Spacer(height=5), upload_div),
upload_button,
column(Spacer(height=5), append_upload_div),
append_upload_button,
),
row(scan_table, plot, Spacer(width=30), fit_output_textinput, export_layout),
row(scan_table, plots, Spacer(width=30), fit_output_textinput, export_layout),
row(findpeak_controls, Spacer(width=30), fitpeak_controls),
)
@ -222,7 +222,7 @@ def parse_1D(fileobj, data_type):
return {"meta": metadata, "scan": scan}


def export_comm(data, path, lorentz=False):
def export_comm(data, path, lorentz=False, hkl_precision=2):
"""exports data in the *.comm format
:param lorentz: perform Lorentz correction
:param path: path to file + name
@ -247,7 +247,7 @@ def export_comm(data, path, lorentz=False):
if data["meta"]["indices"] == "hkl":
hkl_str = f"{int(h):6}{int(k):6}{int(l):6}"
else: # data["meta"]["indices"] == "real"
hkl_str = f"{h:8.4g}{k:8.4g}{l:8.4g}"
hkl_str = f"{h:8.{hkl_precision}f}{k:8.{hkl_precision}f}{l:8.{hkl_precision}f}"

area_method = data["meta"]["area_method"]
area_n = scan["fit"][area_method].n
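The new hkl_precision argument only affects the "real" branch above: instead of the fixed general-format specifier, h, k and l are written with a selectable number of decimals (2, 3 or 4 via the panel's Select widget). A quick illustration with made-up index values:

h, k, l = 1.25, -0.5, 3.0

old_style = f"{h:8.4g}{k:8.4g}{l:8.4g}"
print(old_style)  # '    1.25    -0.5       3'

hkl_precision = 3  # value coming from hkl_precision_select in the app
new_style = f"{h:8.{hkl_precision}f}{k:8.{hkl_precision}f}{l:8.{hkl_precision}f}"
print(new_style)  # '   1.250  -0.500   3.000'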
@ -175,6 +175,9 @@ def merge_dups(dictionary, angles):
def add_scan(dict1, dict2, scan_to_add):
max_scan = np.max(list(dict1["scan"]))
dict1["scan"][max_scan + 1] = dict2["scan"][scan_to_add]
if dict1.get("extra_meta") is None:
dict1["extra_meta"] = {}
dict1["extra_meta"][max_scan + 1] = dict2["meta"]
del dict2["scan"][scan_to_add]
@ -220,7 +223,7 @@ def process(dict1, dict2, angles, precision):
"""
1. check for bisecting or normal beam geometry in data files; select stt, om, chi, phi for bisecting; select stt, om, nu for normal beam
2. in the ccl files, check for identical stt, chi and nu within 0.1 degree, and, at the same time, for identical om and phi within 0.05 degree;
3. in the dat files, check for identical stt, chi and nu within 0.1 degree, and, at the same time,
3. in the dat files, check for identical stt, chi and nu within 0.1 degree, and, at the same time,
for identical phi within 0.05 degree, and, at the same time, for identical om within 5 degree."""
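As a rough illustration of the duplicate test described in the docstring above, here is a simplified sketch (not the pyzebra implementation; the angle names and tolerance limits come from the text, while the scalar scan values are invented - real scans store arrays):

def looks_like_duplicate_ccl(scan_a, scan_b):
    # ccl case: stt, chi and nu must agree within 0.1 degree,
    # om and phi within 0.05 degree
    coarse = all(abs(scan_a[k] - scan_b[k]) <= 0.1 for k in ("stt", "chi", "nu"))
    fine = all(abs(scan_a[k] - scan_b[k]) <= 0.05 for k in ("om", "phi"))
    return coarse and fine


a = {"stt": 30.00, "chi": 10.02, "nu": 0.0, "om": 15.01, "phi": 120.00}
b = {"stt": 30.05, "chi": 10.00, "nu": 0.0, "om": 15.03, "phi": 120.02}
print(looks_like_duplicate_ccl(a, b))  # True: every difference is inside its limit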
@ -277,11 +280,16 @@ def add_dict(dict1, dict2):
new_filenames = np.arange(
max_measurement_dict1 + 1, max_measurement_dict1 + 1 + len(dict2["scan"])
)

if dict1.get("extra_meta") is None:
dict1["extra_meta"] = {}

new_meta_name = "meta" + str(dict2["meta"]["original_filename"])
if new_meta_name not in dict1:
for keys, name in zip(dict2["scan"], new_filenames):
dict2["scan"][keys]["file_of_origin"] = str(dict2["meta"]["original_filename"])
dict1["scan"][name] = dict2["scan"][keys]
dict1["extra_meta"][name] = dict2["meta"]

dict1[new_meta_name] = dict2["meta"]
else:
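Net effect of the add_dict bookkeeping above, shown on a toy example. This is a simplified mimic, not the pyzebra function: it skips the duplicate-file check (the new_meta_name branch) and the merge step, and the dictionaries are invented; only the scan renumbering, the file_of_origin tag and the per-scan extra_meta entry mirror the diff:

def append_scans(dict1, dict2):
    # continue dict1's scan numbering for every scan coming from dict2
    start = max(dict1["scan"]) + 1
    dict1.setdefault("extra_meta", {})
    for new_id, (_old_id, scan) in enumerate(dict2["scan"].items(), start=start):
        scan["file_of_origin"] = str(dict2["meta"]["original_filename"])
        dict1["scan"][new_id] = scan
        dict1["extra_meta"][new_id] = dict2["meta"]


dict1 = {"meta": {"original_filename": "first.ccl"}, "scan": {1: {}, 2: {}}}
dict2 = {"meta": {"original_filename": "extra.dat"}, "scan": {1: {}}}
append_scans(dict1, dict2)
print(sorted(dict1["scan"]))                        # [1, 2, 3]
print(dict1["scan"][3]["file_of_origin"])           # extra.dat
print(dict1["extra_meta"][3]["original_filename"])  # extra.dat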
scripts/pyzebra-start.sh (new file, +4)
@ -0,0 +1,4 @@
source /home/pyzebra/miniconda3/etc/profile.d/conda.sh

conda activate prod
pyzebra --port=80 --allow-websocket-origin=pyzebra.psi.ch:80
scripts/pyzebra-test-start.sh (new file, +4)
@ -0,0 +1,4 @@
source /home/pyzebra/miniconda3/etc/profile.d/conda.sh

conda activate test
python ~/pyzebra/pyzebra/app/cli.py --allow-websocket-origin=pyzebra.psi.ch:5006
scripts/pyzebra-test.service (new file, +11)
@ -0,0 +1,11 @@
[Unit]
Description=pyzebra-test web server (runs on port 5006)

[Service]
Type=simple
User=pyzebra
ExecStart=/bin/bash /usr/local/sbin/pyzebra-test-start.sh
Restart=always

[Install]
WantedBy=multi-user.target
scripts/pyzebra.service (new file, +10)
@ -0,0 +1,10 @@
[Unit]
Description=pyzebra web server

[Service]
Type=simple
ExecStart=/bin/bash /usr/local/sbin/pyzebra-start.sh
Restart=always

[Install]
WantedBy=multi-user.target