Compare commits

55 Commits

| SHA1 |
|---|
| 960ce0a534 |
| 1d43a952e6 |
| 9f7a7b8bbf |
| 8129b5e683 |
| eaa6c4a2ad |
| c2be907113 |
| 4dae756b3e |
| a77a40618d |
| a73c34b06f |
| 4b9f0a8c36 |
| 9f56921072 |
| 49a6bd22ae |
| 5b502b31eb |
| 20e99c35ba |
| abf4750030 |
| 5de09d16ca |
| 5c4362d984 |
| 8d065b85a4 |
| c86466b470 |
| b8968192ca |
| 4745f0f401 |
| 9f6e7230fa |
| 089a0cf5ac |
| 639dc070c3 |
| fec463398d |
| b6d7a52b06 |
| d6e599d4f9 |
| d6b27fb33a |
| bae15ee2ef |
| c2bd6c25f5 |
| cf6527af13 |
| 57e503fc3d |
| c10efeb9cc |
| 137f20cc20 |
| 531463a637 |
| e3368c1817 |
| 313bd8bc62 |
| fe61d3c4cb |
| f6d9f63863 |
| 620f32446a |
| 4b4d5c16ce |
| 91b9e01441 |
| 18ea894f35 |
| 9141ac49c7 |
| 2adbcc6bcd |
| b39d970960 |
| b11004bf0f |
| 6c2e221595 |
| 502a4b8096 |
| 3fe4fca96a |
| a3e3e6768f |
| 0b6a58e160 |
| 09d22e7674 |
| 90387174e5 |
| e99edbaf72 |
.github/workflows/deployment.yaml (vendored, 1 change)

@@ -16,7 +16,6 @@ jobs:
run: |
  $CONDA/bin/conda install --quiet --yes conda-build anaconda-client
  $CONDA/bin/conda config --append channels conda-forge
  $CONDA/bin/conda config --set channel_priority strict
  $CONDA/bin/conda config --set anaconda_upload yes

- name: Build and upload
@@ -22,11 +22,9 @@ requirements:
  - numpy
  - scipy
  - h5py
  - bokeh =2.3
  - matplotlib
  - bokeh =2.4
  - numba
  - lmfit
  - uncertainties


about:
@@ -1,7 +1,8 @@
from pyzebra.anatric import *
from pyzebra.ccl_io import *
from pyzebra.h5 import *
from pyzebra.xtal import *
from pyzebra.ccl_process import *
from pyzebra.h5 import *
from pyzebra.utils import *
from pyzebra.xtal import *

__version__ = "0.3.2"
__version__ = "0.5.1"
@@ -7,6 +7,7 @@ DATA_FACTORY_IMPLEMENTATION = [
    "morph",
    "d10",
]

REFLECTION_PRINTER_FORMATS = [
    "rafin",
    "rafinf",
@@ -20,10 +21,11 @@ REFLECTION_PRINTER_FORMATS = [
    "oksana",
]

ANATRIC_PATH = "/afs/psi.ch/project/sinq/rhel7/bin/anatric"
ALGORITHMS = ["adaptivemaxcog", "adaptivedynamic"]


def anatric(config_file, anatric_path="/afs/psi.ch/project/sinq/rhel7/bin/anatric", cwd=None):
def anatric(config_file, anatric_path=ANATRIC_PATH, cwd=None):
    comp_proc = subprocess.run(
        [anatric_path, config_file],
        stdout=subprocess.PIPE,
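The hard-coded default path in `anatric()` is replaced by the new module-level `ANATRIC_PATH` constant, which the CLI also imports. A minimal sketch of the resulting call pattern, assuming only the wrapper signature shown in the diff; `my_config.xml` and the local override path are hypothetical:

```python
# Minimal sketch of calling the anatric wrapper with the new module-level default;
# the call shells out to the external anatric executable via subprocess.
import pyzebra

# Uses ANATRIC_PATH ("/afs/psi.ch/project/sinq/rhel7/bin/anatric") as the default executable
pyzebra.anatric("my_config.xml")

# An explicit path still overrides the default (hypothetical local build)
pyzebra.anatric("my_config.xml", anatric_path="/usr/local/bin/anatric", cwd="/tmp")
```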
@@ -2,17 +2,18 @@ import logging
import sys
from io import StringIO

import pyzebra
from bokeh.io import curdoc
from bokeh.layouts import column, row
from bokeh.models import Tabs, TextAreaInput
from bokeh.models import Button, Panel, Tabs, TextAreaInput, TextInput

import panel_ccl_integrate
import panel_hdf_anatric
import panel_hdf_param_study
import panel_hdf_viewer
import panel_param_study
import panel_spind


doc = curdoc()

sys.stdout = StringIO()
@@ -25,16 +26,40 @@ bokeh_logger = logging.getLogger("bokeh")
bokeh_logger.addHandler(bokeh_handler)
bokeh_log_textareainput = TextAreaInput(title="server output:", height=150)

# Final layout
tab_hdf_viewer = panel_hdf_viewer.create()
tab_hdf_anatric = panel_hdf_anatric.create()
tab_ccl_integrate = panel_ccl_integrate.create()
tab_param_study = panel_param_study.create()
tab_spind = panel_spind.create()
def proposal_textinput_callback(_attr, _old, _new):
    apply_button.disabled = False

proposal_textinput = TextInput(title="Proposal number:", name="")
proposal_textinput.on_change("value_input", proposal_textinput_callback)
doc.proposal_textinput = proposal_textinput

def apply_button_callback():
    try:
        proposal_path = pyzebra.find_proposal_path(proposal_textinput.value)
    except ValueError as e:
        print(e)
        return

    proposal_textinput.name = proposal_path
    apply_button.disabled = True

apply_button = Button(label="Apply", button_type="primary")
apply_button.on_click(apply_button_callback)

# Final layout
doc.add_root(
    column(
        Tabs(tabs=[tab_hdf_viewer, tab_hdf_anatric, tab_ccl_integrate, tab_param_study, tab_spind]),
        Tabs(
            tabs=[
                Panel(child=column(proposal_textinput, apply_button), title="user config"),
                panel_hdf_viewer.create(),
                panel_hdf_anatric.create(),
                panel_ccl_integrate.create(),
                panel_param_study.create(),
                panel_hdf_param_study.create(),
                panel_spind.create(),
            ]
        ),
        row(stdout_textareainput, bokeh_log_textareainput, sizing_mode="scale_both"),
    )
)
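The app now hosts a single "user config" panel with a proposal number input and an Apply button, and shares that widget with every panel through `doc.proposal_textinput`; panels react to changes of its `name` attribute, which is set to the resolved proposal path. A minimal sketch of how a panel hooks into the shared widget, assuming only the attribute and callback names used in this diff:

```python
# Minimal sketch of the shared "user config" pattern introduced here: app.py stores the
# TextInput on the document, and apply_button_callback sets its "name" attribute to the
# path returned by pyzebra.find_proposal_path.
from bokeh.io import curdoc


def create():
    doc = curdoc()

    def proposal_changed(_attr, _old, new_path):
        # new_path is the resolved proposal directory; refresh panel-local file lists here
        print(f"proposal path applied: {new_path}")

    proposal_textinput = doc.proposal_textinput
    proposal_textinput.on_change("name", proposal_changed)
```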
@@ -6,6 +6,7 @@ from bokeh.application.application import Application
from bokeh.application.handlers import ScriptHandler
from bokeh.server.server import Server

from pyzebra.anatric import ANATRIC_PATH
from pyzebra.app.handler import PyzebraHandler

logging.basicConfig(format="%(asctime)s %(message)s", level=logging.INFO)
@@ -38,10 +39,11 @@ def main():
    )

    parser.add_argument(
        "--anatric-path",
        type=str,
        default=None,
        help="path to anatric executable",
        "--anatric-path", type=str, default=ANATRIC_PATH, help="path to anatric executable",
    )

    parser.add_argument(
        "--spind-path", type=str, default=None, help="path to spind scripts folder",
    )

    parser.add_argument(
@@ -55,7 +57,7 @@ def main():

    logger.info(app_path)

    pyzebra_handler = PyzebraHandler(args.anatric_path)
    pyzebra_handler = PyzebraHandler(args.anatric_path, args.spind_path)
    handler = ScriptHandler(filename=app_path, argv=args.args)
    server = Server(
        {"/": Application(pyzebra_handler, handler)},
@@ -5,7 +5,7 @@ class PyzebraHandler(Handler):
    """Provides a mechanism for generic bokeh applications to build up new streamvis documents.
    """

    def __init__(self, anatric_path):
    def __init__(self, anatric_path, spind_path):
        """Initialize a pyzebra handler for bokeh applications.

        Args:
@@ -14,6 +14,7 @@ class PyzebraHandler(Handler):
        super().__init__() # no-op

        self.anatric_path = anatric_path
        self.spind_path = spind_path

    def modify_document(self, doc):
        """Modify an application document with pyzebra specific features.
@@ -26,5 +27,6 @@ class PyzebraHandler(Handler):
        """
        doc.title = "pyzebra"
        doc.anatric_path = self.anatric_path
        doc.spind_path = self.spind_path

        return doc
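The handler forwards both CLI paths onto the bokeh document, so any panel can read them from `curdoc()`. A minimal sketch of how a panel would pick up the new `spind_path`, assuming only the attributes set in `modify_document` above:

```python
# Minimal sketch of reading the handler-provided paths inside a panel; modify_document
# attaches anatric_path and spind_path to the document before the app script runs.
from bokeh.io import curdoc


def create():
    doc = curdoc()
    anatric_path = doc.anatric_path  # defaults to ANATRIC_PATH via the CLI
    spind_path = doc.spind_path      # None unless --spind-path was given
    if spind_path is None:
        print("spind scripts folder not configured")
```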
@@ -5,6 +5,7 @@ import tempfile
import types

import numpy as np
from bokeh.io import curdoc
from bokeh.layouts import column, row
from bokeh.models import (
    BasicTicker,
@@ -28,7 +29,7 @@ from bokeh.models import (
    Panel,
    PanTool,
    Plot,
    RadioButtonGroup,
    RadioGroup,
    ResetTool,
    Scatter,
    Select,
@@ -37,13 +38,13 @@ from bokeh.models import (
    Spinner,
    TableColumn,
    TextAreaInput,
    TextInput,
    WheelZoomTool,
    Whisker,
)

import pyzebra
from pyzebra.ccl_io import AREA_METHODS
from pyzebra.ccl_io import EXPORT_TARGETS
from pyzebra.ccl_process import AREA_METHODS


javaScript = """
@@ -57,7 +58,7 @@ for (let i = 0; i < js_data.data['fname'].length; i++) {
    document.body.appendChild(link);
    const url = window.URL.createObjectURL(blob);
    link.href = url;
    link.download = js_data.data['fname'][i];
    link.download = js_data.data['fname'][i] + js_data.data['ext'][i];
    link.click();
    window.URL.revokeObjectURL(url);
    document.body.removeChild(link);
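The "Download File(s)" button never touches the server file system: the panel pushes exported text into the `js_data` ColumnDataSource and a `CustomJS` callback assembles the downloads in the browser. The change above adds an `ext` column so one base name can yield several files. A condensed sketch of that pattern; the Blob construction is assumed, since it sits outside the visible hunk:

```python
# Condensed sketch of the CustomJS download pattern used by this panel; download_js is a
# stand-in for the module's javaScript template, with the Blob line assumed.
from bokeh.models import Button, ColumnDataSource, CustomJS

js_data = ColumnDataSource(data=dict(content=["", ""], fname=["", ""], ext=["", ""]))

download_js = """
for (let i = 0; i < js_data.data['fname'].length; i++) {
    const blob = new Blob([js_data.data['content'][i]], {type: 'text/plain'})
    const link = document.createElement('a')
    document.body.appendChild(link)
    link.href = window.URL.createObjectURL(blob)
    link.download = js_data.data['fname'][i] + js_data.data['ext'][i]
    link.click()
    document.body.removeChild(link)
}
"""

save_button = Button(label="Download File(s)", button_type="success", width=200)
save_button.js_on_click(CustomJS(args={"js_data": js_data}, code=download_js))
```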
@@ -69,22 +70,33 @@ for (let i = 0; i < js_data.data['fname'].length; i++) {


def create():
    doc = curdoc()
    det_data = {}
    fit_params = {}
    js_data = ColumnDataSource(data=dict(content=["", ""], fname=["", ""]))
    js_data = ColumnDataSource(data=dict(content=["", ""], fname=["", ""], ext=["", ""]))

    def proposal_textinput_callback(_attr, _old, new):
        proposal = new.strip()
        year = new[:4]
        proposal_path = f"/afs/psi.ch/project/sinqdata/{year}/zebra/{proposal}"
        file_list = []
        for file in os.listdir(proposal_path):
            if file.endswith((".ccl", ".dat")):
                file_list.append((os.path.join(proposal_path, file), file))
        file_select.options = file_list
    def file_select_update_for_proposal():
        proposal_path = proposal_textinput.name
        if proposal_path:
            file_list = []
            for file in os.listdir(proposal_path):
                if file.endswith((".ccl", ".dat")):
                    file_list.append((os.path.join(proposal_path, file), file))
            file_select.options = file_list
            file_open_button.disabled = False
            file_append_button.disabled = False
        else:
            file_select.options = []
            file_open_button.disabled = True
            file_append_button.disabled = True

    proposal_textinput = TextInput(title="Proposal number:", width=210)
    proposal_textinput.on_change("value", proposal_textinput_callback)
    doc.add_periodic_callback(file_select_update_for_proposal, 5000)

    def proposal_textinput_callback(_attr, _old, _new):
        file_select_update_for_proposal()

    proposal_textinput = doc.proposal_textinput
    proposal_textinput.on_change("name", proposal_textinput_callback)

    def _init_datatable():
        scan_list = [s["idx"] for s in det_data]
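The "Open New" callback further down (next hunk) was reworked so that every selected file is parsed and monitor-normalized first; the first file becomes the working dataset with duplicates merged, and each later file is merged into it. A condensed sketch of that flow, using the pyzebra helpers named in the diff wrapped in a hypothetical `open_files` function:

```python
# Condensed sketch of the reworked "Open New" flow (see the hunk below); parse_1D,
# normalize_dataset, merge_duplicates and merge_datasets are pyzebra helpers, and
# open_files is a hypothetical wrapper for illustration only.
import os

import pyzebra


def open_files(file_paths, monitor):
    det_data = []
    base = ""
    for f_ind, f_path in enumerate(file_paths):
        with open(f_path) as file:
            base, ext = os.path.splitext(os.path.basename(f_path))
            file_data = pyzebra.parse_1D(file, ext)

        pyzebra.normalize_dataset(file_data, monitor)

        if f_ind == 0:  # first file becomes the working dataset
            det_data = file_data
            pyzebra.merge_duplicates(det_data)
        else:
            pyzebra.merge_datasets(det_data, file_data)

    return det_data, base
```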
@ -97,46 +109,45 @@ def create():
|
||||
scan_table_source.selected.indices = [0]
|
||||
|
||||
merge_options = [(str(i), f"{i} ({idx})") for i, idx in enumerate(scan_list)]
|
||||
merge_source_select.options = merge_options
|
||||
merge_source_select.value = merge_options[0][0]
|
||||
merge_dest_select.options = merge_options
|
||||
merge_dest_select.value = merge_options[0][0]
|
||||
merge_from_select.options = merge_options
|
||||
merge_from_select.value = merge_options[0][0]
|
||||
|
||||
file_select = MultiSelect(title="Available .ccl/.dat files:", width=210, height=250)
|
||||
|
||||
def file_open_button_callback():
|
||||
nonlocal det_data
|
||||
det_data = []
|
||||
for f_name in file_select.value:
|
||||
with open(f_name) as file:
|
||||
base, ext = os.path.splitext(f_name)
|
||||
if det_data:
|
||||
append_data = pyzebra.parse_1D(file, ext)
|
||||
pyzebra.normalize_dataset(append_data, monitor_spinner.value)
|
||||
pyzebra.merge_datasets(det_data, append_data)
|
||||
else:
|
||||
det_data = pyzebra.parse_1D(file, ext)
|
||||
pyzebra.normalize_dataset(det_data, monitor_spinner.value)
|
||||
pyzebra.merge_duplicates(det_data)
|
||||
js_data.data.update(fname=[base + ".comm", base + ".incomm"])
|
||||
for f_ind, f_path in enumerate(file_select.value):
|
||||
with open(f_path) as file:
|
||||
base, ext = os.path.splitext(os.path.basename(f_path))
|
||||
file_data = pyzebra.parse_1D(file, ext)
|
||||
|
||||
pyzebra.normalize_dataset(file_data, monitor_spinner.value)
|
||||
|
||||
if f_ind == 0: # first file
|
||||
det_data = file_data
|
||||
pyzebra.merge_duplicates(det_data)
|
||||
js_data.data.update(fname=[base, base])
|
||||
else:
|
||||
pyzebra.merge_datasets(det_data, file_data)
|
||||
|
||||
_init_datatable()
|
||||
append_upload_button.disabled = False
|
||||
|
||||
file_open_button = Button(label="Open New", width=100)
|
||||
file_open_button = Button(label="Open New", width=100, disabled=True)
|
||||
file_open_button.on_click(file_open_button_callback)
|
||||
|
||||
def file_append_button_callback():
|
||||
for f_name in file_select.value:
|
||||
with open(f_name) as file:
|
||||
_, ext = os.path.splitext(f_name)
|
||||
append_data = pyzebra.parse_1D(file, ext)
|
||||
for f_path in file_select.value:
|
||||
with open(f_path) as file:
|
||||
_, ext = os.path.splitext(f_path)
|
||||
file_data = pyzebra.parse_1D(file, ext)
|
||||
|
||||
pyzebra.normalize_dataset(append_data, monitor_spinner.value)
|
||||
pyzebra.merge_datasets(det_data, append_data)
|
||||
pyzebra.normalize_dataset(file_data, monitor_spinner.value)
|
||||
pyzebra.merge_datasets(det_data, file_data)
|
||||
|
||||
_init_datatable()
|
||||
|
||||
file_append_button = Button(label="Append", width=100)
|
||||
file_append_button = Button(label="Append", width=100, disabled=True)
|
||||
file_append_button.on_click(file_append_button_callback)
|
||||
|
||||
def upload_button_callback(_attr, _old, new):
|
||||
@ -145,17 +156,19 @@ def create():
|
||||
for f_str, f_name in zip(new, upload_button.filename):
|
||||
with io.StringIO(base64.b64decode(f_str).decode()) as file:
|
||||
base, ext = os.path.splitext(f_name)
|
||||
if det_data:
|
||||
append_data = pyzebra.parse_1D(file, ext)
|
||||
pyzebra.normalize_dataset(append_data, monitor_spinner.value)
|
||||
pyzebra.merge_datasets(det_data, append_data)
|
||||
else:
|
||||
det_data = pyzebra.parse_1D(file, ext)
|
||||
pyzebra.normalize_dataset(det_data, monitor_spinner.value)
|
||||
pyzebra.merge_duplicates(det_data)
|
||||
js_data.data.update(fname=[base + ".comm", base + ".incomm"])
|
||||
file_data = pyzebra.parse_1D(file, ext)
|
||||
|
||||
pyzebra.normalize_dataset(file_data, monitor_spinner.value)
|
||||
|
||||
if not det_data: # first file
|
||||
det_data = file_data
|
||||
pyzebra.merge_duplicates(det_data)
|
||||
js_data.data.update(fname=[base, base])
|
||||
else:
|
||||
pyzebra.merge_datasets(det_data, file_data)
|
||||
|
||||
_init_datatable()
|
||||
append_upload_button.disabled = False
|
||||
|
||||
upload_div = Div(text="or upload new .ccl/.dat files:", margin=(5, 5, 0, 5))
|
||||
upload_button = FileInput(accept=".ccl,.dat", multiple=True, width=200)
|
||||
@ -165,37 +178,40 @@ def create():
|
||||
for f_str, f_name in zip(new, append_upload_button.filename):
|
||||
with io.StringIO(base64.b64decode(f_str).decode()) as file:
|
||||
_, ext = os.path.splitext(f_name)
|
||||
append_data = pyzebra.parse_1D(file, ext)
|
||||
file_data = pyzebra.parse_1D(file, ext)
|
||||
|
||||
pyzebra.normalize_dataset(append_data, monitor_spinner.value)
|
||||
pyzebra.merge_datasets(det_data, append_data)
|
||||
pyzebra.normalize_dataset(file_data, monitor_spinner.value)
|
||||
pyzebra.merge_datasets(det_data, file_data)
|
||||
|
||||
_init_datatable()
|
||||
|
||||
append_upload_div = Div(text="append extra files:", margin=(5, 5, 0, 5))
|
||||
append_upload_button = FileInput(accept=".ccl,.dat", multiple=True, width=200)
|
||||
append_upload_button = FileInput(accept=".ccl,.dat", multiple=True, width=200, disabled=True)
|
||||
append_upload_button.on_change("value", append_upload_button_callback)
|
||||
|
||||
def monitor_spinner_callback(_attr, old, new):
|
||||
if det_data:
|
||||
pyzebra.normalize_dataset(det_data, new)
|
||||
_update_plot(_get_selected_scan())
|
||||
_update_plot()
|
||||
|
||||
monitor_spinner = Spinner(title="Monitor:", mode="int", value=100_000, low=1, width=145)
|
||||
monitor_spinner.on_change("value", monitor_spinner_callback)
|
||||
|
||||
def _update_table():
|
||||
def _update_datatable():
|
||||
fit_ok = [(1 if "fit" in scan else 0) for scan in det_data]
|
||||
scan_table_source.data.update(fit=fit_ok)
|
||||
export = [scan.get("active", True) for scan in det_data]
|
||||
scan_table_source.data.update(fit=fit_ok, export=export)
|
||||
|
||||
def _update_plot(scan):
|
||||
def _update_plot():
|
||||
scan = _get_selected_scan()
|
||||
scan_motor = scan["scan_motor"]
|
||||
|
||||
y = scan["Counts"]
|
||||
y = scan["counts"]
|
||||
y_err = scan["counts_err"]
|
||||
x = scan[scan_motor]
|
||||
|
||||
plot.axis[0].axis_label = scan_motor
|
||||
plot_scatter_source.data.update(x=x, y=y, y_upper=y + np.sqrt(y), y_lower=y - np.sqrt(y))
|
||||
plot_scatter_source.data.update(x=x, y=y, y_upper=y + y_err, y_lower=y - y_err)
|
||||
|
||||
fit = scan.get("fit")
|
||||
if fit is not None:
|
||||
@ -298,13 +314,14 @@ def create():
|
||||
# skip unnecessary update caused by selection drop
|
||||
return
|
||||
|
||||
_update_plot(det_data[new[0]])
|
||||
_update_plot()
|
||||
|
||||
def scan_table_source_callback(_attr, _old, _new):
|
||||
_update_preview()
|
||||
|
||||
scan_table_source = ColumnDataSource(dict(scan=[], hkl=[], fit=[], export=[]))
|
||||
scan_table_source.on_change("data", scan_table_source_callback)
|
||||
scan_table_source.selected.on_change("indices", scan_table_select_callback)
|
||||
|
||||
scan_table = DataTable(
|
||||
source=scan_table_source,
|
||||
@ -320,28 +337,34 @@ def create():
|
||||
editable=True,
|
||||
)
|
||||
|
||||
scan_table_source.selected.on_change("indices", scan_table_select_callback)
|
||||
|
||||
def _get_selected_scan():
|
||||
return det_data[scan_table_source.selected.indices[0]]
|
||||
|
||||
merge_dest_select = Select(title="destination:", width=100)
|
||||
merge_source_select = Select(title="source:", width=100)
|
||||
merge_from_select = Select(title="scan:", width=145)
|
||||
|
||||
def merge_button_callback():
|
||||
scan_dest_ind = int(merge_dest_select.value)
|
||||
scan_source_ind = int(merge_source_select.value)
|
||||
scan_into = _get_selected_scan()
|
||||
scan_from = det_data[int(merge_from_select.value)]
|
||||
|
||||
if scan_dest_ind == scan_source_ind:
|
||||
if scan_into is scan_from:
|
||||
print("WARNING: Selected scans for merging are identical")
|
||||
return
|
||||
|
||||
pyzebra.merge_scans(det_data[scan_dest_ind], det_data[scan_source_ind])
|
||||
_update_plot(_get_selected_scan())
|
||||
pyzebra.merge_scans(scan_into, scan_from)
|
||||
_update_datatable()
|
||||
_update_plot()
|
||||
|
||||
merge_button = Button(label="Merge scans", width=145)
|
||||
merge_button = Button(label="Merge into current", width=145)
|
||||
merge_button.on_click(merge_button_callback)
|
||||
|
||||
def restore_button_callback():
|
||||
pyzebra.restore_scan(_get_selected_scan())
|
||||
_update_datatable()
|
||||
_update_plot()
|
||||
|
||||
restore_button = Button(label="Restore scan", width=145)
|
||||
restore_button.on_click(restore_button_callback)
|
||||
|
||||
def fit_from_spinner_callback(_attr, _old, new):
|
||||
fit_from_span.location = new
|
||||
|
||||
@ -371,7 +394,6 @@ def create():
|
||||
# ("Pseudo Voigt1", "pseudovoigt1"),
|
||||
],
|
||||
width=145,
|
||||
disabled=True,
|
||||
)
|
||||
fitparams_add_dropdown.on_click(fitparams_add_dropdown_callback)
|
||||
|
||||
@ -406,7 +428,7 @@ def create():
|
||||
|
||||
fitparams_select.value = []
|
||||
|
||||
fitparams_remove_button = Button(label="Remove fit function", width=145, disabled=True)
|
||||
fitparams_remove_button = Button(label="Remove fit function", width=145)
|
||||
fitparams_remove_button.on_click(fitparams_remove_button_callback)
|
||||
|
||||
def fitparams_factory(function):
|
||||
@ -462,46 +484,47 @@ def create():
|
||||
|
||||
fit_output_textinput = TextAreaInput(title="Fit results:", width=750, height=200)
|
||||
|
||||
def fit_all_button_callback():
|
||||
def proc_all_button_callback():
|
||||
for scan, export in zip(det_data, scan_table_source.data["export"]):
|
||||
if export:
|
||||
pyzebra.fit_scan(
|
||||
scan, fit_params, fit_from=fit_from_spinner.value, fit_to=fit_to_spinner.value
|
||||
)
|
||||
pyzebra.get_area(
|
||||
scan,
|
||||
area_method=AREA_METHODS[area_method_radiobutton.active],
|
||||
lorentz=lorentz_checkbox.active,
|
||||
)
|
||||
|
||||
_update_plot(_get_selected_scan())
|
||||
_update_table()
|
||||
_update_plot()
|
||||
_update_datatable()
|
||||
|
||||
fit_all_button = Button(label="Fit All", button_type="primary", width=145)
|
||||
fit_all_button.on_click(fit_all_button_callback)
|
||||
proc_all_button = Button(label="Process All", button_type="primary", width=145)
|
||||
proc_all_button.on_click(proc_all_button_callback)
|
||||
|
||||
def fit_button_callback():
|
||||
def proc_button_callback():
|
||||
scan = _get_selected_scan()
|
||||
pyzebra.fit_scan(
|
||||
scan, fit_params, fit_from=fit_from_spinner.value, fit_to=fit_to_spinner.value
|
||||
)
|
||||
pyzebra.get_area(
|
||||
scan,
|
||||
area_method=AREA_METHODS[area_method_radiobutton.active],
|
||||
lorentz=lorentz_checkbox.active,
|
||||
)
|
||||
|
||||
_update_plot(scan)
|
||||
_update_table()
|
||||
_update_plot()
|
||||
_update_datatable()
|
||||
|
||||
fit_button = Button(label="Fit Current", width=145)
|
||||
fit_button.on_click(fit_button_callback)
|
||||
proc_button = Button(label="Process Current", width=145)
|
||||
proc_button.on_click(proc_button_callback)
|
||||
|
||||
def area_method_radiobutton_callback(_handler):
|
||||
_update_preview()
|
||||
area_method_div = Div(text="Intensity:", margin=(5, 5, 0, 5))
|
||||
area_method_radiobutton = RadioGroup(labels=["Function", "Area"], active=0, width=145)
|
||||
|
||||
area_method_radiobutton = RadioButtonGroup(
|
||||
labels=["Fit area", "Int area"], active=0, width=145, disabled=True
|
||||
)
|
||||
area_method_radiobutton.on_click(area_method_radiobutton_callback)
|
||||
lorentz_checkbox = CheckboxGroup(labels=["Lorentz Correction"], width=145, margin=(13, 5, 5, 5))
|
||||
|
||||
def lorentz_checkbox_callback(_handler):
|
||||
_update_preview()
|
||||
|
||||
lorentz_checkbox = CheckboxGroup(labels=["Lorentz Correction"], width=145, margin=[13, 5, 5, 5])
|
||||
lorentz_checkbox.on_click(lorentz_checkbox_callback)
|
||||
|
||||
    export_preview_textinput = TextAreaInput(title="Export file preview:", width=500, height=400)
    export_preview_textinput = TextAreaInput(title="Export file(s) preview:", width=500, height=400)

    def _update_preview():
        with tempfile.TemporaryDirectory() as temp_dir:
@@ -514,14 +537,13 @@ def create():
            pyzebra.export_1D(
                export_data,
                temp_file,
                area_method=AREA_METHODS[int(area_method_radiobutton.active)],
                lorentz=bool(lorentz_checkbox.active),
                export_target_select.value,
                hkl_precision=int(hkl_precision_select.value),
            )

            exported_content = ""
            file_content = []
            for ext in (".comm", ".incomm"):
            for ext in EXPORT_TARGETS[export_target_select.value]:
                fname = temp_file + ext
                if os.path.isfile(fname):
                    with open(fname) as f:
@@ -534,6 +556,16 @@ def create():
        js_data.data.update(content=file_content)
        export_preview_textinput.value = exported_content

    def export_target_select_callback(_attr, _old, new):
        js_data.data.update(ext=EXPORT_TARGETS[new])
        _update_preview()

    export_target_select = Select(
        title="Export target:", options=list(EXPORT_TARGETS.keys()), value="fullprof", width=80
    )
    export_target_select.on_change("value", export_target_select_callback)
    js_data.data.update(ext=EXPORT_TARGETS[export_target_select.value])

    def hkl_precision_select_callback(_attr, _old, _new):
        _update_preview()

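The hard-coded `(".comm", ".incomm")` extension pair is replaced by a lookup into `EXPORT_TARGETS`, now imported from `pyzebra.ccl_io` and keyed by the new "Export target" select (default "fullprof"). The mapping itself is not visible in this diff; a plausible shape, inferred only from how it is indexed here, is:

```python
# Assumed shape of pyzebra.ccl_io.EXPORT_TARGETS: only the "fullprof" key and the
# tuple-of-extensions structure are implied by this diff; any other entry is hypothetical.
EXPORT_TARGETS = {
    "fullprof": (".comm", ".incomm"),
    # "jana": (".col", ".incol"),  # hypothetical second target
}

# _update_preview() iterates the extensions of the selected target:
temp_file = "/tmp/export/temp_file"  # placeholder path
for ext in EXPORT_TARGETS["fullprof"]:
    print("would collect", temp_file + ext)
```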
@ -542,28 +574,24 @@ def create():
|
||||
)
|
||||
hkl_precision_select.on_change("value", hkl_precision_select_callback)
|
||||
|
||||
save_button = Button(label="Download File", button_type="success", width=200)
|
||||
save_button = Button(label="Download File(s)", button_type="success", width=200)
|
||||
save_button.js_on_click(CustomJS(args={"js_data": js_data}, code=javaScript))
|
||||
|
||||
fitpeak_controls = row(
|
||||
column(fitparams_add_dropdown, fitparams_select, fitparams_remove_button),
|
||||
fitparams_table,
|
||||
Spacer(width=20),
|
||||
column(
|
||||
row(fit_from_spinner, fit_to_spinner),
|
||||
row(area_method_radiobutton, lorentz_checkbox),
|
||||
row(fit_button, fit_all_button),
|
||||
),
|
||||
column(fit_from_spinner, lorentz_checkbox, area_method_div, area_method_radiobutton),
|
||||
column(fit_to_spinner, proc_button, proc_all_button),
|
||||
)
|
||||
|
||||
scan_layout = column(
|
||||
scan_table,
|
||||
monitor_spinner,
|
||||
row(column(Spacer(height=19), merge_button), merge_dest_select, merge_source_select),
|
||||
row(monitor_spinner, column(Spacer(height=19), restore_button)),
|
||||
row(column(Spacer(height=19), merge_button), merge_from_select),
|
||||
)
|
||||
|
||||
import_layout = column(
|
||||
proposal_textinput,
|
||||
file_select,
|
||||
row(file_open_button, file_append_button),
|
||||
upload_div,
|
||||
@ -574,7 +602,9 @@ def create():
|
||||
|
||||
export_layout = column(
|
||||
export_preview_textinput,
|
||||
row(hkl_precision_select, column(Spacer(height=19), row(save_button))),
|
||||
row(
|
||||
export_target_select, hkl_precision_select, column(Spacer(height=19), row(save_button))
|
||||
),
|
||||
)
|
||||
|
||||
tab_layout = column(
|
||||
|
@@ -347,10 +347,7 @@ def create():
        with tempfile.TemporaryDirectory() as temp_dir:
            temp_file = temp_dir + "/config.xml"
            config.save_as(temp_file)
            if doc.anatric_path:
                pyzebra.anatric(temp_file, anatric_path=doc.anatric_path, cwd=temp_dir)
            else:
                pyzebra.anatric(temp_file, cwd=temp_dir)
            pyzebra.anatric(temp_file, anatric_path=doc.anatric_path, cwd=temp_dir)

            with open(os.path.join(temp_dir, config.logfile)) as f_log:
                output_log.value = f_log.read()
pyzebra/app/panel_hdf_param_study.py (new file, 634 lines)

@@ -0,0 +1,634 @@
|
||||
import base64
|
||||
import io
|
||||
import math
|
||||
import os
|
||||
|
||||
import numpy as np
|
||||
from bokeh.io import curdoc
|
||||
from bokeh.layouts import column, gridplot, row
|
||||
from bokeh.models import (
|
||||
BasicTicker,
|
||||
BoxZoomTool,
|
||||
Button,
|
||||
CheckboxGroup,
|
||||
ColumnDataSource,
|
||||
DataRange1d,
|
||||
DataTable,
|
||||
Div,
|
||||
FileInput,
|
||||
Grid,
|
||||
MultiSelect,
|
||||
NumberEditor,
|
||||
NumberFormatter,
|
||||
Image,
|
||||
LinearAxis,
|
||||
LinearColorMapper,
|
||||
Panel,
|
||||
PanTool,
|
||||
Plot,
|
||||
Range1d,
|
||||
ResetTool,
|
||||
Scatter,
|
||||
Select,
|
||||
Spinner,
|
||||
TableColumn,
|
||||
Tabs,
|
||||
Title,
|
||||
WheelZoomTool,
|
||||
)
|
||||
from bokeh.palettes import Cividis256, Greys256, Plasma256 # pylint: disable=E0611
|
||||
from scipy.optimize import curve_fit
|
||||
|
||||
import pyzebra
|
||||
|
||||
IMAGE_W = 256
|
||||
IMAGE_H = 128
|
||||
IMAGE_PLOT_W = int(IMAGE_W * 2) + 52
|
||||
IMAGE_PLOT_H = int(IMAGE_H * 2) + 27
|
||||
|
||||
|
||||
def create():
|
||||
doc = curdoc()
|
||||
zebra_data = []
|
||||
det_data = {}
|
||||
cami_meta = {}
|
||||
|
||||
num_formatter = NumberFormatter(format="0.00", nan_format="")
|
||||
|
||||
def file_select_update():
|
||||
if data_source.value == "proposal number":
|
||||
proposal_path = proposal_textinput.name
|
||||
if proposal_path:
|
||||
file_list = []
|
||||
for file in os.listdir(proposal_path):
|
||||
if file.endswith(".hdf"):
|
||||
file_list.append((os.path.join(proposal_path, file), file))
|
||||
file_select.options = file_list
|
||||
else:
|
||||
file_select.options = []
|
||||
|
||||
else: # "cami file"
|
||||
if not cami_meta:
|
||||
file_select.options = []
|
||||
return
|
||||
|
||||
file_list = cami_meta["filelist"]
|
||||
file_select.options = [(entry, os.path.basename(entry)) for entry in file_list]
|
||||
|
||||
def data_source_callback(_attr, _old, _new):
|
||||
file_select_update()
|
||||
|
||||
data_source = Select(
|
||||
title="Data Source:",
|
||||
value="proposal number",
|
||||
options=["proposal number", "cami file"],
|
||||
width=210,
|
||||
)
|
||||
data_source.on_change("value", data_source_callback)
|
||||
|
||||
doc.add_periodic_callback(file_select_update, 5000)
|
||||
|
||||
def proposal_textinput_callback(_attr, _old, _new):
|
||||
file_select_update()
|
||||
|
||||
proposal_textinput = doc.proposal_textinput
|
||||
proposal_textinput.on_change("name", proposal_textinput_callback)
|
||||
|
||||
def upload_button_callback(_attr, _old, new):
|
||||
nonlocal cami_meta
|
||||
with io.StringIO(base64.b64decode(new).decode()) as file:
|
||||
cami_meta = pyzebra.parse_h5meta(file)
|
||||
data_source.value = "cami file"
|
||||
file_select_update()
|
||||
|
||||
upload_div = Div(text="or upload .cami file:", margin=(5, 5, 0, 5))
|
||||
upload_button = FileInput(accept=".cami", width=200)
|
||||
upload_button.on_change("value", upload_button_callback)
|
||||
|
||||
file_select = MultiSelect(title="Available .hdf files:", width=210, height=320)
|
||||
|
||||
def _init_datatable():
|
||||
file_list = []
|
||||
for scan in zebra_data:
|
||||
file_list.append(os.path.basename(scan["original_filename"]))
|
||||
|
||||
scan_table_source.data.update(
|
||||
file=file_list,
|
||||
param=[None] * len(zebra_data),
|
||||
frame=[None] * len(zebra_data),
|
||||
x_pos=[None] * len(zebra_data),
|
||||
y_pos=[None] * len(zebra_data),
|
||||
)
|
||||
scan_table_source.selected.indices = []
|
||||
scan_table_source.selected.indices = [0]
|
||||
|
||||
param_select.value = "user defined"
|
||||
|
||||
def _update_table():
|
||||
frame = []
|
||||
x_pos = []
|
||||
y_pos = []
|
||||
for scan in zebra_data:
|
||||
if "fit" in scan:
|
||||
framei = scan["fit"]["frame"]
|
||||
x_posi = scan["fit"]["x_pos"]
|
||||
y_posi = scan["fit"]["y_pos"]
|
||||
else:
|
||||
framei = x_posi = y_posi = None
|
||||
|
||||
frame.append(framei)
|
||||
x_pos.append(x_posi)
|
||||
y_pos.append(y_posi)
|
||||
|
||||
scan_table_source.data.update(frame=frame, x_pos=x_pos, y_pos=y_pos)
|
||||
|
||||
def file_open_button_callback():
|
||||
nonlocal zebra_data
|
||||
zebra_data = []
|
||||
for f_name in file_select.value:
|
||||
zebra_data.append(pyzebra.read_detector_data(f_name))
|
||||
|
||||
_init_datatable()
|
||||
|
||||
file_open_button = Button(label="Open New", width=100)
|
||||
file_open_button.on_click(file_open_button_callback)
|
||||
|
||||
def file_append_button_callback():
|
||||
for f_name in file_select.value:
|
||||
zebra_data.append(pyzebra.read_detector_data(f_name))
|
||||
|
||||
_init_datatable()
|
||||
|
||||
file_append_button = Button(label="Append", width=100)
|
||||
file_append_button.on_click(file_append_button_callback)
|
||||
|
||||
# Scan select
|
||||
def scan_table_select_callback(_attr, old, new):
|
||||
nonlocal det_data
|
||||
|
||||
if not new:
|
||||
# skip empty selections
|
||||
return
|
||||
|
||||
# Avoid selection of multiple indicies (via Shift+Click or Ctrl+Click)
|
||||
if len(new) > 1:
|
||||
# drop selection to the previous one
|
||||
scan_table_source.selected.indices = old
|
||||
return
|
||||
|
||||
if len(old) > 1:
|
||||
# skip unnecessary update caused by selection drop
|
||||
return
|
||||
|
||||
det_data = zebra_data[new[0]]
|
||||
|
||||
zebra_mode = det_data["zebra_mode"]
|
||||
if zebra_mode == "nb":
|
||||
metadata_table_source.data.update(geom=["normal beam"])
|
||||
else: # zebra_mode == "bi"
|
||||
metadata_table_source.data.update(geom=["bisecting"])
|
||||
|
||||
if "mf" in det_data:
|
||||
metadata_table_source.data.update(mf=[det_data["mf"][0]])
|
||||
else:
|
||||
metadata_table_source.data.update(mf=[None])
|
||||
|
||||
if "temp" in det_data:
|
||||
metadata_table_source.data.update(temp=[det_data["temp"][0]])
|
||||
else:
|
||||
metadata_table_source.data.update(temp=[None])
|
||||
|
||||
update_overview_plot()
|
||||
|
||||
def scan_table_source_callback(_attr, _old, _new):
|
||||
pass
|
||||
|
||||
scan_table_source = ColumnDataSource(dict(file=[], param=[], frame=[], x_pos=[], y_pos=[]))
|
||||
scan_table_source.selected.on_change("indices", scan_table_select_callback)
|
||||
scan_table_source.on_change("data", scan_table_source_callback)
|
||||
|
||||
scan_table = DataTable(
|
||||
source=scan_table_source,
|
||||
columns=[
|
||||
TableColumn(field="file", title="file", width=150),
|
||||
TableColumn(
|
||||
field="param",
|
||||
title="param",
|
||||
formatter=num_formatter,
|
||||
editor=NumberEditor(),
|
||||
width=50,
|
||||
),
|
||||
TableColumn(field="frame", title="Frame", formatter=num_formatter, width=70),
|
||||
TableColumn(field="x_pos", title="X", formatter=num_formatter, width=70),
|
||||
TableColumn(field="y_pos", title="Y", formatter=num_formatter, width=70),
|
||||
],
|
||||
width=470, # +60 because of the index column
|
||||
height=420,
|
||||
editable=True,
|
||||
autosize_mode="none",
|
||||
)
|
||||
|
||||
def param_select_callback(_attr, _old, new):
|
||||
if new == "user defined":
|
||||
param = [None] * len(zebra_data)
|
||||
else:
|
||||
# TODO: which value to take?
|
||||
param = [scan[new][0] for scan in zebra_data]
|
||||
|
||||
scan_table_source.data["param"] = param
|
||||
_update_param_plot()
|
||||
|
||||
param_select = Select(
|
||||
title="Parameter:",
|
||||
options=["user defined", "temp", "mf", "h", "k", "l"],
|
||||
value="user defined",
|
||||
width=145,
|
||||
)
|
||||
param_select.on_change("value", param_select_callback)
|
||||
|
||||
def update_overview_plot():
|
||||
h5_data = det_data["data"]
|
||||
n_im, n_y, n_x = h5_data.shape
|
||||
overview_x = np.mean(h5_data, axis=1)
|
||||
overview_y = np.mean(h5_data, axis=2)
|
||||
|
||||
# normalize for simpler colormapping
|
||||
overview_max_val = max(np.max(overview_x), np.max(overview_y))
|
||||
overview_x = 1000 * overview_x / overview_max_val
|
||||
overview_y = 1000 * overview_y / overview_max_val
|
||||
|
||||
overview_plot_x_image_source.data.update(image=[overview_x], dw=[n_x], dh=[n_im])
|
||||
overview_plot_y_image_source.data.update(image=[overview_y], dw=[n_y], dh=[n_im])
|
||||
|
||||
if proj_auto_checkbox.active:
|
||||
im_min = min(np.min(overview_x), np.min(overview_y))
|
||||
im_max = max(np.max(overview_x), np.max(overview_y))
|
||||
|
||||
proj_display_min_spinner.value = im_min
|
||||
proj_display_max_spinner.value = im_max
|
||||
|
||||
overview_plot_x_image_glyph.color_mapper.low = im_min
|
||||
overview_plot_y_image_glyph.color_mapper.low = im_min
|
||||
overview_plot_x_image_glyph.color_mapper.high = im_max
|
||||
overview_plot_y_image_glyph.color_mapper.high = im_max
|
||||
|
||||
frame_range.start = 0
|
||||
frame_range.end = n_im
|
||||
frame_range.reset_start = 0
|
||||
frame_range.reset_end = n_im
|
||||
frame_range.bounds = (0, n_im)
|
||||
|
||||
scan_motor = det_data["scan_motor"]
|
||||
overview_plot_y.axis[1].axis_label = f"Scanning motor, {scan_motor}"
|
||||
|
||||
var = det_data[scan_motor]
|
||||
var_start = var[0]
|
||||
var_end = var[-1] + (var[-1] - var[0]) / (n_im - 1)
|
||||
|
||||
scanning_motor_range.start = var_start
|
||||
scanning_motor_range.end = var_end
|
||||
scanning_motor_range.reset_start = var_start
|
||||
scanning_motor_range.reset_end = var_end
|
||||
# handle both, ascending and descending sequences
|
||||
scanning_motor_range.bounds = (min(var_start, var_end), max(var_start, var_end))
|
||||
|
||||
# shared frame ranges
|
||||
frame_range = Range1d(0, 1, bounds=(0, 1))
|
||||
scanning_motor_range = Range1d(0, 1, bounds=(0, 1))
|
||||
|
||||
det_x_range = Range1d(0, IMAGE_W, bounds=(0, IMAGE_W))
|
||||
overview_plot_x = Plot(
|
||||
title=Title(text="Projections on X-axis"),
|
||||
x_range=det_x_range,
|
||||
y_range=frame_range,
|
||||
extra_y_ranges={"scanning_motor": scanning_motor_range},
|
||||
plot_height=400,
|
||||
plot_width=IMAGE_PLOT_W - 3,
|
||||
)
|
||||
|
||||
# ---- tools
|
||||
wheelzoomtool = WheelZoomTool(maintain_focus=False)
|
||||
overview_plot_x.toolbar.logo = None
|
||||
overview_plot_x.add_tools(
|
||||
PanTool(), BoxZoomTool(), wheelzoomtool, ResetTool(),
|
||||
)
|
||||
overview_plot_x.toolbar.active_scroll = wheelzoomtool
|
||||
|
||||
# ---- axes
|
||||
overview_plot_x.add_layout(LinearAxis(axis_label="Coordinate X, pix"), place="below")
|
||||
overview_plot_x.add_layout(
|
||||
LinearAxis(axis_label="Frame", major_label_orientation="vertical"), place="left"
|
||||
)
|
||||
|
||||
# ---- grid lines
|
||||
overview_plot_x.add_layout(Grid(dimension=0, ticker=BasicTicker()))
|
||||
overview_plot_x.add_layout(Grid(dimension=1, ticker=BasicTicker()))
|
||||
|
||||
# ---- rgba image glyph
|
||||
overview_plot_x_image_source = ColumnDataSource(
|
||||
dict(image=[np.zeros((1, 1), dtype="float32")], x=[0], y=[0], dw=[IMAGE_W], dh=[1])
|
||||
)
|
||||
|
||||
overview_plot_x_image_glyph = Image(image="image", x="x", y="y", dw="dw", dh="dh")
|
||||
overview_plot_x.add_glyph(
|
||||
overview_plot_x_image_source, overview_plot_x_image_glyph, name="image_glyph"
|
||||
)
|
||||
|
||||
det_y_range = Range1d(0, IMAGE_H, bounds=(0, IMAGE_H))
|
||||
overview_plot_y = Plot(
|
||||
title=Title(text="Projections on Y-axis"),
|
||||
x_range=det_y_range,
|
||||
y_range=frame_range,
|
||||
extra_y_ranges={"scanning_motor": scanning_motor_range},
|
||||
plot_height=400,
|
||||
plot_width=IMAGE_PLOT_H + 22,
|
||||
)
|
||||
|
||||
# ---- tools
|
||||
wheelzoomtool = WheelZoomTool(maintain_focus=False)
|
||||
overview_plot_y.toolbar.logo = None
|
||||
overview_plot_y.add_tools(
|
||||
PanTool(), BoxZoomTool(), wheelzoomtool, ResetTool(),
|
||||
)
|
||||
overview_plot_y.toolbar.active_scroll = wheelzoomtool
|
||||
|
||||
# ---- axes
|
||||
overview_plot_y.add_layout(LinearAxis(axis_label="Coordinate Y, pix"), place="below")
|
||||
overview_plot_y.add_layout(
|
||||
LinearAxis(
|
||||
y_range_name="scanning_motor",
|
||||
axis_label="Scanning motor",
|
||||
major_label_orientation="vertical",
|
||||
),
|
||||
place="right",
|
||||
)
|
||||
|
||||
# ---- grid lines
|
||||
overview_plot_y.add_layout(Grid(dimension=0, ticker=BasicTicker()))
|
||||
overview_plot_y.add_layout(Grid(dimension=1, ticker=BasicTicker()))
|
||||
|
||||
# ---- rgba image glyph
|
||||
overview_plot_y_image_source = ColumnDataSource(
|
||||
dict(image=[np.zeros((1, 1), dtype="float32")], x=[0], y=[0], dw=[IMAGE_H], dh=[1])
|
||||
)
|
||||
|
||||
overview_plot_y_image_glyph = Image(image="image", x="x", y="y", dw="dw", dh="dh")
|
||||
overview_plot_y.add_glyph(
|
||||
overview_plot_y_image_source, overview_plot_y_image_glyph, name="image_glyph"
|
||||
)
|
||||
|
||||
cmap_dict = {
|
||||
"gray": Greys256,
|
||||
"gray_reversed": Greys256[::-1],
|
||||
"plasma": Plasma256,
|
||||
"cividis": Cividis256,
|
||||
}
|
||||
|
||||
def colormap_callback(_attr, _old, new):
|
||||
overview_plot_x_image_glyph.color_mapper = LinearColorMapper(palette=cmap_dict[new])
|
||||
overview_plot_y_image_glyph.color_mapper = LinearColorMapper(palette=cmap_dict[new])
|
||||
|
||||
colormap = Select(title="Colormap:", options=list(cmap_dict.keys()), width=210)
|
||||
colormap.on_change("value", colormap_callback)
|
||||
colormap.value = "plasma"
|
||||
|
||||
PROJ_STEP = 1
|
||||
|
||||
def proj_auto_checkbox_callback(state):
|
||||
if state:
|
||||
proj_display_min_spinner.disabled = True
|
||||
proj_display_max_spinner.disabled = True
|
||||
else:
|
||||
proj_display_min_spinner.disabled = False
|
||||
proj_display_max_spinner.disabled = False
|
||||
|
||||
update_overview_plot()
|
||||
|
||||
proj_auto_checkbox = CheckboxGroup(
|
||||
labels=["Projections Intensity Range"], active=[0], width=145, margin=[10, 5, 0, 5]
|
||||
)
|
||||
proj_auto_checkbox.on_click(proj_auto_checkbox_callback)
|
||||
|
||||
def proj_display_max_spinner_callback(_attr, _old_value, new_value):
|
||||
proj_display_min_spinner.high = new_value - PROJ_STEP
|
||||
overview_plot_x_image_glyph.color_mapper.high = new_value
|
||||
overview_plot_y_image_glyph.color_mapper.high = new_value
|
||||
|
||||
proj_display_max_spinner = Spinner(
|
||||
low=0 + PROJ_STEP,
|
||||
value=1,
|
||||
step=PROJ_STEP,
|
||||
disabled=bool(proj_auto_checkbox.active),
|
||||
width=100,
|
||||
height=31,
|
||||
)
|
||||
proj_display_max_spinner.on_change("value", proj_display_max_spinner_callback)
|
||||
|
||||
def proj_display_min_spinner_callback(_attr, _old_value, new_value):
|
||||
proj_display_max_spinner.low = new_value + PROJ_STEP
|
||||
overview_plot_x_image_glyph.color_mapper.low = new_value
|
||||
overview_plot_y_image_glyph.color_mapper.low = new_value
|
||||
|
||||
proj_display_min_spinner = Spinner(
|
||||
low=0,
|
||||
high=1 - PROJ_STEP,
|
||||
value=0,
|
||||
step=PROJ_STEP,
|
||||
disabled=bool(proj_auto_checkbox.active),
|
||||
width=100,
|
||||
height=31,
|
||||
)
|
||||
proj_display_min_spinner.on_change("value", proj_display_min_spinner_callback)
|
||||
|
||||
def fit_event(scan):
|
||||
p0 = [1.0, 0.0, 1.0]
|
||||
maxfev = 100000
|
||||
|
||||
# wave = scan["wave"]
|
||||
# ddist = scan["ddist"]
|
||||
# cell = scan["cell"]
|
||||
|
||||
# gamma = scan["gamma"][0]
|
||||
# omega = scan["omega"][0]
|
||||
# nu = scan["nu"][0]
|
||||
# chi = scan["chi"][0]
|
||||
# phi = scan["phi"][0]
|
||||
|
||||
scan_motor = scan["scan_motor"]
|
||||
var_angle = scan[scan_motor]
|
||||
|
||||
x0 = int(np.floor(det_x_range.start))
|
||||
xN = int(np.ceil(det_x_range.end))
|
||||
y0 = int(np.floor(det_y_range.start))
|
||||
yN = int(np.ceil(det_y_range.end))
|
||||
fr0 = int(np.floor(frame_range.start))
|
||||
frN = int(np.ceil(frame_range.end))
|
||||
data_roi = scan["data"][fr0:frN, y0:yN, x0:xN]
|
||||
|
||||
cnts = np.sum(data_roi, axis=(1, 2))
|
||||
coeff, _ = curve_fit(gauss, range(len(cnts)), cnts, p0=p0, maxfev=maxfev)
|
||||
|
||||
# m = cnts.mean()
|
||||
# sd = cnts.std()
|
||||
# snr_cnts = np.where(sd == 0, 0, m / sd)
|
||||
|
||||
frC = fr0 + coeff[1]
|
||||
var_F = var_angle[math.floor(frC)]
|
||||
var_C = var_angle[math.ceil(frC)]
|
||||
# frStep = frC - math.floor(frC)
|
||||
var_step = var_C - var_F
|
||||
# var_p = var_F + var_step * frStep
|
||||
|
||||
# if scan_motor == "gamma":
|
||||
# gamma = var_p
|
||||
# elif scan_motor == "omega":
|
||||
# omega = var_p
|
||||
# elif scan_motor == "nu":
|
||||
# nu = var_p
|
||||
# elif scan_motor == "chi":
|
||||
# chi = var_p
|
||||
# elif scan_motor == "phi":
|
||||
# phi = var_p
|
||||
|
||||
intensity = coeff[1] * abs(coeff[2] * var_step) * math.sqrt(2) * math.sqrt(np.pi)
|
||||
|
||||
projX = np.sum(data_roi, axis=(0, 1))
|
||||
coeff, _ = curve_fit(gauss, range(len(projX)), projX, p0=p0, maxfev=maxfev)
|
||||
x_pos = x0 + coeff[1]
|
||||
|
||||
projY = np.sum(data_roi, axis=(0, 2))
|
||||
coeff, _ = curve_fit(gauss, range(len(projY)), projY, p0=p0, maxfev=maxfev)
|
||||
y_pos = y0 + coeff[1]
|
||||
|
||||
scan["fit"] = {"frame": frC, "x_pos": x_pos, "y_pos": y_pos, "intensity": intensity}
|
||||
|
||||
metadata_table_source = ColumnDataSource(dict(geom=[""], temp=[None], mf=[None]))
|
||||
metadata_table = DataTable(
|
||||
source=metadata_table_source,
|
||||
columns=[
|
||||
TableColumn(field="geom", title="Geometry", width=100),
|
||||
TableColumn(field="temp", title="Temperature", formatter=num_formatter, width=100),
|
||||
TableColumn(field="mf", title="Magnetic Field", formatter=num_formatter, width=100),
|
||||
],
|
||||
width=300,
|
||||
height=50,
|
||||
autosize_mode="none",
|
||||
index_position=None,
|
||||
)
|
||||
|
||||
def _update_param_plot():
|
||||
x = []
|
||||
y = []
|
||||
fit_param = fit_param_select.value
|
||||
for s, p in zip(zebra_data, scan_table_source.data["param"]):
|
||||
if "fit" in s and fit_param:
|
||||
x.append(p)
|
||||
y.append(s["fit"][fit_param])
|
||||
param_plot_scatter_source.data.update(x=x, y=y)
|
||||
|
||||
# Parameter plot
|
||||
param_plot = Plot(x_range=DataRange1d(), y_range=DataRange1d(), plot_height=400, plot_width=700)
|
||||
|
||||
param_plot.add_layout(LinearAxis(axis_label="Fit parameter"), place="left")
|
||||
param_plot.add_layout(LinearAxis(axis_label="Parameter"), place="below")
|
||||
|
||||
param_plot.add_layout(Grid(dimension=0, ticker=BasicTicker()))
|
||||
param_plot.add_layout(Grid(dimension=1, ticker=BasicTicker()))
|
||||
|
||||
param_plot_scatter_source = ColumnDataSource(dict(x=[], y=[]))
|
||||
param_plot.add_glyph(param_plot_scatter_source, Scatter(x="x", y="y"))
|
||||
|
||||
param_plot.add_tools(PanTool(), WheelZoomTool(), ResetTool())
|
||||
param_plot.toolbar.logo = None
|
||||
|
||||
def fit_param_select_callback(_attr, _old, _new):
|
||||
_update_param_plot()
|
||||
|
||||
fit_param_select = Select(title="Fit parameter", options=[], width=145)
|
||||
fit_param_select.on_change("value", fit_param_select_callback)
|
||||
|
||||
def proc_all_button_callback():
|
||||
for scan in zebra_data:
|
||||
fit_event(scan)
|
||||
|
||||
_update_table()
|
||||
|
||||
for scan in zebra_data:
|
||||
if "fit" in scan:
|
||||
options = list(scan["fit"].keys())
|
||||
fit_param_select.options = options
|
||||
fit_param_select.value = options[0]
|
||||
break
|
||||
|
||||
_update_param_plot()
|
||||
|
||||
proc_all_button = Button(label="Process All", button_type="primary", width=145)
|
||||
proc_all_button.on_click(proc_all_button_callback)
|
||||
|
||||
def proc_button_callback():
|
||||
fit_event(det_data)
|
||||
|
||||
_update_table()
|
||||
|
||||
for scan in zebra_data:
|
||||
if "fit" in scan:
|
||||
options = list(scan["fit"].keys())
|
||||
fit_param_select.options = options
|
||||
fit_param_select.value = options[0]
|
||||
break
|
||||
|
||||
_update_param_plot()
|
||||
|
||||
proc_button = Button(label="Process Current", width=145)
|
||||
proc_button.on_click(proc_button_callback)
|
||||
|
||||
layout_controls = row(
|
||||
colormap,
|
||||
column(proj_auto_checkbox, row(proj_display_min_spinner, proj_display_max_spinner)),
|
||||
proc_button,
|
||||
proc_all_button,
|
||||
)
|
||||
|
||||
layout_overview = column(
|
||||
gridplot(
|
||||
[[overview_plot_x, overview_plot_y]],
|
||||
toolbar_options=dict(logo=None),
|
||||
merge_tools=True,
|
||||
toolbar_location="left",
|
||||
),
|
||||
layout_controls,
|
||||
)
|
||||
|
||||
# Plot tabs
|
||||
plots = Tabs(
|
||||
tabs=[
|
||||
Panel(child=layout_overview, title="single scan"),
|
||||
Panel(child=column(param_plot, row(fit_param_select)), title="parameter plot"),
|
||||
]
|
||||
)
|
||||
|
||||
# Final layout
|
||||
import_layout = column(
|
||||
data_source,
|
||||
upload_div,
|
||||
upload_button,
|
||||
file_select,
|
||||
row(file_open_button, file_append_button),
|
||||
)
|
||||
|
||||
scan_layout = column(scan_table, row(param_select, metadata_table))
|
||||
|
||||
tab_layout = column(row(import_layout, scan_layout, plots))
|
||||
|
||||
return Panel(child=tab_layout, title="hdf param study")
|
||||
|
||||
|
||||
def gauss(x, *p):
    """Defines Gaussian function
    Args:
        A - amplitude, mu - position of the center, sigma - width
    Returns:
        Gaussian function
    """
    A, mu, sigma = p
    return A * np.exp(-((x - mu) ** 2) / (2.0 * sigma ** 2))
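Both new HDF panels fit 1D projections of the detector counts with this Gaussian via `scipy.optimize.curve_fit` and then convert the fitted frame center into a motor angle and a peak intensity. A minimal, self-contained sketch of that fitting step, with synthetic data standing in for the summed ROI counts:

```python
# Minimal sketch of how fit_event uses gauss with curve_fit; the counts below are a
# synthetic stand-in for np.sum(data_roi, axis=(1, 2)) over a detector ROI.
import numpy as np
from scipy.optimize import curve_fit


def gauss(x, *p):
    """Gaussian with amplitude A, center mu and width sigma, as defined above."""
    A, mu, sigma = p
    return A * np.exp(-((x - mu) ** 2) / (2.0 * sigma ** 2))


frames = np.arange(30)
cnts = gauss(frames, 120.0, 14.3, 2.5) + np.random.default_rng(0).normal(0.0, 3.0, frames.size)

# The panel seeds curve_fit with p0=[1.0, 0.0, 1.0] and maxfev=100000; here p0 is taken
# from the data so this toy example converges reliably.
coeff, _ = curve_fit(gauss, frames, cnts, p0=[cnts.max(), float(np.argmax(cnts)), 1.0])
A_fit, frame_center, sigma_fit = coeff
print(f"peak centered at frame {frame_center:.2f} (sigma {abs(sigma_fit):.2f})")
```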
@ -1,8 +1,11 @@
|
||||
import base64
|
||||
import io
|
||||
import math
|
||||
import os
|
||||
|
||||
import numpy as np
|
||||
from bokeh.events import MouseEnter
|
||||
from bokeh.io import curdoc
|
||||
from bokeh.layouts import column, gridplot, row
|
||||
from bokeh.models import (
|
||||
BasicTicker,
|
||||
@ -34,12 +37,11 @@ from bokeh.models import (
|
||||
Spacer,
|
||||
Spinner,
|
||||
TableColumn,
|
||||
TextAreaInput,
|
||||
TextInput,
|
||||
Title,
|
||||
WheelZoomTool,
|
||||
)
|
||||
from bokeh.palettes import Cividis256, Greys256, Plasma256 # pylint: disable=E0611
|
||||
from scipy.optimize import curve_fit
|
||||
|
||||
import pyzebra
|
||||
|
||||
@ -50,27 +52,57 @@ IMAGE_PLOT_H = int(IMAGE_H * 2) + 27
|
||||
|
||||
|
||||
def create():
|
||||
doc = curdoc()
|
||||
det_data = {}
|
||||
roi_selection = {}
|
||||
cami_meta = {}
|
||||
|
||||
def proposal_textinput_callback(_attr, _old, new):
|
||||
proposal = new.strip()
|
||||
year = new[:4]
|
||||
proposal_path = f"/afs/psi.ch/project/sinqdata/{year}/zebra/{proposal}"
|
||||
file_list = []
|
||||
for file in os.listdir(proposal_path):
|
||||
if file.endswith(".hdf"):
|
||||
file_list.append((os.path.join(proposal_path, file), file))
|
||||
file_select.options = file_list
|
||||
num_formatter = NumberFormatter(format="0.00", nan_format="")
|
||||
|
||||
proposal_textinput = TextInput(title="Proposal number:", width=210)
|
||||
proposal_textinput.on_change("value", proposal_textinput_callback)
|
||||
def file_select_update():
|
||||
if data_source.value == "proposal number":
|
||||
proposal_path = proposal_textinput.name
|
||||
if proposal_path:
|
||||
file_list = []
|
||||
for file in os.listdir(proposal_path):
|
||||
if file.endswith(".hdf"):
|
||||
file_list.append((os.path.join(proposal_path, file), file))
|
||||
file_select.options = file_list
|
||||
else:
|
||||
file_select.options = []
|
||||
|
||||
else: # "cami file"
|
||||
if not cami_meta:
|
||||
file_select.options = []
|
||||
return
|
||||
|
||||
file_list = cami_meta["filelist"]
|
||||
file_select.options = [(entry, os.path.basename(entry)) for entry in file_list]
|
||||
|
||||
def data_source_callback(_attr, _old, _new):
|
||||
file_select_update()
|
||||
|
||||
data_source = Select(
|
||||
title="Data Source:",
|
||||
value="proposal number",
|
||||
options=["proposal number", "cami file"],
|
||||
width=210,
|
||||
)
|
||||
data_source.on_change("value", data_source_callback)
|
||||
|
||||
doc.add_periodic_callback(file_select_update, 5000)
|
||||
|
||||
def proposal_textinput_callback(_attr, _old, _new):
|
||||
file_select_update()
|
||||
|
||||
proposal_textinput = doc.proposal_textinput
|
||||
proposal_textinput.on_change("name", proposal_textinput_callback)
|
||||
|
||||
def upload_button_callback(_attr, _old, new):
|
||||
nonlocal cami_meta
|
||||
with io.StringIO(base64.b64decode(new).decode()) as file:
|
||||
h5meta_list = pyzebra.parse_h5meta(file)
|
||||
file_list = h5meta_list["filelist"]
|
||||
file_select.options = [(entry, os.path.basename(entry)) for entry in file_list]
|
||||
cami_meta = pyzebra.parse_h5meta(file)
|
||||
data_source.value = "cami file"
|
||||
file_select_update()
|
||||
|
||||
upload_div = Div(text="or upload .cami file:", margin=(5, 5, 0, 5))
|
||||
upload_button = FileInput(accept=".cami", width=200)
|
||||
@ -123,6 +155,11 @@ def create():
|
||||
overview_x = np.mean(h5_data, axis=1)
|
||||
overview_y = np.mean(h5_data, axis=2)
|
||||
|
||||
# normalize for simpler colormapping
|
||||
overview_max_val = max(np.max(overview_x), np.max(overview_y))
|
||||
overview_x = 1000 * overview_x / overview_max_val
|
||||
overview_y = 1000 * overview_y / overview_max_val
|
||||
|
||||
overview_plot_x_image_source.data.update(image=[overview_x], dw=[n_x], dh=[n_im])
|
||||
overview_plot_y_image_source.data.update(image=[overview_y], dw=[n_y], dh=[n_im])
|
||||
|
||||
@ -155,7 +192,8 @@ def create():
|
||||
scanning_motor_range.end = var_end
|
||||
scanning_motor_range.reset_start = var_start
|
||||
scanning_motor_range.reset_end = var_end
|
||||
scanning_motor_range.bounds = (var_start, var_end)
|
||||
# handle both, ascending and descending sequences
|
||||
scanning_motor_range.bounds = (min(var_start, var_end), max(var_start, var_end))
|
||||
|
||||
def file_select_callback(_attr, old, new):
|
||||
nonlocal det_data
|
||||
@ -173,7 +211,7 @@ def create():
|
||||
# skip unnecessary update caused by selection drop
|
||||
return
|
||||
|
||||
det_data = pyzebra.read_detector_data(new[0])
|
||||
det_data = pyzebra.read_detector_data(new[0], cami_meta)
|
||||
|
||||
index_spinner.value = 0
|
||||
index_spinner.high = det_data["data"].shape[0] - 1
|
||||
@ -255,6 +293,15 @@ def create():
|
||||
image_glyph = Image(image="image", x="x", y="y", dw="dw", dh="dh")
|
||||
plot.add_glyph(image_source, image_glyph, name="image_glyph")
|
||||
|
||||
# calculate hkl-indices of first mouse entry
|
||||
def mouse_enter_callback(_event):
|
||||
if det_data and np.array_equal(image_source.data["h"][0], np.zeros((1, 1))):
|
||||
index = index_spinner.value
|
||||
h, k, l = calculate_hkl(det_data, index)
|
||||
image_source.data.update(h=[h], k=[k], l=[l])
|
||||
|
||||
plot.on_event(MouseEnter, mouse_enter_callback)
|
||||
|
||||
# ---- projections
|
||||
proj_v = Plot(
|
||||
x_range=plot.x_range,
|
||||
@ -469,7 +516,7 @@ def create():
|
||||
update_image()
|
||||
|
||||
main_auto_checkbox = CheckboxGroup(
|
||||
labels=["Main Auto Range"], active=[0], width=145, margin=[10, 5, 0, 5]
|
||||
labels=["Frame Intensity Range"], active=[0], width=145, margin=[10, 5, 0, 5]
|
||||
)
|
||||
main_auto_checkbox.on_click(main_auto_checkbox_callback)
|
||||
|
||||
@ -502,7 +549,7 @@ def create():
|
||||
)
|
||||
display_min_spinner.on_change("value", display_min_spinner_callback)
|
||||
|
||||
PROJ_STEP = 0.1
|
||||
PROJ_STEP = 1
|
||||
|
||||
def proj_auto_checkbox_callback(state):
|
||||
if state:
|
||||
@ -515,7 +562,7 @@ def create():
|
||||
update_overview_plot()
|
||||
|
||||
proj_auto_checkbox = CheckboxGroup(
|
||||
labels=["Projections Auto Range"], active=[0], width=145, margin=[10, 5, 0, 5]
|
||||
labels=["Projections Intensity Range"], active=[0], width=145, margin=[10, 5, 0, 5]
|
||||
)
|
||||
proj_auto_checkbox.on_click(proj_auto_checkbox_callback)
@ -550,40 +597,134 @@ def create():
)
proj_display_min_spinner.on_change("value", proj_display_min_spinner_callback)

def hkl_button_callback():
index = index_spinner.value
h, k, l = calculate_hkl(det_data, index)
image_source.data.update(h=[h], k=[k], l=[l])
events_data = dict(
wave=[],
ddist=[],
cell=[],
frame=[],
x_pos=[],
y_pos=[],
intensity=[],
snr_cnts=[],
gamma=[],
omega=[],
chi=[],
phi=[],
nu=[],
)
doc.events_data = events_data

hkl_button = Button(label="Calculate hkl (slow)", width=210)
hkl_button.on_click(hkl_button_callback)
events_table_source = ColumnDataSource(events_data)
events_table = DataTable(
source=events_table_source,
columns=[
TableColumn(field="frame", title="Frame", formatter=num_formatter, width=70),
TableColumn(field="x_pos", title="X", formatter=num_formatter, width=70),
TableColumn(field="y_pos", title="Y", formatter=num_formatter, width=70),
TableColumn(field="intensity", title="Intensity", formatter=num_formatter, width=70),
TableColumn(field="gamma", title="Gamma", formatter=num_formatter, width=70),
TableColumn(field="omega", title="Omega", formatter=num_formatter, width=70),
TableColumn(field="chi", title="Chi", formatter=num_formatter, width=70),
TableColumn(field="phi", title="Phi", formatter=num_formatter, width=70),
TableColumn(field="nu", title="Nu", formatter=num_formatter, width=70),
],
height=150,
width=630,
autosize_mode="none",
index_position=None,
)

selection_list = TextAreaInput(rows=7)
def add_event_button_callback():
p0 = [1.0, 0.0, 1.0]
maxfev = 100000

def selection_button_callback():
nonlocal roi_selection
selection = [
int(np.floor(det_x_range.start)),
int(np.ceil(det_x_range.end)),
int(np.floor(det_y_range.start)),
int(np.ceil(det_y_range.end)),
int(np.floor(frame_range.start)),
int(np.ceil(frame_range.end)),
]
wave = det_data["wave"]
ddist = det_data["ddist"]
cell = det_data["cell"]

filename_id = file_select.value[0][-8:-4]
if filename_id in roi_selection:
roi_selection[f"{filename_id}"].append(selection)
else:
roi_selection[f"{filename_id}"] = [selection]
gamma = det_data["gamma"][0]
omega = det_data["omega"][0]
nu = det_data["nu"][0]
chi = det_data["chi"][0]
phi = det_data["phi"][0]

selection_list.value = str(roi_selection)
scan_motor = det_data["scan_motor"]
var_angle = det_data[scan_motor]

selection_button = Button(label="Add selection")
selection_button.on_click(selection_button_callback)
x0 = int(np.floor(det_x_range.start))
xN = int(np.ceil(det_x_range.end))
y0 = int(np.floor(det_y_range.start))
yN = int(np.ceil(det_y_range.end))
fr0 = int(np.floor(frame_range.start))
frN = int(np.ceil(frame_range.end))
data_roi = det_data["data"][fr0:frN, y0:yN, x0:xN]

cnts = np.sum(data_roi, axis=(1, 2))
coeff, _ = curve_fit(gauss, range(len(cnts)), cnts, p0=p0, maxfev=maxfev)

m = cnts.mean()
sd = cnts.std()
snr_cnts = np.where(sd == 0, 0, m / sd)

frC = fr0 + coeff[1]
var_F = var_angle[math.floor(frC)]
var_C = var_angle[math.ceil(frC)]
frStep = frC - math.floor(frC)
var_step = var_C - var_F
var_p = var_F + var_step * frStep

if scan_motor == "gamma":
gamma = var_p
elif scan_motor == "omega":
omega = var_p
elif scan_motor == "nu":
nu = var_p
elif scan_motor == "chi":
chi = var_p
elif scan_motor == "phi":
phi = var_p

intensity = coeff[1] * abs(coeff[2] * var_step) * math.sqrt(2) * math.sqrt(np.pi)

projX = np.sum(data_roi, axis=(0, 1))
coeff, _ = curve_fit(gauss, range(len(projX)), projX, p0=p0, maxfev=maxfev)
x_pos = x0 + coeff[1]

projY = np.sum(data_roi, axis=(0, 2))
coeff, _ = curve_fit(gauss, range(len(projY)), projY, p0=p0, maxfev=maxfev)
y_pos = y0 + coeff[1]

events_data["wave"].append(wave)
events_data["ddist"].append(ddist)
events_data["cell"].append(cell)
events_data["frame"].append(frC)
events_data["x_pos"].append(x_pos)
events_data["y_pos"].append(y_pos)
events_data["intensity"].append(intensity)
events_data["snr_cnts"].append(snr_cnts)
events_data["gamma"].append(gamma)
events_data["omega"].append(omega)
events_data["chi"].append(chi)
events_data["phi"].append(phi)
events_data["nu"].append(nu)

events_table_source.data = events_data

add_event_button = Button(label="Add spind event", width=145)
add_event_button.on_click(add_event_button_callback)

def remove_event_button_callback():
ind2remove = events_table_source.selected.indices
for value in events_data.values():
for ind in reversed(ind2remove):
del value[ind]

events_table_source.data = events_data

remove_event_button = Button(label="Remove spind event", width=145)
remove_event_button.on_click(remove_event_button_callback)

metadata_table_source = ColumnDataSource(dict(geom=[""], temp=[None], mf=[None]))
num_formatter = NumberFormatter(format="0.00", nan_format="")
metadata_table = DataTable(
source=metadata_table_source,
columns=[
@ -598,7 +739,7 @@ def create():
)

# Final layout
import_layout = column(proposal_textinput, upload_div, upload_button, file_select)
import_layout = column(data_source, upload_div, upload_button, file_select)
layout_image = column(gridplot([[proj_v, None], [plot, proj_h]], merge_tools=False))
colormap_layout = column(
colormap,
@ -608,12 +749,9 @@ def create():
row(proj_display_min_spinner, proj_display_max_spinner),
)

layout_controls = row(
column(selection_button, selection_list),
Spacer(width=20),
column(
row(index_spinner, column(Spacer(height=25), index_slider)), metadata_table, hkl_button
),
layout_controls = column(
row(metadata_table, index_spinner, column(Spacer(height=25), index_slider)),
row(column(add_event_button, remove_event_button), events_table),
)

layout_overview = column(
@ -634,6 +772,17 @@ def create():
return Panel(child=tab_layout, title="hdf viewer")


def gauss(x, *p):
"""Defines Gaussian function
Args:
A - amplitude, mu - position of the center, sigma - width
Returns:
Gaussian function
"""
A, mu, sigma = p
return A * np.exp(-((x - mu) ** 2) / (2.0 * sigma ** 2))


def calculate_hkl(det_data, index):
h = np.empty(shape=(IMAGE_H, IMAGE_W))
k = np.empty(shape=(IMAGE_H, IMAGE_W))
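A minimal, self-contained sketch of how the gauss helper above is combined with scipy.optimize.curve_fit to locate a peak center and area in a frame-summed ROI; the synthetic counts array and the data-driven starting guess are illustrative assumptions, not values from the panel.

import numpy as np
from scipy.optimize import curve_fit

def gauss(x, *p):
    # A - amplitude, mu - position of the center, sigma - width
    A, mu, sigma = p
    return A * np.exp(-((x - mu) ** 2) / (2.0 * sigma ** 2))

# synthetic frame-summed ROI counts standing in for np.sum(data_roi, axis=(1, 2))
frames = np.arange(30)
cnts = 200 * np.exp(-((frames - 12.3) ** 2) / (2 * 2.0 ** 2))

# data-driven starting guess (the panel itself starts from p0 = [1.0, 0.0, 1.0])
p0 = [cnts.max(), float(np.argmax(cnts)), 1.0]
coeff, _ = curve_fit(gauss, frames, cnts, p0=p0, maxfev=100000)
A_fit, mu_fit, sigma_fit = coeff

peak_frame = mu_fit  # fractional frame index of the peak center
peak_area = A_fit * abs(sigma_fit) * np.sqrt(2 * np.pi)  # analytic area under the Gaussian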
@ -6,6 +6,7 @@ import tempfile
import types

import numpy as np
from bokeh.io import curdoc
from bokeh.layouts import column, row
from bokeh.models import (
BasicTicker,
@ -21,6 +22,7 @@ from bokeh.models import (
FileInput,
Grid,
HoverTool,
Image,
Legend,
Line,
LinearAxis,
@ -30,7 +32,7 @@ from bokeh.models import (
Panel,
PanTool,
Plot,
RadioButtonGroup,
RadioGroup,
ResetTool,
Scatter,
Select,
@ -40,29 +42,34 @@ from bokeh.models import (
TableColumn,
Tabs,
TextAreaInput,
TextInput,
WheelZoomTool,
Whisker,
)
from bokeh.palettes import Category10, Turbo256
from bokeh.transform import linear_cmap
from scipy import interpolate

import pyzebra
from pyzebra.ccl_io import AREA_METHODS
from pyzebra.ccl_process import AREA_METHODS

javaScript = """
let j = 0;
for (let i = 0; i < js_data.data['fname'].length; i++) {
if (js_data.data['content'][i] === "") continue;

const blob = new Blob([js_data.data['content'][i]], {type: 'text/plain'})
const link = document.createElement('a');
document.body.appendChild(link);
const url = window.URL.createObjectURL(blob);
link.href = url;
link.download = js_data.data['fname'][i];
link.click();
window.URL.revokeObjectURL(url);
document.body.removeChild(link);
setTimeout(function() {
const blob = new Blob([js_data.data['content'][i]], {type: 'text/plain'})
const link = document.createElement('a');
document.body.appendChild(link);
const url = window.URL.createObjectURL(blob);
link.href = url;
link.download = js_data.data['fname'][i] + js_data.data['ext'][i];
link.click();
window.URL.revokeObjectURL(url);
document.body.removeChild(link);
}, 100 * j)

j++;
}
"""

@ -73,22 +80,33 @@ def color_palette(n_colors):


def create():
doc = curdoc()
det_data = []
fit_params = {}
js_data = ColumnDataSource(data=dict(content=["", ""], fname=["", ""]))
js_data = ColumnDataSource(data=dict(content=[""], fname=[""], ext=[""]))

def proposal_textinput_callback(_attr, _old, new):
proposal = new.strip()
year = new[:4]
proposal_path = f"/afs/psi.ch/project/sinqdata/{year}/zebra/{proposal}"
file_list = []
for file in os.listdir(proposal_path):
if file.endswith((".ccl", ".dat")):
file_list.append((os.path.join(proposal_path, file), file))
file_select.options = file_list
def file_select_update_for_proposal():
proposal_path = proposal_textinput.name
if proposal_path:
file_list = []
for file in os.listdir(proposal_path):
if file.endswith((".ccl", ".dat")):
file_list.append((os.path.join(proposal_path, file), file))
file_select.options = file_list
file_open_button.disabled = False
file_append_button.disabled = False
else:
file_select.options = []
file_open_button.disabled = True
file_append_button.disabled = True

proposal_textinput = TextInput(title="Proposal number:", width=210)
proposal_textinput.on_change("value", proposal_textinput_callback)
doc.add_periodic_callback(file_select_update_for_proposal, 5000)

def proposal_textinput_callback(_attr, _old, _new):
file_select_update_for_proposal()

proposal_textinput = doc.proposal_textinput
proposal_textinput.on_change("name", proposal_textinput_callback)

def _init_datatable():
scan_list = [s["idx"] for s in det_data]
@ -106,42 +124,46 @@ def create():
scan_table_source.selected.indices = []
scan_table_source.selected.indices = [0]

scan_motor_select.options = det_data[0]["scan_motors"]
scan_motor_select.value = det_data[0]["scan_motor"]
param_select.value = "user defined"

file_select = MultiSelect(title="Available .ccl/.dat files:", width=210, height=250)

def file_open_button_callback():
nonlocal det_data
det_data = []
for f_name in file_select.value:
with open(f_name) as file:
base, ext = os.path.splitext(f_name)
if det_data:
append_data = pyzebra.parse_1D(file, ext)
pyzebra.normalize_dataset(append_data, monitor_spinner.value)
det_data.extend(append_data)
else:
det_data = pyzebra.parse_1D(file, ext)
pyzebra.normalize_dataset(det_data, monitor_spinner.value)
js_data.data.update(fname=[base + ".comm", base + ".incomm"])
for f_ind, f_path in enumerate(file_select.value):
with open(f_path) as file:
base, ext = os.path.splitext(os.path.basename(f_path))
file_data = pyzebra.parse_1D(file, ext)

pyzebra.normalize_dataset(file_data, monitor_spinner.value)

if f_ind == 0: # first file
det_data = file_data
pyzebra.merge_duplicates(det_data)
js_data.data.update(fname=[base])
else:
pyzebra.merge_datasets(det_data, file_data)

_init_datatable()
append_upload_button.disabled = False

file_open_button = Button(label="Open New", width=100)
file_open_button = Button(label="Open New", width=100, disabled=True)
file_open_button.on_click(file_open_button_callback)

def file_append_button_callback():
for f_name in file_select.value:
with open(f_name) as file:
_, ext = os.path.splitext(f_name)
append_data = pyzebra.parse_1D(file, ext)
for f_path in file_select.value:
with open(f_path) as file:
_, ext = os.path.splitext(f_path)
file_data = pyzebra.parse_1D(file, ext)

pyzebra.normalize_dataset(append_data, monitor_spinner.value)
det_data.extend(append_data)
pyzebra.normalize_dataset(file_data, monitor_spinner.value)
pyzebra.merge_datasets(det_data, file_data)

_init_datatable()

file_append_button = Button(label="Append", width=100)
file_append_button = Button(label="Append", width=100, disabled=True)
file_append_button.on_click(file_append_button_callback)

def upload_button_callback(_attr, _old, new):
@ -150,16 +172,19 @@ def create():
for f_str, f_name in zip(new, upload_button.filename):
with io.StringIO(base64.b64decode(f_str).decode()) as file:
base, ext = os.path.splitext(f_name)
if det_data:
append_data = pyzebra.parse_1D(file, ext)
pyzebra.normalize_dataset(append_data, monitor_spinner.value)
det_data.extend(append_data)
else:
det_data = pyzebra.parse_1D(file, ext)
pyzebra.normalize_dataset(det_data, monitor_spinner.value)
js_data.data.update(fname=[base + ".comm", base + ".incomm"])
file_data = pyzebra.parse_1D(file, ext)

pyzebra.normalize_dataset(file_data, monitor_spinner.value)

if not det_data: # first file
det_data = file_data
pyzebra.merge_duplicates(det_data)
js_data.data.update(fname=[base])
else:
pyzebra.merge_datasets(det_data, file_data)

_init_datatable()
append_upload_button.disabled = False

upload_div = Div(text="or upload new .ccl/.dat files:", margin=(5, 5, 0, 5))
upload_button = FileInput(accept=".ccl,.dat", multiple=True, width=200)
@ -169,15 +194,15 @@ def create():
for f_str, f_name in zip(new, append_upload_button.filename):
with io.StringIO(base64.b64decode(f_str).decode()) as file:
_, ext = os.path.splitext(f_name)
append_data = pyzebra.parse_1D(file, ext)
file_data = pyzebra.parse_1D(file, ext)

pyzebra.normalize_dataset(append_data, monitor_spinner.value)
det_data.extend(append_data)
pyzebra.normalize_dataset(file_data, monitor_spinner.value)
pyzebra.merge_datasets(det_data, file_data)

_init_datatable()

append_upload_div = Div(text="append extra files:", margin=(5, 5, 0, 5))
append_upload_button = FileInput(accept=".ccl,.dat", multiple=True, width=200)
append_upload_button = FileInput(accept=".ccl,.dat", multiple=True, width=200, disabled=True)
append_upload_button.on_change("value", append_upload_button_callback)

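A condensed sketch of the loading path used by the open/append callbacks above (parse, normalize to a common monitor, then merge); the file names are placeholders and the monitor value mirrors the spinner default.

import os
import pyzebra

dataset = []
for f_path in ["scan_0001.ccl", "scan_0002.dat"]:  # placeholder file names
    with open(f_path) as fileobj:
        _, ext = os.path.splitext(f_path)
        file_data = pyzebra.parse_1D(fileobj, ext)

    pyzebra.normalize_dataset(file_data, 100_000)  # rescale counts to a common monitor

    if not dataset:  # first file
        dataset = file_data
        pyzebra.merge_duplicates(dataset)          # merge scans duplicated within one file
    else:
        pyzebra.merge_datasets(dataset, file_data) # merge/append scans from further files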
def monitor_spinner_callback(_attr, _old, new):
@ -188,22 +213,33 @@ def create():
monitor_spinner = Spinner(title="Monitor:", mode="int", value=100_000, low=1, width=145)
monitor_spinner.on_change("value", monitor_spinner_callback)

def scan_motor_select_callback(_attr, _old, new):
if det_data:
for scan in det_data:
scan["scan_motor"] = new
_update_plot()

scan_motor_select = Select(title="Scan motor:", options=[], width=145)
scan_motor_select.on_change("value", scan_motor_select_callback)

def _update_table():
fit_ok = [(1 if "fit" in scan else 0) for scan in det_data]
scan_table_source.data.update(fit=fit_ok)

def _update_plot():
_update_single_scan_plot(_get_selected_scan())
_update_single_scan_plot()
_update_overview()

def _update_single_scan_plot(scan):
def _update_single_scan_plot():
scan = _get_selected_scan()
scan_motor = scan["scan_motor"]

y = scan["Counts"]
y = scan["counts"]
y_err = scan["counts_err"]
x = scan[scan_motor]

plot.axis[0].axis_label = scan_motor
plot_scatter_source.data.update(x=x, y=y, y_upper=y + np.sqrt(y), y_lower=y - np.sqrt(y))
plot_scatter_source.data.update(x=x, y=y, y_upper=y + y_err, y_lower=y - y_err)

fit = scan.get("fit")
if fit is not None:
@ -248,10 +284,10 @@ def create():
scan_motor = scan["scan_motor"]
xs.append(scan[scan_motor])
x.extend(scan[scan_motor])
ys.append(scan["Counts"])
ys.append(scan["counts"])
y.extend([float(p)] * len(scan[scan_motor]))
param.append(float(p))
par.extend(scan["Counts"])
par.extend(scan["counts"])

if det_data:
scan_motor = det_data[0]["scan_motor"]
@ -265,6 +301,38 @@ def create():
mapper["transform"].high = np.max([np.max(y) for y in ys])
ov_param_plot_scatter_source.data.update(x=x, y=y, param=par)

if y:
interp_f = interpolate.interp2d(x, y, par)
x1, x2 = min(x), max(x)
y1, y2 = min(y), max(y)
image = interp_f(
np.linspace(x1, x2, ov_param_plot.inner_width // 10),
np.linspace(y1, y2, ov_param_plot.inner_height // 10),
assume_sorted=True,
)
ov_param_plot_image_source.data.update(
image=[image], x=[x1], y=[y1], dw=[x2 - x1], dh=[y2 - y1]
)
else:
ov_param_plot_image_source.data.update(image=[], x=[], y=[], dw=[], dh=[])

def _update_param_plot():
x = []
y = []
y_lower = []
y_upper = []
fit_param = fit_param_select.value
for s, p in zip(det_data, scan_table_source.data["param"]):
if "fit" in s and fit_param:
x.append(p)
param_fit_val = s["fit"].params[fit_param].value
param_fit_std = s["fit"].params[fit_param].stderr
y.append(param_fit_val)
y_lower.append(param_fit_val - param_fit_std)
y_upper.append(param_fit_val + param_fit_std)

param_plot_scatter_source.data.update(x=x, y=y, y_lower=y_lower, y_upper=y_upper)

# Main plot
plot = Plot(
x_range=DataRange1d(),
@ -321,7 +389,7 @@ def create():
plot.toolbar.logo = None

# Overview multilines plot
ov_plot = Plot(x_range=DataRange1d(), y_range=DataRange1d(), plot_height=400, plot_width=700)
ov_plot = Plot(x_range=DataRange1d(), y_range=DataRange1d(), plot_height=450, plot_width=700)

ov_plot.add_layout(LinearAxis(axis_label="Counts"), place="left")
ov_plot.add_layout(LinearAxis(axis_label="Scan motor"), place="below")
@ -340,7 +408,7 @@ def create():

# Overview perams plot
ov_param_plot = Plot(
x_range=DataRange1d(), y_range=DataRange1d(), plot_height=400, plot_width=700
x_range=DataRange1d(), y_range=DataRange1d(), plot_height=450, plot_width=700
)

ov_param_plot.add_layout(LinearAxis(axis_label="Param"), place="left")
@ -349,6 +417,11 @@ def create():
ov_param_plot.add_layout(Grid(dimension=0, ticker=BasicTicker()))
ov_param_plot.add_layout(Grid(dimension=1, ticker=BasicTicker()))

ov_param_plot_image_source = ColumnDataSource(dict(image=[], x=[], y=[], dw=[], dh=[]))
ov_param_plot.add_glyph(
ov_param_plot_image_source, Image(image="image", x="x", y="y", dw="dw", dh="dh")
)

ov_param_plot_scatter_source = ColumnDataSource(dict(x=[], y=[], param=[]))
mapper = linear_cmap(field_name="param", palette=Turbo256, low=0, high=50)
ov_param_plot.add_glyph(
@ -359,12 +432,37 @@ def create():
ov_param_plot.add_tools(PanTool(), WheelZoomTool(), ResetTool())
ov_param_plot.toolbar.logo = None

# Parameter plot
param_plot = Plot(x_range=DataRange1d(), y_range=DataRange1d(), plot_height=400, plot_width=700)

param_plot.add_layout(LinearAxis(axis_label="Fit parameter"), place="left")
param_plot.add_layout(LinearAxis(axis_label="Parameter"), place="below")

param_plot.add_layout(Grid(dimension=0, ticker=BasicTicker()))
param_plot.add_layout(Grid(dimension=1, ticker=BasicTicker()))

param_plot_scatter_source = ColumnDataSource(dict(x=[], y=[], y_upper=[], y_lower=[]))
param_plot.add_glyph(param_plot_scatter_source, Scatter(x="x", y="y"))
param_plot.add_layout(
Whisker(source=param_plot_scatter_source, base="x", upper="y_upper", lower="y_lower")
)

param_plot.add_tools(PanTool(), WheelZoomTool(), ResetTool())
param_plot.toolbar.logo = None

def fit_param_select_callback(_attr, _old, _new):
_update_param_plot()

fit_param_select = Select(title="Fit parameter", options=[], width=145)
fit_param_select.on_change("value", fit_param_select_callback)

# Plot tabs
plots = Tabs(
tabs=[
Panel(child=plot, title="single scan"),
Panel(child=ov_plot, title="overview"),
Panel(child=ov_param_plot, title="overview map"),
Panel(child=column(param_plot, row(fit_param_select)), title="parameter plot"),
]
)

@ -391,6 +489,7 @@ def create():

scan_table_source = ColumnDataSource(dict(file=[], scan=[], param=[], fit=[], export=[]))
scan_table_source.on_change("data", scan_table_source_callback)
scan_table_source.selected.on_change("indices", scan_table_select_callback)

scan_table = DataTable(
source=scan_table_source,
@ -406,13 +505,6 @@ def create():
autosize_mode="none",
)

def scan_table_source_callback(_attr, _old, _new):
if scan_table_source.selected.indices:
_update_plot()

scan_table_source.selected.on_change("indices", scan_table_select_callback)
scan_table_source.on_change("data", scan_table_source_callback)

def _get_selected_scan():
return det_data[scan_table_source.selected.indices[0]]

@ -423,6 +515,7 @@ def create():
param = [scan[new] for scan in det_data]

scan_table_source.data["param"] = param
_update_param_plot()

param_select = Select(
title="Parameter:",
@ -551,44 +644,61 @@ def create():

fit_output_textinput = TextAreaInput(title="Fit results:", width=750, height=200)

def fit_all_button_callback():
def proc_all_button_callback():
for scan, export in zip(det_data, scan_table_source.data["export"]):
if export:
pyzebra.fit_scan(
scan, fit_params, fit_from=fit_from_spinner.value, fit_to=fit_to_spinner.value
)
pyzebra.get_area(
scan,
area_method=AREA_METHODS[area_method_radiobutton.active],
lorentz=lorentz_checkbox.active,
)

_update_plot()
_update_table()

fit_all_button = Button(label="Fit All", button_type="primary", width=145)
fit_all_button.on_click(fit_all_button_callback)
for scan in det_data:
if "fit" in scan:
options = list(scan["fit"].params.keys())
fit_param_select.options = options
fit_param_select.value = options[0]
break
_update_param_plot()

def fit_button_callback():
proc_all_button = Button(label="Process All", button_type="primary", width=145)
proc_all_button.on_click(proc_all_button_callback)

def proc_button_callback():
scan = _get_selected_scan()
pyzebra.fit_scan(
scan, fit_params, fit_from=fit_from_spinner.value, fit_to=fit_to_spinner.value
)
pyzebra.get_area(
scan,
area_method=AREA_METHODS[area_method_radiobutton.active],
lorentz=lorentz_checkbox.active,
)

_update_plot()
_update_table()

fit_button = Button(label="Fit Current", width=145)
fit_button.on_click(fit_button_callback)
for scan in det_data:
if "fit" in scan:
options = list(scan["fit"].params.keys())
fit_param_select.options = options
fit_param_select.value = options[0]
break
_update_param_plot()

def area_method_radiobutton_callback(_handler):
_update_preview()
proc_button = Button(label="Process Current", width=145)
proc_button.on_click(proc_button_callback)

area_method_radiobutton = RadioButtonGroup(
labels=["Fit area", "Int area"], active=0, width=145, disabled=True
)
area_method_radiobutton.on_click(area_method_radiobutton_callback)
area_method_div = Div(text="Intensity:", margin=(5, 5, 0, 5))
area_method_radiobutton = RadioGroup(labels=["Function", "Area"], active=0, width=145)

def lorentz_checkbox_callback(_handler):
_update_preview()

lorentz_checkbox = CheckboxGroup(labels=["Lorentz Correction"], width=145, margin=[13, 5, 5, 5])
lorentz_checkbox.on_click(lorentz_checkbox_callback)
lorentz_checkbox = CheckboxGroup(labels=["Lorentz Correction"], width=145, margin=(13, 5, 5, 5))

export_preview_textinput = TextAreaInput(title="Export file preview:", width=450, height=400)

@ -596,28 +706,27 @@ def create():
with tempfile.TemporaryDirectory() as temp_dir:
temp_file = temp_dir + "/temp"
export_data = []
for s, export in zip(det_data, scan_table_source.data["export"]):
param_data = []
for s, p, export in zip(
det_data, scan_table_source.data["param"], scan_table_source.data["export"]
):
if export:
export_data.append(s)
param_data.append(p)

pyzebra.export_1D(
export_data,
temp_file,
area_method=AREA_METHODS[int(area_method_radiobutton.active)],
lorentz=bool(lorentz_checkbox.active),
)
pyzebra.export_param_study(export_data, param_data, temp_file)

exported_content = ""
file_content = []
for ext in (".comm", ".incomm"):
fname = temp_file + ext
if os.path.isfile(fname):
with open(fname) as f:
content = f.read()
exported_content += f"{ext} file:\n" + content
else:
content = ""
file_content.append(content)

fname = temp_file
if os.path.isfile(fname):
with open(fname) as f:
content = f.read()
exported_content += content
else:
content = ""
file_content.append(content)

js_data.data.update(content=file_content)
export_preview_textinput.value = exported_content
@ -629,17 +738,13 @@ def create():
column(fitparams_add_dropdown, fitparams_select, fitparams_remove_button),
fitparams_table,
Spacer(width=20),
column(
row(fit_from_spinner, fit_to_spinner),
row(area_method_radiobutton, lorentz_checkbox),
row(fit_button, fit_all_button),
),
column(fit_from_spinner, lorentz_checkbox, area_method_div, area_method_radiobutton),
column(fit_to_spinner, proc_button, proc_all_button),
)

scan_layout = column(scan_table, row(monitor_spinner, param_select))
scan_layout = column(scan_table, row(monitor_spinner, scan_motor_select, param_select))

import_layout = column(
proposal_textinput,
file_select,
row(file_open_button, file_append_button),
upload_div,
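To make the two AREA_METHODS used by the Process buttons above concrete, a small standalone sketch using lmfit and scipy directly (not the pyzebra wrappers): "fit_area" takes the fitted amplitude of the peak component, "int_area" integrates the data numerically and subtracts the fitted background; all numbers and prefixes here are placeholders.

import numpy as np
from lmfit.models import GaussianModel, LinearModel
from scipy.integrate import simpson, trapezoid

# synthetic scan: Gaussian peak on a linear background (placeholder values)
x = np.linspace(10, 20, 101)
y = 300 * np.exp(-((x - 15) ** 2) / (2 * 0.3 ** 2)) + 5 + 0.2 * x

model = LinearModel(prefix="f0_") + GaussianModel(prefix="f1_")
params = model.make_params(
    f0_slope=0, f0_intercept=5, f1_center=15, f1_sigma=0.3, f1_amplitude=100
)
result = model.fit(y, params, x=x)

# "fit_area": amplitude (i.e. area) of the fitted peak component
fit_area = result.params["f1_amplitude"].value

# "int_area": numerical integral of the data minus the fitted background component
y_bkg = result.eval_components(x=x)["f0_"]
int_area = simpson(y, x=x) - trapezoid(y_bkg, x=x)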
@ -1,10 +1,9 @@
import ast
import math
import os
import subprocess
import tempfile

import numpy as np
from bokeh.io import curdoc
from bokeh.layouts import column, row
from bokeh.models import (
Button,
@ -16,17 +15,16 @@ from bokeh.models import (
TextAreaInput,
TextInput,
)
from scipy.optimize import curve_fit

import pyzebra


def create():
path_prefix_textinput = TextInput(title="Path prefix:", value="")
selection_list = TextAreaInput(title="ROIs:", rows=7)
lattice_const_textinput = TextInput(
title="Lattice constants:", value="8.3211,8.3211,8.3211,90.00,90.00,90.00"
)
doc = curdoc()
events_data = doc.events_data

npeaks_spinner = Spinner(title="Number of peaks from hdf_view panel:", disabled=True)
lattice_const_textinput = TextInput(title="Lattice constants:")
max_res_spinner = Spinner(title="max-res:", value=2, step=0.01, width=145)
seed_pool_size_spinner = Spinner(title="seed-pool-size:", value=5, step=0.01, width=145)
seed_len_tol_spinner = Spinner(title="seed-len-tol:", value=0.02, step=0.01, width=145)
@ -46,7 +44,6 @@ def create():
os.mkdir(temp_peak_list_dir)
temp_event_file = os.path.join(temp_peak_list_dir, "event-0.txt")
temp_hkl_file = os.path.join(temp_dir, "hkl.h5")
roi_dict = ast.literal_eval(selection_list.value)

comp_proc = subprocess.run(
[
@ -54,7 +51,7 @@ def create():
"-n",
"2",
"python",
os.path.expanduser("~/spind/gen_hkl_table.py"),
os.path.join(doc.spind_path, "gen_hkl_table.py"),
lattice_const_textinput.value,
"--max-res",
str(max_res_spinner.value),
@ -69,7 +66,33 @@ def create():
print(" ".join(comp_proc.args))
print(comp_proc.stdout)

diff_vec = prepare_event_file(temp_event_file, roi_dict, path_prefix_textinput.value)
# prepare an event file
diff_vec = []
with open(temp_event_file, "w") as f:
npeaks = len(next(iter(doc.events_data.values())))
for ind in range(npeaks):
wave = events_data["wave"][ind]
ddist = events_data["ddist"][ind]
x_pos = events_data["x_pos"][ind]
y_pos = events_data["y_pos"][ind]
intensity = events_data["intensity"][ind]
snr_cnts = events_data["snr_cnts"][ind]
gamma = events_data["gamma"][ind]
omega = events_data["omega"][ind]
chi = events_data["chi"][ind]
phi = events_data["phi"][ind]
nu = events_data["nu"][ind]

ga, nu = pyzebra.det2pol(ddist, gamma, nu, x_pos, y_pos)
diff_vector = pyzebra.z1frmd(wave, ga, omega, chi, phi, nu)
d_spacing = float(pyzebra.dandth(wave, diff_vector)[0])
diff_vector = diff_vector.flatten() * 1e10
dv1, dv2, dv3 = diff_vector

diff_vec.append(diff_vector)
f.write(
f"{x_pos} {y_pos} {intensity} {snr_cnts} {dv1} {dv2} {dv3} {d_spacing}\n"
)

print(f"Content of {temp_event_file}:")
with open(temp_event_file) as f:
@ -81,7 +104,7 @@ def create():
"-n",
"2",
"python",
os.path.expanduser("~/spind/SPIND.py"),
os.path.join(doc.spind_path, "SPIND.py"),
temp_peak_list_dir,
temp_hkl_file,
"-o",
@ -136,6 +159,9 @@ def create():
process_button = Button(label="Process", button_type="primary")
process_button.on_click(process_button_callback)

if doc.spind_path is None:
process_button.disabled = True

ub_matrix_textareainput = TextAreaInput(title="UB matrix:", rows=7, width=400)
hkl_textareainput = TextAreaInput(title="hkl values:", rows=7, width=400)

@ -175,8 +201,7 @@ def create():

tab_layout = row(
column(
path_prefix_textinput,
selection_list,
npeaks_spinner,
lattice_const_textinput,
row(max_res_spinner, seed_pool_size_spinner),
row(seed_len_tol_spinner, seed_angle_tol_spinner),
@ -186,87 +211,13 @@ def create():
column(results_table, row(ub_matrix_textareainput, hkl_textareainput)),
)

async def update_npeaks_spinner():
npeaks = len(next(iter(doc.events_data.values())))
npeaks_spinner.value = npeaks
# TODO: check cell parameter for consistency?
if npeaks:
lattice_const_textinput.value = ",".join(map(str, doc.events_data["cell"][0]))

doc.add_periodic_callback(update_npeaks_spinner, 1000)

return Panel(child=tab_layout, title="spind")


def gauss(x, *p):
"""Defines Gaussian function
Args:
A - amplitude, mu - position of the center, sigma - width
Returns:
Gaussian function
"""
A, mu, sigma = p
return A * np.exp(-((x - mu) ** 2) / (2.0 * sigma ** 2))


def prepare_event_file(export_filename, roi_dict, path_prefix=""):
diff_vec = []
p0 = [1.0, 0.0, 1.0]
maxfev = 100000
with open(export_filename, "w") as f:
for file, rois in roi_dict.items():
dat = pyzebra.read_detector_data(path_prefix + file + ".hdf")

wave = dat["wave"]
ddist = dat["ddist"]

gamma = dat["gamma"][0]
omega = dat["omega"][0]
nu = dat["nu"][0]
chi = dat["chi"][0]
phi = dat["phi"][0]

scan_motor = dat["scan_motor"]
var_angle = dat[scan_motor]

for roi in rois:
x0, xN, y0, yN, fr0, frN = roi
data_roi = dat["data"][fr0:frN, y0:yN, x0:xN]

cnts = np.sum(data_roi, axis=(1, 2))
coeff, _ = curve_fit(gauss, range(len(cnts)), cnts, p0=p0, maxfev=maxfev)

m = cnts.mean()
sd = cnts.std()
snr_cnts = np.where(sd == 0, 0, m / sd)

frC = fr0 + coeff[1]
var_F = var_angle[math.floor(frC)]
var_C = var_angle[math.ceil(frC)]
frStep = frC - math.floor(frC)
var_step = var_C - var_F
var_p = var_F + var_step * frStep

if scan_motor == "gamma":
gamma = var_p
elif scan_motor == "omega":
omega = var_p
elif scan_motor == "nu":
nu = var_p
elif scan_motor == "chi":
chi = var_p
elif scan_motor == "phi":
phi = var_p

intensity = coeff[1] * abs(coeff[2] * var_step) * math.sqrt(2) * math.sqrt(np.pi)

projX = np.sum(data_roi, axis=(0, 1))
coeff, _ = curve_fit(gauss, range(len(projX)), projX, p0=p0, maxfev=maxfev)
x_pos = x0 + coeff[1]

projY = np.sum(data_roi, axis=(0, 2))
coeff, _ = curve_fit(gauss, range(len(projY)), projY, p0=p0, maxfev=maxfev)
y_pos = y0 + coeff[1]

ga, nu = pyzebra.det2pol(ddist, gamma, nu, x_pos, y_pos)
diff_vector = pyzebra.z1frmd(wave, ga, omega, chi, phi, nu)
d_spacing = float(pyzebra.dandth(wave, diff_vector)[0])
diff_vector = diff_vector.flatten() * 1e10
dv1, dv2, dv3 = diff_vector

diff_vec.append(diff_vector)

f.write(f"{x_pos} {y_pos} {intensity} {snr_cnts} {dv1} {dv2} {dv3} {d_spacing}\n")

return diff_vec
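A hedged sketch of how one line of the SPIND event file above is produced from a fitted peak; the helper calls follow the hunk, while the numeric inputs are placeholders and not taken from any real measurement.

import pyzebra

# placeholder peak parameters: wavelength, detector distance, angles, fitted pixel position
wave, ddist = 1.178, 500.0
gamma, omega, chi, phi, nu = 25.0, 12.5, 0.0, 0.0, 0.0
x_pos, y_pos, intensity, snr_cnts = 128.4, 92.1, 3500.0, 4.2

ga, nu = pyzebra.det2pol(ddist, gamma, nu, x_pos, y_pos)     # pixel position -> polar angles
diff_vector = pyzebra.z1frmd(wave, ga, omega, chi, phi, nu)  # diffraction vector
d_spacing = float(pyzebra.dandth(wave, diff_vector)[0])
dv1, dv2, dv3 = diff_vector.flatten() * 1e10

event_line = f"{x_pos} {y_pos} {intensity} {snr_cnts} {dv1} {dv2} {dv3} {d_spacing}\n"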
@ -76,7 +76,7 @@ CCL_SECOND_LINE = (
("scan_motor", str),
)

AREA_METHODS = ("fit_area", "int_area")
EXPORT_TARGETS = {"fullprof": (".comm", ".incomm"), "jana": (".col", ".incol")}


def load_1D(filepath):
@ -159,6 +159,7 @@ def parse_1D(fileobj, data_type):

# "om" -> "omega"
s["scan_motor"] = "omega"
s["scan_motors"] = ["omega", ]
# overwrite metadata, because it only refers to the scan center
half_dist = (s["n_points"] - 1) / 2 * s["angle_step"]
s["omega"] = np.linspace(s["omega"] - half_dist, s["omega"] + half_dist, s["n_points"])
@ -167,7 +168,8 @@ def parse_1D(fileobj, data_type):
counts = []
while len(counts) < s["n_points"]:
counts.extend(map(float, next(fileobj).split()))
s["Counts"] = np.array(counts)
s["counts"] = np.array(counts)
s["counts_err"] = np.sqrt(s["counts"])

if s["h"].is_integer() and s["k"].is_integer() and s["l"].is_integer():
s["h"], s["k"], s["l"] = map(int, (s["h"], s["k"], s["l"]))
@ -182,23 +184,15 @@ def parse_1D(fileobj, data_type):
s = defaultdict(list)

match = re.search("Scanning Variables: (.*), Steps: (.*)", next(fileobj))
if match.group(1) == "h, k, l":
steps = match.group(2).split()
for step, ind in zip(steps, "hkl"):
if float(step) != 0:
scan_motor = ind
break
else:
scan_motor = match.group(1)

s["scan_motor"] = scan_motor
motors = [motor.lower() for motor in match.group(1).split(", ")]
steps = [float(step) for step in match.group(2).split()]

match = re.search("(.*) Points, Mode: (.*), Preset (.*)", next(fileobj))
if match.group(2) != "Monitor":
raise Exception("Unknown mode in dat file.")
s["monitor"] = float(match.group(3))

col_names = next(fileobj).split()
col_names = list(map(str.lower, next(fileobj).split()))

for line in fileobj:
if "END-OF-DATA" in line:
@ -211,21 +205,33 @@ def parse_1D(fileobj, data_type):
for name in col_names:
s[name] = np.array(s[name])

s["counts_err"] = np.sqrt(s["counts"])

s["scan_motors"] = []
for motor, step in zip(motors, steps):
if step == 0:
# it's not a scan motor, so keep only the median value
s[motor] = np.median(s[motor])
else:
s["scan_motors"].append(motor)

# "om" -> "omega"
if s["scan_motor"] == "om":
s["scan_motor"] = "omega"
if "om" in s["scan_motors"]:
s["scan_motors"][s["scan_motors"].index("om")] = "omega"
s["omega"] = s["om"]
del s["om"]

# "tt" -> "temp"
elif s["scan_motor"] == "tt":
s["scan_motor"] = "temp"
if "tt" in s["scan_motors"]:
s["scan_motors"][s["scan_motors"].index("tt")] = "temp"
s["temp"] = s["tt"]
del s["tt"]

# "mf" stays "mf"
# "phi" stays "phi"

s["scan_motor"] = s["scan_motors"][0]

if "h" not in s:
s["h"] = s["k"] = s["l"] = float("nan")

@ -243,14 +249,19 @@ def parse_1D(fileobj, data_type):
return scan


def export_1D(data, path, area_method=AREA_METHODS[0], lorentz=False, hkl_precision=2):
"""Exports data in the .comm/.incomm format
def export_1D(data, path, export_target, hkl_precision=2):
"""Exports data in the .comm/.incomm format for fullprof or .col/.incol format for jana.

Scans with integer/real hkl values are saved in .comm/.incomm files correspondingly. If no scans
are present for a particular output format, that file won't be created.
Scans with integer/real hkl values are saved in .comm/.incomm or .col/.incol files
correspondingly. If no scans are present for a particular output format, that file won't be
created.
"""
if export_target not in EXPORT_TARGETS:
raise ValueError(f"Unknown export target: {export_target}.")

zebra_mode = data[0]["zebra_mode"]
file_content = {".comm": [], ".incomm": []}
exts = EXPORT_TARGETS[export_target]
file_content = {ext: [] for ext in exts}

for scan in data:
if "fit" not in scan:
@ -265,34 +276,7 @@ def export_1D(data, path, area_method=AREA_METHODS[0], lorentz=False, hkl_precis
else:
hkl_str = f"{h:8.{hkl_precision}f}{k:8.{hkl_precision}f}{l:8.{hkl_precision}f}"

for name, param in scan["fit"].params.items():
if "amplitude" in name:
if param.stderr is None:
area_n = np.nan
area_s = np.nan
else:
area_n = param.value
area_s = param.stderr
# TODO: take into account multiple peaks
break
else:
# no peak functions in a fit model
area_n = np.nan
area_s = np.nan

# apply lorentz correction to area
if lorentz:
if zebra_mode == "bi":
twotheta = np.deg2rad(scan["twotheta"])
corr_factor = np.sin(twotheta)
else: # zebra_mode == "nb":
gamma = np.deg2rad(scan["gamma"])
nu = np.deg2rad(scan["nu"])
corr_factor = np.sin(gamma) * np.cos(nu)

area_n = np.abs(area_n * corr_factor)
area_s = np.abs(area_s * corr_factor)

area_n, area_s = scan["area"]
area_str = f"{area_n:10.2f}{area_s:10.2f}"

ang_str = ""
@ -301,12 +285,47 @@ def export_1D(data, path, area_method=AREA_METHODS[0], lorentz=False, hkl_precis
angle_center = (np.min(scan[angle]) + np.max(scan[angle])) / 2
else:
angle_center = scan[angle]

if angle == "twotheta" and export_target == "jana":
angle_center /= 2

ang_str = ang_str + f"{angle_center:8g}"

ref = file_content[".comm"] if hkl_are_integers else file_content[".incomm"]
if export_target == "jana":
ang_str = ang_str + f"{scan['temp']:8}" + f"{scan['monitor']:8}"

ref = file_content[exts[0]] if hkl_are_integers else file_content[exts[1]]
ref.append(idx_str + hkl_str + area_str + ang_str + "\n")

for ext, content in file_content.items():
if content:
with open(path + ext, "w") as out_file:
out_file.writelines(content)


def export_param_study(data, param_data, path):
file_content = []
for scan, param in zip(data, param_data):
if "fit" not in scan:
continue

if not file_content:
title_str = f"{'param':12}"
for fit_param_name in scan["fit"].params:
title_str = title_str + f"{fit_param_name:20}" + f"{'std_' + fit_param_name:20}"
title_str = title_str + "file"
file_content.append(title_str + "\n")

param_str = f"{param:<12.2f}"

fit_str = ""
for fit_param in scan["fit"].params.values():
fit_str = fit_str + f"{fit_param.value:<20.2f}" + f"{fit_param.stderr:<20.2f}"

_, fname_str = os.path.split(scan["original_filename"])

file_content.append(param_str + fit_str + fname_str + "\n")

if file_content:
with open(path, "w") as out_file:
out_file.writelines(file_content)
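A short usage sketch of the reworked exporters above: the export target now selects the file extensions, and area/Lorentz handling has moved to the processing step. The lists here are empty placeholders, so the calls run but write nothing; real scans would come from parse_1D plus fit_scan/get_area.

import pyzebra

processed_scans, params = [], []  # placeholders for scans carrying "fit"/"area" and their parameters

pyzebra.export_1D(processed_scans, "/tmp/zebra_export", export_target="fullprof")
# -> /tmp/zebra_export.comm and/or .incomm, depending on whether hkl are integers
pyzebra.export_1D(processed_scans, "/tmp/zebra_export", export_target="jana")
# -> /tmp/zebra_export.col and/or .incol

pyzebra.export_param_study(processed_scans, params, "/tmp/zebra_param_study")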
@ -1,8 +1,8 @@
import itertools
import os

import numpy as np
from lmfit.models import GaussianModel, LinearModel, PseudoVoigtModel, VoigtModel
from scipy.integrate import simpson, trapezoid

from .ccl_io import CCL_ANGLES

@ -22,18 +22,24 @@ MAX_RANGE_GAP = {
"omega": 0.5,
}

AREA_METHODS = ("fit_area", "int_area")


def normalize_dataset(dataset, monitor=100_000):
for scan in dataset:
monitor_ratio = monitor / scan["monitor"]
scan["Counts"] *= monitor_ratio
scan["counts"] *= monitor_ratio
scan["counts_err"] *= monitor_ratio
scan["monitor"] = monitor


def merge_duplicates(dataset):
for scan_i, scan_j in itertools.combinations(dataset, 2):
if _parameters_match(scan_i, scan_j):
merge_scans(scan_i, scan_j)
merged = np.zeros(len(dataset), dtype=np.bool)
for ind_into, scan_into in enumerate(dataset):
for ind_from, scan_from in enumerate(dataset[ind_into + 1 :], start=ind_into + 1):
if _parameters_match(scan_into, scan_from) and not merged[ind_from]:
merge_scans(scan_into, scan_from)
merged[ind_from] = True


def _parameters_match(scan1, scan2):
@ -61,30 +67,72 @@ def _parameters_match(scan1, scan2):
return True


def merge_datasets(dataset1, dataset2):
for scan_j in dataset2:
for scan_i in dataset1:
if _parameters_match(scan_i, scan_j):
merge_scans(scan_i, scan_j)
break
def merge_datasets(dataset_into, dataset_from):
merged = np.zeros(len(dataset_from), dtype=np.bool)
for scan_into in dataset_into:
for ind, scan_from in enumerate(dataset_from):
if _parameters_match(scan_into, scan_from) and not merged[ind]:
merge_scans(scan_into, scan_from)
merged[ind] = True

dataset1.append(scan_j)
for scan_from in dataset_from:
dataset_into.append(scan_from)


def merge_scans(scan1, scan2):
omega = np.concatenate((scan1["omega"], scan2["omega"]))
counts = np.concatenate((scan1["Counts"], scan2["Counts"]))
def merge_scans(scan_into, scan_from):
# TODO: does it need to be "scan_motor" instead of omega for a generalized solution?
if "init_scan" not in scan_into:
scan_into["init_scan"] = scan_into.copy()

index = np.argsort(omega)
if "merged_scans" not in scan_into:
scan_into["merged_scans"] = []

scan1["omega"] = omega[index]
scan1["Counts"] = counts[index]
if scan_from in scan_into["merged_scans"]:
return

scan2["active"] = False
scan_into["merged_scans"].append(scan_from)

fname1 = os.path.basename(scan1["original_filename"])
fname2 = os.path.basename(scan2["original_filename"])
print(f'Merging scans: {scan1["idx"]} ({fname1}) <-- {scan2["idx"]} ({fname2})')
if (
scan_into["omega"].shape == scan_from["omega"].shape
and np.max(np.abs(scan_into["omega"] - scan_from["omega"])) < 0.0005
):
counts_tmp = 0
counts_err_tmp = 0

for scan in [scan_into["init_scan"], *scan_into["merged_scans"]]:
counts_tmp += scan["counts"]
counts_err_tmp += scan["counts_err"] ** 2

scan_into["counts"] = counts_tmp / (1 + len(scan_into["merged_scans"]))
scan_into["counts_err"] = np.sqrt(counts_err_tmp)

else:
omega = np.concatenate((scan_into["omega"], scan_from["omega"]))
counts = np.concatenate((scan_into["counts"], scan_from["counts"]))
counts_err = np.concatenate((scan_into["counts_err"], scan_from["counts_err"]))

index = np.argsort(omega)

scan_into["omega"] = omega[index]
scan_into["counts"] = counts[index]
scan_into["counts_err"] = counts_err[index]

scan_from["active"] = False

fname1 = os.path.basename(scan_into["original_filename"])
fname2 = os.path.basename(scan_from["original_filename"])
print(f'Merging scans: {scan_into["idx"]} ({fname1}) <-- {scan_from["idx"]} ({fname2})')


def restore_scan(scan):
if "merged_scans" in scan:
for merged_scan in scan["merged_scans"]:
merged_scan["active"] = True

if "init_scan" in scan:
tmp = scan["init_scan"]
scan.clear()
scan.update(tmp)


def fit_scan(scan, model_dict, fit_from=None, fit_to=None):
@ -93,12 +141,14 @@ def fit_scan(scan, model_dict, fit_from=None, fit_to=None):
if fit_to is None:
fit_to = np.inf

y_fit = scan["Counts"]
y_fit = scan["counts"]
y_err = scan["counts_err"]
x_fit = scan[scan["scan_motor"]]

# apply fitting range
fit_ind = (fit_from <= x_fit) & (x_fit <= fit_to)
y_fit = y_fit[fit_ind]
y_err = y_err[fit_ind]
x_fit = x_fit[fit_ind]

model = None
@ -146,5 +196,44 @@ def fit_scan(scan, model_dict, fit_from=None, fit_to=None):
else:
model += _model

weights = [1 / np.sqrt(val) if val != 0 else 1 for val in y_fit]
weights = [1 / y_err if y_err != 0 else 1 for y_err in y_err]
scan["fit"] = model.fit(y_fit, x=x_fit, weights=weights)


def get_area(scan, area_method, lorentz):
if area_method not in AREA_METHODS:
raise ValueError(f"Unknown area method: {area_method}.")

if area_method == "fit_area":
area_v = 0
area_s = 0
for name, param in scan["fit"].params.items():
if "amplitude" in name:
if param.stderr is None:
area_v = np.nan
area_s = np.nan
else:
area_v += param.value
area_s += param.stderr

else: # area_method == "int_area"
y_val = scan["counts"]
x_val = scan[scan["scan_motor"]]
y_bkg = scan["fit"].eval_components(x=x_val)["f0_"]
area_v = simpson(y_val, x=x_val) - trapezoid(y_bkg, x=x_val)
area_s = np.sqrt(area_v)

if lorentz:
# lorentz correction to area
if scan["zebra_mode"] == "bi":
twotheta = np.deg2rad(scan["twotheta"])
corr_factor = np.sin(twotheta)
else: # zebra_mode == "nb":
gamma = np.deg2rad(scan["gamma"])
nu = np.deg2rad(scan["nu"])
corr_factor = np.sin(gamma) * np.cos(nu)

area_v = np.abs(area_v * corr_factor)
area_s = np.abs(area_s * corr_factor)

scan["area"] = (area_v, area_s)
@ -1,6 +1,11 @@
import h5py
import numpy as np


META_MATRIX = ("UB")
META_CELL = ("cell")
META_STR = ("name")

def read_h5meta(filepath):
"""Open and parse content of a h5meta file.

@ -23,18 +28,37 @@ def parse_h5meta(file):
line = line.strip()
if line.startswith("#begin "):
section = line[len("#begin ") :]
content[section] = []
if section in ("detector parameters", "crystal"):
content[section] = {}
else:
content[section] = []

elif line.startswith("#end"):
section = None

elif section:
content[section].append(line)
if section in ("detector parameters", "crystal"):
if "=" in line:
variable, value = line.split("=", 1)
variable = variable.strip()
value = value.strip()

if variable in META_STR:
pass
elif variable in META_CELL:
value = np.array(value.split(",")[:6], dtype=np.float)
elif variable in META_MATRIX:
value = np.array(value.split(",")[:9], dtype=np.float).reshape(3, 3)
else: # default is a single float number
value = float(value)
content[section][variable] = value
else:
content[section].append(line)

return content


def read_detector_data(filepath):
def read_detector_data(filepath, cami_meta=None):
"""Read detector data and angles from an h5 file.

Args:
@ -51,12 +75,18 @@ def read_detector_data(filepath):
data = data.reshape(n, rows, cols)

det_data = {"data": data}
det_data["original_filename"] = filepath

if "/entry1/zebra_mode" in h5f:
det_data["zebra_mode"] = h5f["/entry1/zebra_mode"][0].decode()
else:
det_data["zebra_mode"] = "nb"

# overwrite zebra_mode from cami
if cami_meta is not None:
if "zebra_mode" in cami_meta:
det_data["zebra_mode"] = cami_meta["zebra_mode"][0]

# om, sometimes ph
if det_data["zebra_mode"] == "nb":
det_data["omega"] = h5f["/entry1/area_detector2/rotation_angle"][:]
@ -70,6 +100,8 @@ def read_detector_data(filepath):
det_data["chi"] = h5f["/entry1/sample/chi"][:] # ch
det_data["phi"] = h5f["/entry1/sample/phi"][:] # ph
det_data["ub"] = h5f["/entry1/sample/UB"][:].reshape(3, 3)
det_data["name"] = h5f["/entry1/sample/name"][0].decode()
det_data["cell"] = h5f["/entry1/sample/cell"][:]

for var in ("omega", "gamma", "nu", "chi", "phi"):
if abs(det_data[var][0] - det_data[var][-1]) > 0.1:
@ -85,4 +117,22 @@ def read_detector_data(filepath):
if "/entry1/sample/temperature" in h5f:
det_data["temp"] = h5f["/entry1/sample/temperature"][:]

# overwrite metadata from .cami
if cami_meta is not None:
if "crystal" in cami_meta:
cami_meta_crystal = cami_meta["crystal"]
if "name" in cami_meta_crystal:
det_data["name"] = cami_meta_crystal["name"]
if "UB" in cami_meta_crystal:
det_data["ub"] = cami_meta_crystal["UB"]
if "cell" in cami_meta_crystal:
det_data["cell"] = cami_meta_crystal["cell"]
if "lambda" in cami_meta_crystal:
det_data["wave"] = cami_meta_crystal["lambda"]

if "detector parameters" in cami_meta:
cami_meta_detparam = cami_meta["detector parameters"]
if "dist1" in cami_meta_detparam:
det_data["ddist"] = cami_meta_detparam["dist1"]

return det_data
20
pyzebra/utils.py
Normal file
@ -0,0 +1,20 @@
import os

ZEBRA_PROPOSALS_PATHS = [
f"/afs/psi.ch/project/sinqdata/{year}/zebra/" for year in (2016, 2017, 2018, 2020, 2021)
]

def find_proposal_path(proposal):
proposal = proposal.strip()
if proposal:
for zebra_proposals_path in ZEBRA_PROPOSALS_PATHS:
proposal_path = os.path.join(zebra_proposals_path, proposal)
if os.path.isdir(proposal_path):
# found it
break
else:
raise ValueError(f"Can not find data for proposal '{proposal}'.")
else:
proposal_path = ""

return proposal_path
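A hedged usage sketch of the new helper above; the proposal number is a placeholder, and the lookup only succeeds on a machine with the PSI AFS data paths mounted.

from pyzebra.utils import find_proposal_path

try:
    proposal_path = find_proposal_path("20201234")  # placeholder proposal number
except ValueError as e:
    proposal_path = ""
    print(e)

# an empty input yields an empty path; a non-existing proposal raises ValueError
print(proposal_path)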
@ -1,15 +1,5 @@
import math

import numpy as np
from numba import njit
from scipy.optimize import curve_fit

import pyzebra

try:
from matplotlib import pyplot as plt
except ImportError:
print("matplotlib is not available")

pi_r = 180 / np.pi

@ -393,84 +383,3 @@ def gauss(x, *p):
"""
A, mu, sigma = p
return A * np.exp(-((x - mu) ** 2) / (2.0 * sigma ** 2))


def box_int(file, box):
"""Calculates center of the peak in the NB-geometry angles and Intensity of the peak

Args:
file name, box size [x0:xN, y0:yN, fr0:frN]

Returns:
gamma, omPeak, nu polar angles, Int and data for 3 fit plots
"""

dat = pyzebra.read_detector_data(file)

sttC = dat["gamma"][0]
om = dat["omega"]
nuC = dat["nu"][0]
ddist = dat["ddist"]

# defining indices
x0, xN, y0, yN, fr0, frN = box

# omega fit
om = dat["omega"][fr0:frN]
cnts = np.sum(dat["data"][fr0:frN, y0:yN, x0:xN], axis=(1, 2))

p0 = [1.0, 0.0, 1.0]
coeff, var_matrix = curve_fit(gauss, range(len(cnts)), cnts, p0=p0)

frC = fr0 + coeff[1]
omF = dat["omega"][math.floor(frC)]
omC = dat["omega"][math.ceil(frC)]
frStep = frC - math.floor(frC)
omStep = omC - omF
omP = omF + omStep * frStep
Int = coeff[1] * abs(coeff[2] * omStep) * math.sqrt(2) * math.sqrt(np.pi)
# omega plot
x_fit = np.linspace(0, len(cnts), 100)
y_fit = gauss(x_fit, *coeff)
plt.figure()
plt.subplot(131)
plt.plot(range(len(cnts)), cnts)
plt.plot(x_fit, y_fit)
plt.ylabel("Intensity in the box")
plt.xlabel("Frame N of the box")
label = "om"
# gamma fit
sliceXY = dat["data"][fr0:frN, y0:yN, x0:xN]
sliceXZ = np.sum(sliceXY, axis=1)
sliceYZ = np.sum(sliceXY, axis=2)

projX = np.sum(sliceXZ, axis=0)
p0 = [1.0, 0.0, 1.0]
coeff, var_matrix = curve_fit(gauss, range(len(projX)), projX, p0=p0)
x = x0 + coeff[1]
# gamma plot
x_fit = np.linspace(0, len(projX), 100)
y_fit = gauss(x_fit, *coeff)
plt.subplot(132)
plt.plot(range(len(projX)), projX)
plt.plot(x_fit, y_fit)
plt.ylabel("Intensity in the box")
plt.xlabel("X-pixel of the box")

# nu fit
projY = np.sum(sliceYZ, axis=0)
p0 = [1.0, 0.0, 1.0]
coeff, var_matrix = curve_fit(gauss, range(len(projY)), projY, p0=p0)
y = y0 + coeff[1]
# nu plot
x_fit = np.linspace(0, len(projY), 100)
y_fit = gauss(x_fit, *coeff)
plt.subplot(133)
plt.plot(range(len(projY)), projY)
plt.plot(x_fit, y_fit)
plt.ylabel("Intensity in the box")
plt.xlabel("Y-pixel of the box")

ga, nu = pyzebra.det2pol(ddist, sttC, nuC, x, y)

return ga[0], omP, nu[0], Int
@ -1,4 +1,4 @@
source /home/pyzebra/miniconda3/etc/profile.d/conda.sh

conda activate prod
pyzebra --port=80 --allow-websocket-origin=pyzebra.psi.ch:80
pyzebra --port=80 --allow-websocket-origin=pyzebra.psi.ch:80 --spind-path=/home/pyzebra/spind

@ -1,4 +1,4 @@
source /home/pyzebra/miniconda3/etc/profile.d/conda.sh

conda activate test
python ~/pyzebra/pyzebra/app/cli.py --allow-websocket-origin=pyzebra.psi.ch:5006
python ~/pyzebra/pyzebra/app/cli.py --allow-websocket-origin=pyzebra.psi.ch:5006 --spind-path=/home/pyzebra/spind