Compare commits
131 Commits
Commit SHAs:

328b71e058  11ab8485bc  4734b3e50f  dfeeed284b  9adf83ec74  a299449209
45a81aa632  3926e8de39  d2e2a2c7fd  3934dcdd07  4c8037af5c  e29b4e7da8
7189ee8196  be8417856a  8ba062064a  6557b2f3a4  7dcd20198f  13a6ff285a
09b6e4fdcf  e7780a2405  e8b85bcea3  2482746f14  3986b8173f  16966b6e3e
e9d3fcc41a  506d70a913  fc4e9c12cf  c5faa0a55a  c9922bb0cb  813270d6f8
cf2f8435e7  380abfb102  c8502a3b93  b84fc632aa  3acd57adb9  960ce0a534
1d43a952e6  9f7a7b8bbf  8129b5e683  eaa6c4a2ad  c2be907113  4dae756b3e
a77a40618d  a73c34b06f  4b9f0a8c36  9f56921072  49a6bd22ae  5b502b31eb
20e99c35ba  abf4750030  5de09d16ca  5c4362d984  8d065b85a4  c86466b470
b8968192ca  4745f0f401  9f6e7230fa  089a0cf5ac  639dc070c3  fec463398d
b6d7a52b06  d6e599d4f9  d6b27fb33a  bae15ee2ef  c2bd6c25f5  cf6527af13
57e503fc3d  c10efeb9cc  137f20cc20  531463a637  e3368c1817  313bd8bc62
fe61d3c4cb  f6d9f63863  620f32446a  4b4d5c16ce  91b9e01441  18ea894f35
9141ac49c7  2adbcc6bcd  b39d970960  b11004bf0f  6c2e221595  502a4b8096
3fe4fca96a  a3e3e6768f  0b6a58e160  09d22e7674  90387174e5  e99edbaf72
a2fceffc1b  415d68b4dc  00ff4117ea  67853b8db4  60787bccb7  880d86d750
7a88e5e254  20f2a8ada4  42c092fc14  8153db9f67  62c969d6ad  085620abae
9ebe290966  c9cd96c521  d745cda4a5  1b5f70afa0  a034065a09  2a60c86b48
ccc075975f  4982b05de0  2b0c392a3e  099842b2bd  bd3efd698a  24f083e585
f43488af34  1b90d53466  c1b3a28351  5b45685257  e7b28a4e75  83a7d607a5
5eedd14b3f  3db7dca7ba  b2d1a0be02  69d22dd067  242da76c59  0c812a5dd5
4cfcb3d396  8018783eb5  fdb1609a41  e7dda3cda8  f788d74f15
.vscode/launch.json (vendored)

@@ -8,6 +8,7 @@
"program": "${workspaceFolder}/pyzebra/app/cli.py",
"console": "internalConsole",
"env": {},
"justMyCode": false,
},
]
}
@@ -22,11 +22,9 @@ requirements:
- numpy
- scipy
- h5py
- bokeh =2.3
- matplotlib
- bokeh =2.4
- numba
- lmfit
- uncertainties
- lmfit >=1.0.2


about:
@@ -1,7 +1,8 @@
from pyzebra.anatric import *
from pyzebra.ccl_io import *
from pyzebra.h5 import *
from pyzebra.xtal import *
from pyzebra.ccl_process import *
from pyzebra.h5 import *
from pyzebra.utils import *
from pyzebra.xtal import *

__version__ = "0.3.0"
__version__ = "0.6.1"
@@ -7,6 +7,7 @@ DATA_FACTORY_IMPLEMENTATION = [
    "morph",
    "d10",
]

REFLECTION_PRINTER_FORMATS = [
    "rafin",
    "rafinf",
@@ -20,11 +21,21 @@ REFLECTION_PRINTER_FORMATS = [
    "oksana",
]

ANATRIC_PATH = "/afs/psi.ch/project/sinq/rhel7/bin/anatric"
ALGORITHMS = ["adaptivemaxcog", "adaptivedynamic"]


def anatric(config_file, anatric_path="/afs/psi.ch/project/sinq/rhel7/bin/anatric"):
    subprocess.run([anatric_path, config_file], check=True)
def anatric(config_file, anatric_path=ANATRIC_PATH, cwd=None):
    comp_proc = subprocess.run(
        [anatric_path, config_file],
        stdout=subprocess.PIPE,
        stderr=subprocess.STDOUT,
        cwd=cwd,
        check=True,
        text=True,
    )
    print(" ".join(comp_proc.args))
    print(comp_proc.stdout)


class AnatricConfig:
@@ -51,10 +62,13 @@ class AnatricConfig:
    def save_as(self, filename):
        self._tree.write(filename)

    def tostring(self):
        return ET.tostring(self._tree.getroot(), encoding="unicode")

    def _get_attr(self, name, tag, attr):
        elem = self._tree.find(name).find(tag)
        if elem is None:
            return None
            return ""
        return elem.attrib[attr]

    def _set_attr(self, name, tag, attr, value):
@@ -217,7 +231,7 @@ class AnatricConfig:
        elem = self._tree.find("crystal").find("UB")
        if elem is not None:
            return elem.text
        return None
        return ""

    @crystal_UB.setter
    def crystal_UB(self, value):
@@ -236,12 +250,37 @@ class AnatricConfig:

    @property
    def dataFactory_dist1(self):
        return self._tree.find("DataFactory").find("dist1").attrib["value"]
        elem = self._tree.find("DataFactory").find("dist1")
        if elem is not None:
            return elem.attrib["value"]
        return ""

    @dataFactory_dist1.setter
    def dataFactory_dist1(self, value):
        self._tree.find("DataFactory").find("dist1").attrib["value"] = value

    @property
    def dataFactory_dist2(self):
        elem = self._tree.find("DataFactory").find("dist2")
        if elem is not None:
            return elem.attrib["value"]
        return ""

    @dataFactory_dist2.setter
    def dataFactory_dist2(self, value):
        self._tree.find("DataFactory").find("dist2").attrib["value"] = value

    @property
    def dataFactory_dist3(self):
        elem = self._tree.find("DataFactory").find("dist3")
        if elem is not None:
            return elem.attrib["value"]
        return ""

    @dataFactory_dist3.setter
    def dataFactory_dist3(self, value):
        self._tree.find("DataFactory").find("dist3").attrib["value"] = value

    @property
    def reflectionPrinter_format(self):
        return self._tree.find("ReflectionPrinter").attrib["format"]
@@ -253,6 +292,14 @@ class AnatricConfig:

        self._tree.find("ReflectionPrinter").attrib["format"] = value

    @property
    def reflectionPrinter_file(self):
        return self._tree.find("ReflectionPrinter").attrib["file"]

    @reflectionPrinter_file.setter
    def reflectionPrinter_file(self, value):
        self._tree.find("ReflectionPrinter").attrib["file"] = value

    @property
    def algorithm(self):
        return self._tree.find("Algorithm").attrib["implementation"]
@@ -269,7 +316,7 @@
    def _get_alg_attr(self, alg, tag, attr):
        param_elem = self._alg_elems[alg].find(tag)
        if param_elem is None:
            return None
            return ""
        return param_elem.attrib[attr]

    def _set_alg_attr(self, alg, tag, attr, value):
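For orientation, a minimal sketch of how the reworked wrapper above might be invoked from user code; the config filename and working directory below are illustrative, not taken from the diff:

    import pyzebra

    # Runs the anatric executable on an existing configuration file; stdout and
    # stderr are captured by the wrapper and echoed via print(), and check=True
    # raises CalledProcessError on a non-zero exit.
    pyzebra.anatric("anatric_config.xml", cwd="/tmp/anatric_run")
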
@@ -2,17 +2,19 @@ import logging
import sys
from io import StringIO

import pyzebra
from bokeh.io import curdoc
from bokeh.layouts import column, row
from bokeh.models import Tabs, TextAreaInput
from bokeh.models import Button, Panel, Tabs, TextAreaInput, TextInput

import panel_ccl_integrate
import panel_ccl_compare
import panel_hdf_anatric
import panel_hdf_param_study
import panel_hdf_viewer
import panel_param_study
import panel_spind


doc = curdoc()

sys.stdout = StringIO()
@@ -25,16 +27,41 @@ bokeh_logger = logging.getLogger("bokeh")
bokeh_logger.addHandler(bokeh_handler)
bokeh_log_textareainput = TextAreaInput(title="server output:", height=150)

# Final layout
tab_hdf_viewer = panel_hdf_viewer.create()
tab_hdf_anatric = panel_hdf_anatric.create()
tab_ccl_integrate = panel_ccl_integrate.create()
tab_param_study = panel_param_study.create()
tab_spind = panel_spind.create()
def proposal_textinput_callback(_attr, _old, _new):
    apply_button.disabled = False

proposal_textinput = TextInput(title="Proposal number:", name="")
proposal_textinput.on_change("value_input", proposal_textinput_callback)
doc.proposal_textinput = proposal_textinput

def apply_button_callback():
    try:
        proposal_path = pyzebra.find_proposal_path(proposal_textinput.value)
    except ValueError as e:
        print(e)
        return

    proposal_textinput.name = proposal_path
    apply_button.disabled = True

apply_button = Button(label="Apply", button_type="primary")
apply_button.on_click(apply_button_callback)

# Final layout
doc.add_root(
    column(
        Tabs(tabs=[tab_hdf_viewer, tab_hdf_anatric, tab_ccl_integrate, tab_param_study, tab_spind]),
        Tabs(
            tabs=[
                Panel(child=column(proposal_textinput, apply_button), title="user config"),
                panel_hdf_viewer.create(),
                panel_hdf_anatric.create(),
                panel_ccl_integrate.create(),
                panel_ccl_compare.create(),
                panel_param_study.create(),
                panel_hdf_param_study.create(),
                panel_spind.create(),
            ]
        ),
        row(stdout_textareainput, bokeh_log_textareainput, sizing_mode="scale_both"),
    )
)
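A minimal sketch of the pattern the panels in this changeset use to react to the proposal chosen in the new "user config" tab; the callback body here is illustrative only:

    from bokeh.io import curdoc

    doc = curdoc()

    def proposal_textinput_callback(_attr, _old, _new):
        # The resolved proposal directory is stored in the widget's `name`
        # attribute by the Apply button callback in app.py.
        print(doc.proposal_textinput.name)

    doc.proposal_textinput.on_change("name", proposal_textinput_callback)
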
@@ -6,6 +6,7 @@ from bokeh.application.application import Application
from bokeh.application.handlers import ScriptHandler
from bokeh.server.server import Server

from pyzebra.anatric import ANATRIC_PATH
from pyzebra.app.handler import PyzebraHandler

logging.basicConfig(format="%(asctime)s %(message)s", level=logging.INFO)
@@ -38,10 +39,11 @@ def main():
    )

    parser.add_argument(
        "--anatric-path",
        type=str,
        default=None,
        help="path to anatric executable",
        "--anatric-path", type=str, default=ANATRIC_PATH, help="path to anatric executable",
    )

    parser.add_argument(
        "--spind-path", type=str, default=None, help="path to spind scripts folder",
    )

    parser.add_argument(
@@ -55,7 +57,7 @@ def main():

    logger.info(app_path)

    pyzebra_handler = PyzebraHandler(args.anatric_path)
    pyzebra_handler = PyzebraHandler(args.anatric_path, args.spind_path)
    handler = ScriptHandler(filename=app_path, argv=args.args)
    server = Server(
        {"/": Application(pyzebra_handler, handler)},
@@ -5,7 +5,7 @@ class PyzebraHandler(Handler):
    """Provides a mechanism for generic bokeh applications to build up new streamvis documents.
    """

    def __init__(self, anatric_path):
    def __init__(self, anatric_path, spind_path):
        """Initialize a pyzebra handler for bokeh applications.

        Args:
@@ -14,6 +14,7 @@ class PyzebraHandler(Handler):
        super().__init__()  # no-op

        self.anatric_path = anatric_path
        self.spind_path = spind_path

    def modify_document(self, doc):
        """Modify an application document with pyzebra specific features.
@@ -26,5 +27,6 @@ class PyzebraHandler(Handler):
        """
        doc.title = "pyzebra"
        doc.anatric_path = self.anatric_path
        doc.spind_path = self.spind_path

        return doc
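As a usage note, both paths become per-document attributes, so any panel served through PyzebraHandler can read them directly; a short sketch, with the variable names chosen here only for illustration:

    from bokeh.io import curdoc

    doc = curdoc()
    # Injected by PyzebraHandler.modify_document()
    anatric_path = doc.anatric_path
    spind_path = doc.spind_path
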
pyzebra/app/panel_ccl_compare.py (new file, 718 lines)

@@ -0,0 +1,718 @@
import base64
|
||||
import io
|
||||
import os
|
||||
import tempfile
|
||||
import types
|
||||
|
||||
import numpy as np
|
||||
from bokeh.io import curdoc
|
||||
from bokeh.layouts import column, row
|
||||
from bokeh.models import (
|
||||
BasicTicker,
|
||||
Button,
|
||||
CellEditor,
|
||||
CheckboxEditor,
|
||||
CheckboxGroup,
|
||||
ColumnDataSource,
|
||||
CustomJS,
|
||||
DataRange1d,
|
||||
DataTable,
|
||||
Div,
|
||||
Dropdown,
|
||||
FileInput,
|
||||
Grid,
|
||||
Legend,
|
||||
Line,
|
||||
LinearAxis,
|
||||
MultiLine,
|
||||
MultiSelect,
|
||||
NumberEditor,
|
||||
Panel,
|
||||
PanTool,
|
||||
Plot,
|
||||
RadioGroup,
|
||||
ResetTool,
|
||||
Scatter,
|
||||
Select,
|
||||
Spacer,
|
||||
Span,
|
||||
Spinner,
|
||||
TableColumn,
|
||||
TextAreaInput,
|
||||
WheelZoomTool,
|
||||
Whisker,
|
||||
)
|
||||
|
||||
import pyzebra
|
||||
from pyzebra.ccl_io import EXPORT_TARGETS
|
||||
from pyzebra.ccl_process import AREA_METHODS
|
||||
|
||||
|
||||
javaScript = """
|
||||
let j = 0;
|
||||
for (let i = 0; i < js_data.data['fname'].length; i++) {
|
||||
if (js_data.data['content'][i] === "") continue;
|
||||
|
||||
setTimeout(function() {
|
||||
const blob = new Blob([js_data.data['content'][i]], {type: 'text/plain'})
|
||||
const link = document.createElement('a');
|
||||
document.body.appendChild(link);
|
||||
const url = window.URL.createObjectURL(blob);
|
||||
link.href = url;
|
||||
link.download = js_data.data['fname'][i] + js_data.data['ext'][i];
|
||||
link.click();
|
||||
window.URL.revokeObjectURL(url);
|
||||
document.body.removeChild(link);
|
||||
}, 100 * j)
|
||||
|
||||
j++;
|
||||
}
|
||||
"""
|
||||
|
||||
|
||||
def create():
|
||||
doc = curdoc()
|
||||
det_data1 = []
|
||||
det_data2 = []
|
||||
fit_params = {}
|
||||
js_data = ColumnDataSource(data=dict(content=["", ""], fname=["", ""], ext=["", ""]))
|
||||
|
||||
def file_select_update_for_proposal():
|
||||
proposal_path = proposal_textinput.name
|
||||
if proposal_path:
|
||||
file_list = []
|
||||
for file in os.listdir(proposal_path):
|
||||
if file.endswith((".ccl")):
|
||||
file_list.append((os.path.join(proposal_path, file), file))
|
||||
file_select.options = file_list
|
||||
file_open_button.disabled = False
|
||||
else:
|
||||
file_select.options = []
|
||||
file_open_button.disabled = True
|
||||
|
||||
doc.add_periodic_callback(file_select_update_for_proposal, 5000)
|
||||
|
||||
def proposal_textinput_callback(_attr, _old, _new):
|
||||
file_select_update_for_proposal()
|
||||
|
||||
proposal_textinput = doc.proposal_textinput
|
||||
proposal_textinput.on_change("name", proposal_textinput_callback)
|
||||
|
||||
def _init_datatable():
|
||||
# det_data2 should have the same metadata to det_data1
|
||||
scan_list = [s["idx"] for s in det_data1]
|
||||
hkl = [f'{s["h"]} {s["k"]} {s["l"]}' for s in det_data1]
|
||||
export = [s["export"] for s in det_data1]
|
||||
|
||||
twotheta = [np.median(s["twotheta"]) if "twotheta" in s else None for s in det_data1]
|
||||
gamma = [np.median(s["gamma"]) if "gamma" in s else None for s in det_data1]
|
||||
omega = [np.median(s["omega"]) if "omega" in s else None for s in det_data1]
|
||||
chi = [np.median(s["chi"]) if "chi" in s else None for s in det_data1]
|
||||
phi = [np.median(s["phi"]) if "phi" in s else None for s in det_data1]
|
||||
nu = [np.median(s["nu"]) if "nu" in s else None for s in det_data1]
|
||||
|
||||
scan_table_source.data.update(
|
||||
scan=scan_list,
|
||||
hkl=hkl,
|
||||
fit=[0] * len(scan_list),
|
||||
export=export,
|
||||
twotheta=twotheta,
|
||||
gamma=gamma,
|
||||
omega=omega,
|
||||
chi=chi,
|
||||
phi=phi,
|
||||
nu=nu,
|
||||
)
|
||||
scan_table_source.selected.indices = []
|
||||
scan_table_source.selected.indices = [0]
|
||||
|
||||
merge_options = [(str(i), f"{i} ({idx})") for i, idx in enumerate(scan_list)]
|
||||
merge_from_select.options = merge_options
|
||||
merge_from_select.value = merge_options[0][0]
|
||||
|
||||
file_select = MultiSelect(title="Select 2 .ccl files:", width=210, height=250)
|
||||
|
||||
def file_open_button_callback():
|
||||
if len(file_select.value) != 2:
|
||||
print("WARNING: Select exactly 2 .ccl files.")
|
||||
return
|
||||
|
||||
new_data1 = []
|
||||
new_data2 = []
|
||||
for ind, f_path in enumerate(file_select.value):
|
||||
with open(f_path) as file:
|
||||
f_name = os.path.basename(f_path)
|
||||
base, ext = os.path.splitext(f_name)
|
||||
try:
|
||||
file_data = pyzebra.parse_1D(file, ext)
|
||||
except:
|
||||
print(f"Error loading {f_name}")
|
||||
return
|
||||
|
||||
pyzebra.normalize_dataset(file_data, monitor_spinner.value)
|
||||
pyzebra.merge_duplicates(file_data)
|
||||
|
||||
if ind == 0:
|
||||
js_data.data.update(fname=[base, base])
|
||||
new_data1 = file_data
|
||||
else: # ind = 1
|
||||
new_data2 = file_data
|
||||
|
||||
# ignore extra scans at the end of the longest of the two files
|
||||
min_len = min(len(new_data1), len(new_data2))
|
||||
new_data1 = new_data1[:min_len]
|
||||
new_data2 = new_data2[:min_len]
|
||||
|
||||
nonlocal det_data1, det_data2
|
||||
det_data1 = new_data1
|
||||
det_data2 = new_data2
|
||||
_init_datatable()
|
||||
|
||||
file_open_button = Button(label="Open New", width=100, disabled=True)
|
||||
file_open_button.on_click(file_open_button_callback)
|
||||
|
||||
def upload_button_callback(_attr, _old, _new):
|
||||
if len(upload_button.filename) != 2:
|
||||
print("WARNING: Upload exactly 2 .ccl files.")
|
||||
return
|
||||
|
||||
new_data1 = []
|
||||
new_data2 = []
|
||||
for ind, f_str, f_name in enumerate(zip(upload_button.value, upload_button.filename)):
|
||||
with io.StringIO(base64.b64decode(f_str).decode()) as file:
|
||||
base, ext = os.path.splitext(f_name)
|
||||
try:
|
||||
file_data = pyzebra.parse_1D(file, ext)
|
||||
except:
|
||||
print(f"Error loading {f_name}")
|
||||
return
|
||||
|
||||
pyzebra.normalize_dataset(file_data, monitor_spinner.value)
|
||||
pyzebra.merge_duplicates(file_data)
|
||||
|
||||
if ind == 0:
|
||||
js_data.data.update(fname=[base, base])
|
||||
new_data1 = file_data
|
||||
else: # ind = 1
|
||||
new_data2 = file_data
|
||||
|
||||
# ignore extra scans at the end of the longest of the two files
|
||||
min_len = min(len(new_data1), len(new_data2))
|
||||
new_data1 = new_data1[:min_len]
|
||||
new_data2 = new_data2[:min_len]
|
||||
|
||||
nonlocal det_data1, det_data2
|
||||
det_data1 = new_data1
|
||||
det_data2 = new_data2
|
||||
_init_datatable()
|
||||
|
||||
upload_div = Div(text="or upload 2 .ccl files:", margin=(5, 5, 0, 5))
|
||||
upload_button = FileInput(accept=".ccl", multiple=True, width=200)
|
||||
# for on_change("value", ...) or on_change("filename", ...),
|
||||
# see https://github.com/bokeh/bokeh/issues/11461
|
||||
upload_button.on_change("filename", upload_button_callback)
|
||||
|
||||
def monitor_spinner_callback(_attr, old, new):
|
||||
if det_data1 and det_data2:
|
||||
pyzebra.normalize_dataset(det_data1, new)
|
||||
pyzebra.normalize_dataset(det_data2, new)
|
||||
_update_plot()
|
||||
|
||||
monitor_spinner = Spinner(title="Monitor:", mode="int", value=100_000, low=1, width=145)
|
||||
monitor_spinner.on_change("value", monitor_spinner_callback)
|
||||
|
||||
def _update_table():
|
||||
fit_ok = [(1 if "fit" in scan else 0) for scan in det_data1]
|
||||
export = [scan["export"] for scan in det_data1]
|
||||
scan_table_source.data.update(fit=fit_ok, export=export)
|
||||
|
||||
def _update_plot():
|
||||
plot_scatter_source = [plot_scatter1_source, plot_scatter2_source]
|
||||
plot_fit_source = [plot_fit1_source, plot_fit2_source]
|
||||
plot_bkg_source = [plot_bkg1_source, plot_bkg2_source]
|
||||
plot_peak_source = [plot_peak1_source, plot_peak2_source]
|
||||
fit_output = ""
|
||||
|
||||
for ind, scan in enumerate(_get_selected_scan()):
|
||||
scatter_source = plot_scatter_source[ind]
|
||||
fit_source = plot_fit_source[ind]
|
||||
bkg_source = plot_bkg_source[ind]
|
||||
peak_source = plot_peak_source[ind]
|
||||
scan_motor = scan["scan_motor"]
|
||||
|
||||
y = scan["counts"]
|
||||
y_err = scan["counts_err"]
|
||||
x = scan[scan_motor]
|
||||
|
||||
plot.axis[0].axis_label = scan_motor
|
||||
scatter_source.data.update(x=x, y=y, y_upper=y + y_err, y_lower=y - y_err)
|
||||
|
||||
fit = scan.get("fit")
|
||||
if fit is not None:
|
||||
x_fit = np.linspace(x[0], x[-1], 100)
|
||||
fit_source.data.update(x=x_fit, y=fit.eval(x=x_fit))
|
||||
|
||||
x_bkg = []
|
||||
y_bkg = []
|
||||
xs_peak = []
|
||||
ys_peak = []
|
||||
comps = fit.eval_components(x=x_fit)
|
||||
for i, model in enumerate(fit_params):
|
||||
if "linear" in model:
|
||||
x_bkg = x_fit
|
||||
y_bkg = comps[f"f{i}_"]
|
||||
|
||||
elif any(val in model for val in ("gaussian", "voigt", "pvoigt")):
|
||||
xs_peak.append(x_fit)
|
||||
ys_peak.append(comps[f"f{i}_"])
|
||||
|
||||
bkg_source.data.update(x=x_bkg, y=y_bkg)
|
||||
peak_source.data.update(xs=xs_peak, ys=ys_peak)
|
||||
if fit_output:
|
||||
fit_output = fit_output + "\n\n"
|
||||
fit_output = fit_output + fit.fit_report()
|
||||
|
||||
else:
|
||||
fit_source.data.update(x=[], y=[])
|
||||
bkg_source.data.update(x=[], y=[])
|
||||
peak_source.data.update(xs=[], ys=[])
|
||||
|
||||
fit_output_textinput.value = fit_output
|
||||
|
||||
# Main plot
|
||||
plot = Plot(
|
||||
x_range=DataRange1d(),
|
||||
y_range=DataRange1d(only_visible=True),
|
||||
plot_height=470,
|
||||
plot_width=700,
|
||||
)
|
||||
|
||||
plot.add_layout(LinearAxis(axis_label="Counts"), place="left")
|
||||
plot.add_layout(LinearAxis(axis_label="Scan motor"), place="below")
|
||||
|
||||
plot.add_layout(Grid(dimension=0, ticker=BasicTicker()))
|
||||
plot.add_layout(Grid(dimension=1, ticker=BasicTicker()))
|
||||
|
||||
plot_scatter1_source = ColumnDataSource(dict(x=[0], y=[0], y_upper=[0], y_lower=[0]))
|
||||
plot_scatter1 = plot.add_glyph(
|
||||
plot_scatter1_source, Scatter(x="x", y="y", line_color="steelblue", fill_color="steelblue")
|
||||
)
|
||||
plot.add_layout(
|
||||
Whisker(source=plot_scatter1_source, base="x", upper="y_upper", lower="y_lower")
|
||||
)
|
||||
|
||||
plot_scatter2_source = ColumnDataSource(dict(x=[0], y=[0], y_upper=[0], y_lower=[0]))
|
||||
plot_scatter2 = plot.add_glyph(
|
||||
plot_scatter2_source, Scatter(x="x", y="y", line_color="firebrick", fill_color="firebrick")
|
||||
)
|
||||
plot.add_layout(
|
||||
Whisker(source=plot_scatter2_source, base="x", upper="y_upper", lower="y_lower")
|
||||
)
|
||||
|
||||
plot_fit1_source = ColumnDataSource(dict(x=[0], y=[0]))
|
||||
plot_fit1 = plot.add_glyph(plot_fit1_source, Line(x="x", y="y"))
|
||||
|
||||
plot_fit2_source = ColumnDataSource(dict(x=[0], y=[0]))
|
||||
plot_fit2 = plot.add_glyph(plot_fit2_source, Line(x="x", y="y"))
|
||||
|
||||
plot_bkg1_source = ColumnDataSource(dict(x=[0], y=[0]))
|
||||
plot_bkg1 = plot.add_glyph(
|
||||
plot_bkg1_source, Line(x="x", y="y", line_color="steelblue", line_dash="dashed")
|
||||
)
|
||||
|
||||
plot_bkg2_source = ColumnDataSource(dict(x=[0], y=[0]))
|
||||
plot_bkg2 = plot.add_glyph(
|
||||
plot_bkg2_source, Line(x="x", y="y", line_color="firebrick", line_dash="dashed")
|
||||
)
|
||||
|
||||
plot_peak1_source = ColumnDataSource(dict(xs=[[0]], ys=[[0]]))
|
||||
plot_peak1 = plot.add_glyph(
|
||||
plot_peak1_source, MultiLine(xs="xs", ys="ys", line_color="steelblue", line_dash="dashed")
|
||||
)
|
||||
|
||||
plot_peak2_source = ColumnDataSource(dict(xs=[[0]], ys=[[0]]))
|
||||
plot_peak2 = plot.add_glyph(
|
||||
plot_peak2_source, MultiLine(xs="xs", ys="ys", line_color="firebrick", line_dash="dashed")
|
||||
)
|
||||
|
||||
fit_from_span = Span(location=None, dimension="height", line_dash="dashed")
|
||||
plot.add_layout(fit_from_span)
|
||||
|
||||
fit_to_span = Span(location=None, dimension="height", line_dash="dashed")
|
||||
plot.add_layout(fit_to_span)
|
||||
|
||||
plot.add_layout(
|
||||
Legend(
|
||||
items=[
|
||||
("data 1", [plot_scatter1]),
|
||||
("data 2", [plot_scatter2]),
|
||||
("best fit 1", [plot_fit1]),
|
||||
("best fit 2", [plot_fit2]),
|
||||
("peak 1", [plot_peak1]),
|
||||
("peak 2", [plot_peak2]),
|
||||
("linear 1", [plot_bkg1]),
|
||||
("linear 2", [plot_bkg2]),
|
||||
],
|
||||
location="top_left",
|
||||
click_policy="hide",
|
||||
)
|
||||
)
|
||||
|
||||
plot.add_tools(PanTool(), WheelZoomTool(), ResetTool())
|
||||
plot.toolbar.logo = None
|
||||
|
||||
# Scan select
|
||||
def scan_table_select_callback(_attr, old, new):
|
||||
if not new:
|
||||
# skip empty selections
|
||||
return
|
||||
|
||||
# Avoid selection of multiple indicies (via Shift+Click or Ctrl+Click)
|
||||
if len(new) > 1:
|
||||
# drop selection to the previous one
|
||||
scan_table_source.selected.indices = old
|
||||
return
|
||||
|
||||
if len(old) > 1:
|
||||
# skip unnecessary update caused by selection drop
|
||||
return
|
||||
|
||||
_update_plot()
|
||||
|
||||
def scan_table_source_callback(_attr, _old, new):
|
||||
# unfortunately, we don't know if the change comes from data update or user input
|
||||
# also `old` and `new` are the same for non-scalars
|
||||
for scan1, scan2, export in zip(det_data1, det_data2, new["export"]):
|
||||
scan1["export"] = export
|
||||
scan2["export"] = export
|
||||
_update_preview()
|
||||
|
||||
scan_table_source = ColumnDataSource(
|
||||
dict(
|
||||
scan=[],
|
||||
hkl=[],
|
||||
fit=[],
|
||||
export=[],
|
||||
twotheta=[],
|
||||
gamma=[],
|
||||
omega=[],
|
||||
chi=[],
|
||||
phi=[],
|
||||
nu=[],
|
||||
)
|
||||
)
|
||||
scan_table_source.on_change("data", scan_table_source_callback)
|
||||
scan_table_source.selected.on_change("indices", scan_table_select_callback)
|
||||
|
||||
scan_table = DataTable(
|
||||
source=scan_table_source,
|
||||
columns=[
|
||||
TableColumn(field="scan", title="Scan", editor=CellEditor(), width=50),
|
||||
TableColumn(field="hkl", title="hkl", editor=CellEditor(), width=100),
|
||||
TableColumn(field="fit", title="Fit", editor=CellEditor(), width=50),
|
||||
TableColumn(field="export", title="Export", editor=CheckboxEditor(), width=50),
|
||||
TableColumn(field="twotheta", title="2theta", editor=CellEditor(), width=50),
|
||||
TableColumn(field="gamma", title="gamma", editor=CellEditor(), width=50),
|
||||
TableColumn(field="omega", title="omega", editor=CellEditor(), width=50),
|
||||
TableColumn(field="chi", title="chi", editor=CellEditor(), width=50),
|
||||
TableColumn(field="phi", title="phi", editor=CellEditor(), width=50),
|
||||
TableColumn(field="nu", title="nu", editor=CellEditor(), width=50),
|
||||
],
|
||||
width=310, # +60 because of the index column, but excluding twotheta onwards
|
||||
height=350,
|
||||
autosize_mode="none",
|
||||
editable=True,
|
||||
)
|
||||
|
||||
def _get_selected_scan():
|
||||
ind = scan_table_source.selected.indices[0]
|
||||
return det_data1[ind], det_data2[ind]
|
||||
|
||||
merge_from_select = Select(title="scan:", width=145)
|
||||
|
||||
def merge_button_callback():
|
||||
scan_into1, scan_into2 = _get_selected_scan()
|
||||
scan_from1 = det_data1[int(merge_from_select.value)]
|
||||
scan_from2 = det_data2[int(merge_from_select.value)]
|
||||
|
||||
if scan_into1 is scan_from1:
|
||||
print("WARNING: Selected scans for merging are identical")
|
||||
return
|
||||
|
||||
pyzebra.merge_scans(scan_into1, scan_from1)
|
||||
pyzebra.merge_scans(scan_into2, scan_from2)
|
||||
_update_table()
|
||||
_update_plot()
|
||||
|
||||
merge_button = Button(label="Merge into current", width=145)
|
||||
merge_button.on_click(merge_button_callback)
|
||||
|
||||
def restore_button_callback():
|
||||
scan1, scan2 = _get_selected_scan()
|
||||
pyzebra.restore_scan(scan1)
|
||||
pyzebra.restore_scan(scan2)
|
||||
_update_table()
|
||||
_update_plot()
|
||||
|
||||
restore_button = Button(label="Restore scan", width=145)
|
||||
restore_button.on_click(restore_button_callback)
|
||||
|
||||
def fit_from_spinner_callback(_attr, _old, new):
|
||||
fit_from_span.location = new
|
||||
|
||||
fit_from_spinner = Spinner(title="Fit from:", width=145)
|
||||
fit_from_spinner.on_change("value", fit_from_spinner_callback)
|
||||
|
||||
def fit_to_spinner_callback(_attr, _old, new):
|
||||
fit_to_span.location = new
|
||||
|
||||
fit_to_spinner = Spinner(title="to:", width=145)
|
||||
fit_to_spinner.on_change("value", fit_to_spinner_callback)
|
||||
|
||||
def fitparams_add_dropdown_callback(click):
|
||||
# bokeh requires (str, str) for MultiSelect options
|
||||
new_tag = f"{click.item}-{fitparams_select.tags[0]}"
|
||||
fitparams_select.options.append((new_tag, click.item))
|
||||
fit_params[new_tag] = fitparams_factory(click.item)
|
||||
fitparams_select.tags[0] += 1
|
||||
|
||||
fitparams_add_dropdown = Dropdown(
|
||||
label="Add fit function",
|
||||
menu=[
|
||||
("Linear", "linear"),
|
||||
("Gaussian", "gaussian"),
|
||||
("Voigt", "voigt"),
|
||||
("Pseudo Voigt", "pvoigt"),
|
||||
# ("Pseudo Voigt1", "pseudovoigt1"),
|
||||
],
|
||||
width=145,
|
||||
)
|
||||
fitparams_add_dropdown.on_click(fitparams_add_dropdown_callback)
|
||||
|
||||
def fitparams_select_callback(_attr, old, new):
|
||||
# Avoid selection of multiple indicies (via Shift+Click or Ctrl+Click)
|
||||
if len(new) > 1:
|
||||
# drop selection to the previous one
|
||||
fitparams_select.value = old
|
||||
return
|
||||
|
||||
if len(old) > 1:
|
||||
# skip unnecessary update caused by selection drop
|
||||
return
|
||||
|
||||
if new:
|
||||
fitparams_table_source.data.update(fit_params[new[0]])
|
||||
else:
|
||||
fitparams_table_source.data.update(dict(param=[], value=[], vary=[], min=[], max=[]))
|
||||
|
||||
fitparams_select = MultiSelect(options=[], height=120, width=145)
|
||||
fitparams_select.tags = [0]
|
||||
fitparams_select.on_change("value", fitparams_select_callback)
|
||||
|
||||
def fitparams_remove_button_callback():
|
||||
if fitparams_select.value:
|
||||
sel_tag = fitparams_select.value[0]
|
||||
del fit_params[sel_tag]
|
||||
for elem in fitparams_select.options:
|
||||
if elem[0] == sel_tag:
|
||||
fitparams_select.options.remove(elem)
|
||||
break
|
||||
|
||||
fitparams_select.value = []
|
||||
|
||||
fitparams_remove_button = Button(label="Remove fit function", width=145)
|
||||
fitparams_remove_button.on_click(fitparams_remove_button_callback)
|
||||
|
||||
def fitparams_factory(function):
|
||||
if function == "linear":
|
||||
params = ["slope", "intercept"]
|
||||
elif function == "gaussian":
|
||||
params = ["amplitude", "center", "sigma"]
|
||||
elif function == "voigt":
|
||||
params = ["amplitude", "center", "sigma", "gamma"]
|
||||
elif function == "pvoigt":
|
||||
params = ["amplitude", "center", "sigma", "fraction"]
|
||||
elif function == "pseudovoigt1":
|
||||
params = ["amplitude", "center", "g_sigma", "l_sigma", "fraction"]
|
||||
else:
|
||||
raise ValueError("Unknown fit function")
|
||||
|
||||
n = len(params)
|
||||
fitparams = dict(
|
||||
param=params, value=[None] * n, vary=[True] * n, min=[None] * n, max=[None] * n,
|
||||
)
|
||||
|
||||
if function == "linear":
|
||||
fitparams["value"] = [0, 1]
|
||||
fitparams["vary"] = [False, True]
|
||||
fitparams["min"] = [None, 0]
|
||||
|
||||
elif function == "gaussian":
|
||||
fitparams["min"] = [0, None, None]
|
||||
|
||||
return fitparams
|
||||
|
||||
fitparams_table_source = ColumnDataSource(dict(param=[], value=[], vary=[], min=[], max=[]))
|
||||
fitparams_table = DataTable(
|
||||
source=fitparams_table_source,
|
||||
columns=[
|
||||
TableColumn(field="param", title="Parameter", editor=CellEditor()),
|
||||
TableColumn(field="value", title="Value", editor=NumberEditor()),
|
||||
TableColumn(field="vary", title="Vary", editor=CheckboxEditor()),
|
||||
TableColumn(field="min", title="Min", editor=NumberEditor()),
|
||||
TableColumn(field="max", title="Max", editor=NumberEditor()),
|
||||
],
|
||||
height=200,
|
||||
width=350,
|
||||
index_position=None,
|
||||
editable=True,
|
||||
auto_edit=True,
|
||||
)
|
||||
|
||||
# start with `background` and `gauss` fit functions added
|
||||
fitparams_add_dropdown_callback(types.SimpleNamespace(item="linear"))
|
||||
fitparams_add_dropdown_callback(types.SimpleNamespace(item="gaussian"))
|
||||
fitparams_select.value = ["gaussian-1"] # add selection to gauss
|
||||
|
||||
fit_output_textinput = TextAreaInput(title="Fit results:", width=750, height=200)
|
||||
|
||||
def proc_all_button_callback():
|
||||
for scan in [*det_data1, *det_data2]:
|
||||
if scan["export"]:
|
||||
pyzebra.fit_scan(
|
||||
scan, fit_params, fit_from=fit_from_spinner.value, fit_to=fit_to_spinner.value
|
||||
)
|
||||
pyzebra.get_area(
|
||||
scan,
|
||||
area_method=AREA_METHODS[area_method_radiobutton.active],
|
||||
lorentz=lorentz_checkbox.active,
|
||||
)
|
||||
|
||||
_update_plot()
|
||||
_update_table()
|
||||
|
||||
proc_all_button = Button(label="Process All", button_type="primary", width=145)
|
||||
proc_all_button.on_click(proc_all_button_callback)
|
||||
|
||||
def proc_button_callback():
|
||||
for scan in _get_selected_scan():
|
||||
pyzebra.fit_scan(
|
||||
scan, fit_params, fit_from=fit_from_spinner.value, fit_to=fit_to_spinner.value
|
||||
)
|
||||
pyzebra.get_area(
|
||||
scan,
|
||||
area_method=AREA_METHODS[area_method_radiobutton.active],
|
||||
lorentz=lorentz_checkbox.active,
|
||||
)
|
||||
|
||||
_update_plot()
|
||||
_update_table()
|
||||
|
||||
proc_button = Button(label="Process Current", width=145)
|
||||
proc_button.on_click(proc_button_callback)
|
||||
|
||||
area_method_div = Div(text="Intensity:", margin=(5, 5, 0, 5))
|
||||
area_method_radiobutton = RadioGroup(labels=["Function", "Area"], active=0, width=145)
|
||||
|
||||
intensity_diff_div = Div(text="Intensity difference:", margin=(5, 5, 0, 5))
|
||||
intensity_diff_radiobutton = RadioGroup(
|
||||
labels=["file1 - file2", "file2 - file1"], active=0, width=145
|
||||
)
|
||||
|
||||
lorentz_checkbox = CheckboxGroup(labels=["Lorentz Correction"], width=145, margin=(13, 5, 5, 5))
|
||||
|
||||
export_preview_textinput = TextAreaInput(title="Export file(s) preview:", width=500, height=400)
|
||||
|
||||
def _update_preview():
|
||||
with tempfile.TemporaryDirectory() as temp_dir:
|
||||
temp_file = temp_dir + "/temp"
|
||||
export_data1 = []
|
||||
export_data2 = []
|
||||
for scan1, scan2 in zip(det_data1, det_data2):
|
||||
if scan1["export"]:
|
||||
export_data1.append(scan1)
|
||||
export_data2.append(scan2)
|
||||
|
||||
if intensity_diff_radiobutton.active:
|
||||
export_data1, export_data2 = export_data2, export_data1
|
||||
|
||||
pyzebra.export_ccl_compare(
|
||||
export_data1,
|
||||
export_data2,
|
||||
temp_file,
|
||||
export_target_select.value,
|
||||
hkl_precision=int(hkl_precision_select.value),
|
||||
)
|
||||
|
||||
exported_content = ""
|
||||
file_content = []
|
||||
for ext in EXPORT_TARGETS[export_target_select.value]:
|
||||
fname = temp_file + ext
|
||||
if os.path.isfile(fname):
|
||||
with open(fname) as f:
|
||||
content = f.read()
|
||||
exported_content += f"{ext} file:\n" + content
|
||||
else:
|
||||
content = ""
|
||||
file_content.append(content)
|
||||
|
||||
js_data.data.update(content=file_content)
|
||||
export_preview_textinput.value = exported_content
|
||||
|
||||
def export_target_select_callback(_attr, _old, new):
|
||||
js_data.data.update(ext=EXPORT_TARGETS[new])
|
||||
_update_preview()
|
||||
|
||||
export_target_select = Select(
|
||||
title="Export target:", options=list(EXPORT_TARGETS.keys()), value="fullprof", width=80
|
||||
)
|
||||
export_target_select.on_change("value", export_target_select_callback)
|
||||
js_data.data.update(ext=EXPORT_TARGETS[export_target_select.value])
|
||||
|
||||
def hkl_precision_select_callback(_attr, _old, _new):
|
||||
_update_preview()
|
||||
|
||||
hkl_precision_select = Select(
|
||||
title="hkl precision:", options=["2", "3", "4"], value="2", width=80
|
||||
)
|
||||
hkl_precision_select.on_change("value", hkl_precision_select_callback)
|
||||
|
||||
save_button = Button(label="Download File(s)", button_type="success", width=200)
|
||||
save_button.js_on_click(CustomJS(args={"js_data": js_data}, code=javaScript))
|
||||
|
||||
fitpeak_controls = row(
|
||||
column(fitparams_add_dropdown, fitparams_select, fitparams_remove_button),
|
||||
fitparams_table,
|
||||
Spacer(width=20),
|
||||
column(
|
||||
fit_from_spinner,
|
||||
lorentz_checkbox,
|
||||
area_method_div,
|
||||
area_method_radiobutton,
|
||||
intensity_diff_div,
|
||||
intensity_diff_radiobutton,
|
||||
),
|
||||
column(fit_to_spinner, proc_button, proc_all_button),
|
||||
)
|
||||
|
||||
scan_layout = column(
|
||||
scan_table,
|
||||
row(monitor_spinner, column(Spacer(height=19), restore_button)),
|
||||
row(column(Spacer(height=19), merge_button), merge_from_select),
|
||||
)
|
||||
|
||||
import_layout = column(file_select, file_open_button, upload_div, upload_button)
|
||||
|
||||
export_layout = column(
|
||||
export_preview_textinput,
|
||||
row(
|
||||
export_target_select, hkl_precision_select, column(Spacer(height=19), row(save_button))
|
||||
),
|
||||
)
|
||||
|
||||
tab_layout = column(
|
||||
row(import_layout, scan_layout, plot, Spacer(width=30), export_layout),
|
||||
row(fitpeak_controls, fit_output_textinput),
|
||||
)
|
||||
|
||||
return Panel(child=tab_layout, title="ccl compare")
|
@ -5,11 +5,14 @@ import tempfile
|
||||
import types
|
||||
|
||||
import numpy as np
|
||||
from bokeh.io import curdoc
|
||||
from bokeh.layouts import column, row
|
||||
from bokeh.models import (
|
||||
BasicTicker,
|
||||
Button,
|
||||
CellEditor,
|
||||
CheckboxEditor,
|
||||
CheckboxGroup,
|
||||
ColumnDataSource,
|
||||
CustomJS,
|
||||
DataRange1d,
|
||||
@ -27,7 +30,7 @@ from bokeh.models import (
|
||||
Panel,
|
||||
PanTool,
|
||||
Plot,
|
||||
RadioButtonGroup,
|
||||
RadioGroup,
|
||||
ResetTool,
|
||||
Scatter,
|
||||
Select,
|
||||
@ -36,165 +39,228 @@ from bokeh.models import (
|
||||
Spinner,
|
||||
TableColumn,
|
||||
TextAreaInput,
|
||||
TextInput,
|
||||
Toggle,
|
||||
WheelZoomTool,
|
||||
Whisker,
|
||||
)
|
||||
|
||||
import pyzebra
|
||||
from pyzebra.ccl_io import AREA_METHODS
|
||||
from pyzebra.ccl_io import EXPORT_TARGETS
|
||||
from pyzebra.ccl_process import AREA_METHODS
|
||||
|
||||
|
||||
javaScript = """
|
||||
let j = 0;
|
||||
for (let i = 0; i < js_data.data['fname'].length; i++) {
|
||||
if (js_data.data['content'][i] === "") continue;
|
||||
|
||||
const blob = new Blob([js_data.data['content'][i]], {type: 'text/plain'})
|
||||
const link = document.createElement('a');
|
||||
document.body.appendChild(link);
|
||||
const url = window.URL.createObjectURL(blob);
|
||||
link.href = url;
|
||||
link.download = js_data.data['fname'][i];
|
||||
link.click();
|
||||
window.URL.revokeObjectURL(url);
|
||||
document.body.removeChild(link);
|
||||
setTimeout(function() {
|
||||
const blob = new Blob([js_data.data['content'][i]], {type: 'text/plain'})
|
||||
const link = document.createElement('a');
|
||||
document.body.appendChild(link);
|
||||
const url = window.URL.createObjectURL(blob);
|
||||
link.href = url;
|
||||
link.download = js_data.data['fname'][i] + js_data.data['ext'][i];
|
||||
link.click();
|
||||
window.URL.revokeObjectURL(url);
|
||||
document.body.removeChild(link);
|
||||
}, 100 * j)
|
||||
|
||||
j++;
|
||||
}
|
||||
"""
|
||||
|
||||
PROPOSAL_PATH = "/afs/psi.ch/project/sinqdata/2020/zebra/"
|
||||
|
||||
|
||||
def create():
|
||||
det_data = {}
|
||||
doc = curdoc()
|
||||
det_data = []
|
||||
fit_params = {}
|
||||
js_data = ColumnDataSource(data=dict(content=["", ""], fname=["", ""]))
|
||||
js_data = ColumnDataSource(data=dict(content=["", ""], fname=["", ""], ext=["", ""]))
|
||||
|
||||
def proposal_textinput_callback(_attr, _old, new):
|
||||
ccl_path = os.path.join(PROPOSAL_PATH, new.strip())
|
||||
ccl_file_list = []
|
||||
for file in os.listdir(ccl_path):
|
||||
if file.endswith((".ccl", ".dat")):
|
||||
ccl_file_list.append((os.path.join(ccl_path, file), file))
|
||||
file_select.options = ccl_file_list
|
||||
def file_select_update_for_proposal():
|
||||
proposal_path = proposal_textinput.name
|
||||
if proposal_path:
|
||||
file_list = []
|
||||
for file in os.listdir(proposal_path):
|
||||
if file.endswith((".ccl", ".dat")):
|
||||
file_list.append((os.path.join(proposal_path, file), file))
|
||||
file_select.options = file_list
|
||||
file_open_button.disabled = False
|
||||
file_append_button.disabled = False
|
||||
else:
|
||||
file_select.options = []
|
||||
file_open_button.disabled = True
|
||||
file_append_button.disabled = True
|
||||
|
||||
proposal_textinput = TextInput(title="Proposal number:", default_size=145)
|
||||
proposal_textinput.on_change("value", proposal_textinput_callback)
|
||||
doc.add_periodic_callback(file_select_update_for_proposal, 5000)
|
||||
|
||||
def proposal_textinput_callback(_attr, _old, _new):
|
||||
file_select_update_for_proposal()
|
||||
|
||||
proposal_textinput = doc.proposal_textinput
|
||||
proposal_textinput.on_change("name", proposal_textinput_callback)
|
||||
|
||||
def _init_datatable():
|
||||
scan_list = [s["idx"] for s in det_data]
|
||||
hkl = [f'{s["h"]} {s["k"]} {s["l"]}' for s in det_data]
|
||||
export = [s.get("active", True) for s in det_data]
|
||||
export = [s["export"] for s in det_data]
|
||||
|
||||
twotheta = [np.median(s["twotheta"]) if "twotheta" in s else None for s in det_data]
|
||||
gamma = [np.median(s["gamma"]) if "gamma" in s else None for s in det_data]
|
||||
omega = [np.median(s["omega"]) if "omega" in s else None for s in det_data]
|
||||
chi = [np.median(s["chi"]) if "chi" in s else None for s in det_data]
|
||||
phi = [np.median(s["phi"]) if "phi" in s else None for s in det_data]
|
||||
nu = [np.median(s["nu"]) if "nu" in s else None for s in det_data]
|
||||
|
||||
scan_table_source.data.update(
|
||||
scan=scan_list, hkl=hkl, fit=[0] * len(scan_list), export=export,
|
||||
scan=scan_list,
|
||||
hkl=hkl,
|
||||
fit=[0] * len(scan_list),
|
||||
export=export,
|
||||
twotheta=twotheta,
|
||||
gamma=gamma,
|
||||
omega=omega,
|
||||
chi=chi,
|
||||
phi=phi,
|
||||
nu=nu,
|
||||
)
|
||||
scan_table_source.selected.indices = []
|
||||
scan_table_source.selected.indices = [0]
|
||||
|
||||
merge_options = [(str(i), f"{i} ({idx})") for i, idx in enumerate(scan_list)]
|
||||
merge_source_select.options = merge_options
|
||||
merge_source_select.value = merge_options[0][0]
|
||||
merge_dest_select.options = merge_options
|
||||
merge_dest_select.value = merge_options[0][0]
|
||||
merge_from_select.options = merge_options
|
||||
merge_from_select.value = merge_options[0][0]
|
||||
|
||||
def ccl_file_select_callback(_attr, _old, _new):
|
||||
pass
|
||||
|
||||
file_select = MultiSelect(title="Available .ccl/.dat files:", default_size=200, height=250)
|
||||
file_select.on_change("value", ccl_file_select_callback)
|
||||
file_select = MultiSelect(title="Available .ccl/.dat files:", width=210, height=250)
|
||||
|
||||
def file_open_button_callback():
|
||||
nonlocal det_data
|
||||
det_data = []
|
||||
for f_name in file_select.value:
|
||||
with open(f_name) as file:
|
||||
new_data = []
|
||||
for f_path in file_select.value:
|
||||
with open(f_path) as file:
|
||||
f_name = os.path.basename(f_path)
|
||||
base, ext = os.path.splitext(f_name)
|
||||
if det_data:
|
||||
append_data = pyzebra.parse_1D(file, ext)
|
||||
pyzebra.normalize_dataset(append_data, monitor_spinner.value)
|
||||
pyzebra.merge_datasets(det_data, append_data)
|
||||
else:
|
||||
det_data = pyzebra.parse_1D(file, ext)
|
||||
pyzebra.normalize_dataset(det_data, monitor_spinner.value)
|
||||
pyzebra.merge_duplicates(det_data)
|
||||
js_data.data.update(fname=[base + ".comm", base + ".incomm"])
|
||||
try:
|
||||
file_data = pyzebra.parse_1D(file, ext)
|
||||
except:
|
||||
print(f"Error loading {f_name}")
|
||||
continue
|
||||
|
||||
_init_datatable()
|
||||
pyzebra.normalize_dataset(file_data, monitor_spinner.value)
|
||||
|
||||
file_open_button = Button(label="Open New", default_size=100)
|
||||
if not new_data: # first file
|
||||
new_data = file_data
|
||||
pyzebra.merge_duplicates(new_data)
|
||||
js_data.data.update(fname=[base, base])
|
||||
else:
|
||||
pyzebra.merge_datasets(new_data, file_data)
|
||||
|
||||
if new_data:
|
||||
det_data = new_data
|
||||
_init_datatable()
|
||||
append_upload_button.disabled = False
|
||||
|
||||
file_open_button = Button(label="Open New", width=100, disabled=True)
|
||||
file_open_button.on_click(file_open_button_callback)
|
||||
|
||||
def file_append_button_callback():
|
||||
for f_name in file_select.value:
|
||||
with open(f_name) as file:
|
||||
file_data = []
|
||||
for f_path in file_select.value:
|
||||
with open(f_path) as file:
|
||||
f_name = os.path.basename(f_path)
|
||||
_, ext = os.path.splitext(f_name)
|
||||
append_data = pyzebra.parse_1D(file, ext)
|
||||
try:
|
||||
file_data = pyzebra.parse_1D(file, ext)
|
||||
except:
|
||||
print(f"Error loading {f_name}")
|
||||
continue
|
||||
|
||||
pyzebra.normalize_dataset(append_data, monitor_spinner.value)
|
||||
pyzebra.merge_datasets(det_data, append_data)
|
||||
pyzebra.normalize_dataset(file_data, monitor_spinner.value)
|
||||
pyzebra.merge_datasets(det_data, file_data)
|
||||
|
||||
_init_datatable()
|
||||
if file_data:
|
||||
_init_datatable()
|
||||
|
||||
file_append_button = Button(label="Append", default_size=100)
|
||||
file_append_button = Button(label="Append", width=100, disabled=True)
|
||||
file_append_button.on_click(file_append_button_callback)
|
||||
|
||||
def upload_button_callback(_attr, _old, new):
|
||||
def upload_button_callback(_attr, _old, _new):
|
||||
nonlocal det_data
|
||||
det_data = []
|
||||
for f_str, f_name in zip(new, upload_button.filename):
|
||||
new_data = []
|
||||
for f_str, f_name in zip(upload_button.value, upload_button.filename):
|
||||
with io.StringIO(base64.b64decode(f_str).decode()) as file:
|
||||
base, ext = os.path.splitext(f_name)
|
||||
if det_data:
|
||||
append_data = pyzebra.parse_1D(file, ext)
|
||||
pyzebra.normalize_dataset(append_data, monitor_spinner.value)
|
||||
pyzebra.merge_datasets(det_data, append_data)
|
||||
else:
|
||||
det_data = pyzebra.parse_1D(file, ext)
|
||||
pyzebra.normalize_dataset(det_data, monitor_spinner.value)
|
||||
pyzebra.merge_duplicates(det_data)
|
||||
js_data.data.update(fname=[base + ".comm", base + ".incomm"])
|
||||
try:
|
||||
file_data = pyzebra.parse_1D(file, ext)
|
||||
except:
|
||||
print(f"Error loading {f_name}")
|
||||
continue
|
||||
|
||||
_init_datatable()
|
||||
pyzebra.normalize_dataset(file_data, monitor_spinner.value)
|
||||
|
||||
if not new_data: # first file
|
||||
new_data = file_data
|
||||
pyzebra.merge_duplicates(new_data)
|
||||
js_data.data.update(fname=[base, base])
|
||||
else:
|
||||
pyzebra.merge_datasets(new_data, file_data)
|
||||
|
||||
if new_data:
|
||||
det_data = new_data
|
||||
_init_datatable()
|
||||
append_upload_button.disabled = False
|
||||
|
||||
upload_div = Div(text="or upload new .ccl/.dat files:", margin=(5, 5, 0, 5))
|
||||
upload_button = FileInput(accept=".ccl,.dat", multiple=True, default_size=200)
|
||||
upload_button.on_change("value", upload_button_callback)
|
||||
upload_button = FileInput(accept=".ccl,.dat", multiple=True, width=200)
|
||||
# for on_change("value", ...) or on_change("filename", ...),
|
||||
# see https://github.com/bokeh/bokeh/issues/11461
|
||||
upload_button.on_change("filename", upload_button_callback)
|
||||
|
||||
def append_upload_button_callback(_attr, _old, new):
|
||||
for f_str, f_name in zip(new, append_upload_button.filename):
|
||||
def append_upload_button_callback(_attr, _old, _new):
|
||||
file_data = []
|
||||
for f_str, f_name in zip(append_upload_button.value, append_upload_button.filename):
|
||||
with io.StringIO(base64.b64decode(f_str).decode()) as file:
|
||||
_, ext = os.path.splitext(f_name)
|
||||
append_data = pyzebra.parse_1D(file, ext)
|
||||
try:
|
||||
file_data = pyzebra.parse_1D(file, ext)
|
||||
except:
|
||||
print(f"Error loading {f_name}")
|
||||
continue
|
||||
|
||||
pyzebra.normalize_dataset(append_data, monitor_spinner.value)
|
||||
pyzebra.merge_datasets(det_data, append_data)
|
||||
pyzebra.normalize_dataset(file_data, monitor_spinner.value)
|
||||
pyzebra.merge_datasets(det_data, file_data)
|
||||
|
||||
_init_datatable()
|
||||
if file_data:
|
||||
_init_datatable()
|
||||
|
||||
append_upload_div = Div(text="append extra files:", margin=(5, 5, 0, 5))
|
||||
append_upload_button = FileInput(accept=".ccl,.dat", multiple=True, default_size=200)
|
||||
append_upload_button.on_change("value", append_upload_button_callback)
|
||||
append_upload_button = FileInput(accept=".ccl,.dat", multiple=True, width=200, disabled=True)
|
||||
# for on_change("value", ...) or on_change("filename", ...),
|
||||
# see https://github.com/bokeh/bokeh/issues/11461
|
||||
append_upload_button.on_change("filename", append_upload_button_callback)
|
||||
|
||||
def monitor_spinner_callback(_attr, old, new):
|
||||
if det_data:
|
||||
pyzebra.normalize_dataset(det_data, new)
|
||||
_update_plot(_get_selected_scan())
|
||||
_update_plot()
|
||||
|
||||
monitor_spinner = Spinner(title="Monitor:", mode="int", value=100_000, low=1, width=145)
|
||||
monitor_spinner.on_change("value", monitor_spinner_callback)
|
||||
|
||||
def _update_table():
|
||||
fit_ok = [(1 if "fit" in scan else 0) for scan in det_data]
|
||||
scan_table_source.data.update(fit=fit_ok)
|
||||
export = [scan["export"] for scan in det_data]
|
||||
scan_table_source.data.update(fit=fit_ok, export=export)
|
||||
|
||||
def _update_plot(scan):
|
||||
def _update_plot():
|
||||
scan = _get_selected_scan()
|
||||
scan_motor = scan["scan_motor"]
|
||||
|
||||
y = scan["Counts"]
|
||||
y = scan["counts"]
|
||||
y_err = scan["counts_err"]
|
||||
x = scan[scan_motor]
|
||||
|
||||
plot.axis[0].axis_label = scan_motor
|
||||
plot_scatter_source.data.update(x=x, y=y, y_upper=y + np.sqrt(y), y_lower=y - np.sqrt(y))
|
||||
plot_scatter_source.data.update(x=x, y=y, y_upper=y + y_err, y_lower=y - y_err)
|
||||
|
||||
fit = scan.get("fit")
|
||||
if fit is not None:
|
||||
@ -242,7 +308,7 @@ def create():
|
||||
|
||||
plot_scatter_source = ColumnDataSource(dict(x=[0], y=[0], y_upper=[0], y_lower=[0]))
|
||||
plot_scatter = plot.add_glyph(
|
||||
plot_scatter_source, Scatter(x="x", y="y", line_color="steelblue")
|
||||
plot_scatter_source, Scatter(x="x", y="y", line_color="steelblue", fill_color="steelblue")
|
||||
)
|
||||
plot.add_layout(Whisker(source=plot_scatter_source, base="x", upper="y_upper", lower="y_lower"))
|
||||
|
||||
@ -254,7 +320,7 @@ def create():
|
||||
plot_bkg_source, Line(x="x", y="y", line_color="green", line_dash="dashed")
|
||||
)
|
||||
|
||||
plot_peak_source = ColumnDataSource(dict(xs=[0], ys=[0]))
|
||||
plot_peak_source = ColumnDataSource(dict(xs=[[0]], ys=[[0]]))
|
||||
plot_peak = plot.add_glyph(
|
||||
plot_peak_source, MultiLine(xs="xs", ys="ys", line_color="red", line_dash="dashed")
|
||||
)
|
||||
@ -297,55 +363,90 @@ def create():
|
||||
# skip unnecessary update caused by selection drop
|
||||
return
|
||||
|
||||
_update_plot(det_data[new[0]])
|
||||
_update_plot()
|
||||
|
||||
def scan_table_source_callback(_attr, _old, new):
|
||||
# unfortunately, we don't know if the change comes from data update or user input
|
||||
# also `old` and `new` are the same for non-scalars
|
||||
for scan, export in zip(det_data, new["export"]):
|
||||
scan["export"] = export
|
||||
_update_preview()
|
||||
|
||||
scan_table_source = ColumnDataSource(
|
||||
dict(
|
||||
scan=[],
|
||||
hkl=[],
|
||||
fit=[],
|
||||
export=[],
|
||||
twotheta=[],
|
||||
gamma=[],
|
||||
omega=[],
|
||||
chi=[],
|
||||
phi=[],
|
||||
nu=[],
|
||||
)
|
||||
)
|
||||
scan_table_source.on_change("data", scan_table_source_callback)
|
||||
scan_table_source.selected.on_change("indices", scan_table_select_callback)
|
||||
|
||||
scan_table_source = ColumnDataSource(dict(scan=[], hkl=[], fit=[], export=[]))
|
||||
scan_table = DataTable(
|
||||
source=scan_table_source,
|
||||
columns=[
|
||||
TableColumn(field="scan", title="Scan", width=50),
|
||||
TableColumn(field="hkl", title="hkl", width=100),
|
||||
TableColumn(field="fit", title="Fit", width=50),
|
||||
TableColumn(field="scan", title="Scan", editor=CellEditor(), width=50),
|
||||
TableColumn(field="hkl", title="hkl", editor=CellEditor(), width=100),
|
||||
TableColumn(field="fit", title="Fit", editor=CellEditor(), width=50),
|
||||
TableColumn(field="export", title="Export", editor=CheckboxEditor(), width=50),
|
||||
TableColumn(field="twotheta", title="2theta", editor=CellEditor(), width=50),
|
||||
TableColumn(field="gamma", title="gamma", editor=CellEditor(), width=50),
|
||||
TableColumn(field="omega", title="omega", editor=CellEditor(), width=50),
|
||||
TableColumn(field="chi", title="chi", editor=CellEditor(), width=50),
|
||||
TableColumn(field="phi", title="phi", editor=CellEditor(), width=50),
|
||||
TableColumn(field="nu", title="nu", editor=CellEditor(), width=50),
|
||||
],
|
||||
width=310, # +60 because of the index column
|
||||
width=310, # +60 because of the index column, but excluding twotheta onwards
|
||||
height=350,
|
||||
autosize_mode="none",
|
||||
editable=True,
|
||||
)
|
||||
|
||||
scan_table_source.selected.on_change("indices", scan_table_select_callback)
|
||||
|
||||
def _get_selected_scan():
|
||||
return det_data[scan_table_source.selected.indices[0]]
|
||||
|
||||
merge_dest_select = Select(title="destination:", width=100)
|
||||
merge_source_select = Select(title="source:", width=100)
|
||||
merge_from_select = Select(title="scan:", width=145)
|
||||
|
||||
def merge_button_callback():
|
||||
scan_dest_ind = int(merge_dest_select.value)
|
||||
scan_source_ind = int(merge_source_select.value)
|
||||
scan_into = _get_selected_scan()
|
||||
scan_from = det_data[int(merge_from_select.value)]
|
||||
|
||||
if scan_dest_ind == scan_source_ind:
|
||||
if scan_into is scan_from:
|
||||
print("WARNING: Selected scans for merging are identical")
|
||||
return
|
||||
|
||||
pyzebra.merge_scans(det_data[scan_dest_ind], det_data[scan_source_ind])
|
||||
_update_plot(_get_selected_scan())
|
||||
pyzebra.merge_scans(scan_into, scan_from)
|
||||
_update_table()
|
||||
_update_plot()
|
||||
|
||||
merge_button = Button(label="Merge scans", width=145)
|
||||
merge_button = Button(label="Merge into current", width=145)
|
||||
merge_button.on_click(merge_button_callback)
|
||||
|
||||
def restore_button_callback():
|
||||
pyzebra.restore_scan(_get_selected_scan())
|
||||
_update_table()
|
||||
_update_plot()
|
||||
|
||||
restore_button = Button(label="Restore scan", width=145)
|
||||
restore_button.on_click(restore_button_callback)
|
||||
|
||||
def fit_from_spinner_callback(_attr, _old, new):
|
||||
fit_from_span.location = new
|
||||
|
||||
fit_from_spinner = Spinner(title="Fit from:", default_size=145)
|
||||
fit_from_spinner = Spinner(title="Fit from:", width=145)
|
||||
fit_from_spinner.on_change("value", fit_from_spinner_callback)
|
||||
|
||||
def fit_to_spinner_callback(_attr, _old, new):
|
||||
fit_to_span.location = new
|
||||
|
||||
fit_to_spinner = Spinner(title="to:", default_size=145)
|
||||
fit_to_spinner = Spinner(title="to:", width=145)
|
||||
fit_to_spinner.on_change("value", fit_to_spinner_callback)
|
||||
|
||||
def fitparams_add_dropdown_callback(click):
|
||||
@ -364,8 +465,7 @@ def create():
|
||||
("Pseudo Voigt", "pvoigt"),
|
||||
# ("Pseudo Voigt1", "pseudovoigt1"),
|
||||
],
|
||||
default_size=145,
|
||||
disabled=True,
|
||||
width=145,
|
||||
)
|
||||
fitparams_add_dropdown.on_click(fitparams_add_dropdown_callback)
|
||||
|
||||
@ -385,7 +485,7 @@ def create():
|
||||
else:
|
||||
fitparams_table_source.data.update(dict(param=[], value=[], vary=[], min=[], max=[]))
|
||||
|
||||
fitparams_select = MultiSelect(options=[], height=120, default_size=145)
|
||||
fitparams_select = MultiSelect(options=[], height=120, width=145)
|
||||
fitparams_select.tags = [0]
|
||||
fitparams_select.on_change("value", fitparams_select_callback)
|
||||
|
||||
@ -400,7 +500,7 @@ def create():
|
||||
|
||||
fitparams_select.value = []
|
||||
|
||||
fitparams_remove_button = Button(label="Remove fit function", default_size=145, disabled=True)
|
||||
fitparams_remove_button = Button(label="Remove fit function", width=145)
|
||||
fitparams_remove_button.on_click(fitparams_remove_button_callback)
|
||||
|
||||
def fitparams_factory(function):
|
||||
@ -422,13 +522,21 @@ def create():
|
||||
param=params, value=[None] * n, vary=[True] * n, min=[None] * n, max=[None] * n,
|
||||
)
|
||||
|
||||
if function == "linear":
|
||||
fitparams["value"] = [0, 1]
|
||||
fitparams["vary"] = [False, True]
|
||||
fitparams["min"] = [None, 0]
|
||||
|
||||
elif function == "gaussian":
|
||||
fitparams["min"] = [0, None, None]
|
||||
|
||||
return fitparams
|
||||
|
||||
fitparams_table_source = ColumnDataSource(dict(param=[], value=[], vary=[], min=[], max=[]))
|
||||
fitparams_table = DataTable(
|
||||
source=fitparams_table_source,
|
||||
columns=[
|
||||
TableColumn(field="param", title="Parameter"),
|
||||
TableColumn(field="param", title="Parameter", editor=CellEditor()),
|
||||
TableColumn(field="value", title="Value", editor=NumberEditor()),
|
||||
TableColumn(field="vary", title="Vary", editor=CheckboxEditor()),
|
||||
TableColumn(field="min", title="Min", editor=NumberEditor()),
|
||||
@ -448,62 +556,66 @@ def create():
|
||||
|
||||
fit_output_textinput = TextAreaInput(title="Fit results:", width=750, height=200)
|
||||
|
||||
def fit_all_button_callback():
|
||||
for scan, export in zip(det_data, scan_table_source.data["export"]):
|
||||
if export:
|
||||
def proc_all_button_callback():
|
||||
for scan in det_data:
|
||||
if scan["export"]:
|
||||
pyzebra.fit_scan(
|
||||
scan, fit_params, fit_from=fit_from_spinner.value, fit_to=fit_to_spinner.value
|
||||
)
|
||||
pyzebra.get_area(
|
||||
scan,
|
||||
area_method=AREA_METHODS[area_method_radiobutton.active],
|
||||
lorentz=lorentz_checkbox.active,
|
||||
)
|
||||
|
||||
_update_plot(_get_selected_scan())
|
||||
_update_plot()
|
||||
_update_table()
|
||||
|
||||
fit_all_button = Button(label="Fit All", button_type="primary", default_size=145)
|
||||
fit_all_button.on_click(fit_all_button_callback)
|
||||
proc_all_button = Button(label="Process All", button_type="primary", width=145)
|
||||
proc_all_button.on_click(proc_all_button_callback)
|
||||
|
||||
def fit_button_callback():
|
||||
def proc_button_callback():
|
||||
scan = _get_selected_scan()
|
||||
pyzebra.fit_scan(
|
||||
scan, fit_params, fit_from=fit_from_spinner.value, fit_to=fit_to_spinner.value
|
||||
)
|
||||
pyzebra.get_area(
|
||||
scan,
|
||||
area_method=AREA_METHODS[area_method_radiobutton.active],
|
||||
lorentz=lorentz_checkbox.active,
|
||||
)
|
||||
|
||||
_update_plot(scan)
|
||||
_update_plot()
|
||||
_update_table()
|
||||
|
||||
fit_button = Button(label="Fit Current", default_size=145)
|
||||
fit_button.on_click(fit_button_callback)
|
||||
proc_button = Button(label="Process Current", width=145)
|
||||
proc_button.on_click(proc_button_callback)
|
||||
|
||||
area_method_radiobutton = RadioButtonGroup(
|
||||
labels=["Fit area", "Int area"], active=0, default_size=145, disabled=True
|
||||
)
|
||||
area_method_div = Div(text="Intensity:", margin=(5, 5, 0, 5))
|
||||
area_method_radiobutton = RadioGroup(labels=["Function", "Area"], active=0, width=145)
|
||||
|
||||
bin_size_spinner = Spinner(
|
||||
title="Bin size:", value=1, low=1, step=1, default_size=145, disabled=True
|
||||
)
|
||||
lorentz_checkbox = CheckboxGroup(labels=["Lorentz Correction"], width=145, margin=(13, 5, 5, 5))
|
||||
|
||||
lorentz_toggle = Toggle(label="Lorentz Correction", default_size=145)
|
||||
export_preview_textinput = TextAreaInput(title="Export file(s) preview:", width=500, height=400)
|
||||
|
||||
export_preview_textinput = TextAreaInput(title="Export preview:", width=500, height=400)
|
||||
|
||||
def preview_button_callback():
|
||||
def _update_preview():
|
||||
with tempfile.TemporaryDirectory() as temp_dir:
|
||||
temp_file = temp_dir + "/temp"
|
||||
export_data = []
|
||||
for s, export in zip(det_data, scan_table_source.data["export"]):
|
||||
if export:
|
||||
export_data.append(s)
|
||||
for scan in det_data:
|
||||
if scan["export"]:
|
||||
export_data.append(scan)
|
||||
|
||||
pyzebra.export_1D(
|
||||
export_data,
|
||||
temp_file,
|
||||
area_method=AREA_METHODS[int(area_method_radiobutton.active)],
|
||||
lorentz=lorentz_toggle.active,
|
||||
export_target_select.value,
|
||||
hkl_precision=int(hkl_precision_select.value),
|
||||
)
|
||||
|
||||
exported_content = ""
|
||||
file_content = []
|
||||
for ext in (".comm", ".incomm"):
|
||||
for ext in EXPORT_TARGETS[export_target_select.value]:
|
||||
fname = temp_file + ext
|
||||
if os.path.isfile(fname):
|
||||
with open(fname) as f:
|
||||
@ -516,36 +628,42 @@ def create():
|
||||
js_data.data.update(content=file_content)
|
||||
export_preview_textinput.value = exported_content
|
||||
|
||||
preview_button = Button(label="Preview", default_size=200)
|
||||
preview_button.on_click(preview_button_callback)
|
||||
def export_target_select_callback(_attr, _old, new):
|
||||
js_data.data.update(ext=EXPORT_TARGETS[new])
|
||||
_update_preview()
|
||||
|
||||
export_target_select = Select(
|
||||
title="Export target:", options=list(EXPORT_TARGETS.keys()), value="fullprof", width=80
|
||||
)
|
||||
export_target_select.on_change("value", export_target_select_callback)
|
||||
js_data.data.update(ext=EXPORT_TARGETS[export_target_select.value])
|
||||
|
||||
def hkl_precision_select_callback(_attr, _old, _new):
|
||||
_update_preview()
|
||||
|
||||
hkl_precision_select = Select(
|
||||
title="hkl precision:", options=["2", "3", "4"], value="2", default_size=80
|
||||
title="hkl precision:", options=["2", "3", "4"], value="2", width=80
|
||||
)
|
||||
hkl_precision_select.on_change("value", hkl_precision_select_callback)
|
||||
|
||||
save_button = Button(label="Download preview", button_type="success", default_size=200)
|
||||
save_button = Button(label="Download File(s)", button_type="success", width=200)
|
||||
save_button.js_on_click(CustomJS(args={"js_data": js_data}, code=javaScript))
|
||||
|
||||
fitpeak_controls = row(
|
||||
column(fitparams_add_dropdown, fitparams_select, fitparams_remove_button),
|
||||
fitparams_table,
|
||||
Spacer(width=20),
|
||||
column(
|
||||
row(fit_from_spinner, fit_to_spinner),
|
||||
row(bin_size_spinner, column(Spacer(height=19), lorentz_toggle)),
|
||||
row(area_method_radiobutton),
|
||||
row(fit_button, fit_all_button),
|
||||
),
|
||||
column(fit_from_spinner, lorentz_checkbox, area_method_div, area_method_radiobutton),
|
||||
column(fit_to_spinner, proc_button, proc_all_button),
|
||||
)
|
||||
|
||||
scan_layout = column(
|
||||
scan_table,
|
||||
monitor_spinner,
|
||||
row(column(Spacer(height=19), merge_button), merge_dest_select, merge_source_select),
|
||||
row(monitor_spinner, column(Spacer(height=19), restore_button)),
|
||||
row(column(Spacer(height=19), merge_button), merge_from_select),
|
||||
)
|
||||
|
||||
import_layout = column(
|
||||
proposal_textinput,
|
||||
file_select,
|
||||
row(file_open_button, file_append_button),
|
||||
upload_div,
|
||||
@ -556,7 +674,9 @@ def create():
|
||||
|
||||
export_layout = column(
|
||||
export_preview_textinput,
|
||||
row(hkl_precision_select, column(Spacer(height=19), row(preview_button, save_button))),
|
||||
row(
|
||||
export_target_select, hkl_precision_select, column(Spacer(height=19), row(save_button))
|
||||
),
|
||||
)
|
||||
|
||||
tab_layout = column(
|
||||
|
@ -1,5 +1,6 @@
|
||||
import base64
|
||||
import io
|
||||
import os
|
||||
import re
|
||||
import tempfile
|
||||
|
||||
@ -10,9 +11,9 @@ from bokeh.models import (
|
||||
Div,
|
||||
FileInput,
|
||||
Panel,
|
||||
RadioButtonGroup,
|
||||
Select,
|
||||
Spacer,
|
||||
Tabs,
|
||||
TextAreaInput,
|
||||
TextInput,
|
||||
)
|
||||
@ -29,7 +30,7 @@ def create():
|
||||
config.load_from_file(file)
|
||||
|
||||
logfile_textinput.value = config.logfile
|
||||
logfile_verbosity_select.value = config.logfile_verbosity
|
||||
logfile_verbosity.value = config.logfile_verbosity
|
||||
|
||||
filelist_type.value = config.filelist_type
|
||||
filelist_format_textinput.value = config.filelist_format
|
||||
@ -44,11 +45,16 @@ def create():
|
||||
ub_textareainput.value = config.crystal_UB
|
||||
|
||||
dataFactory_implementation_select.value = config.dataFactory_implementation
|
||||
dataFactory_dist1_textinput.value = config.dataFactory_dist1
|
||||
if config.dataFactory_dist1 is not None:
|
||||
dataFactory_dist1_textinput.value = config.dataFactory_dist1
|
||||
if config.dataFactory_dist2 is not None:
|
||||
dataFactory_dist2_textinput.value = config.dataFactory_dist2
|
||||
if config.dataFactory_dist3 is not None:
|
||||
dataFactory_dist3_textinput.value = config.dataFactory_dist3
|
||||
reflectionPrinter_format_select.value = config.reflectionPrinter_format
|
||||
|
||||
set_active_widgets(config.algorithm)
|
||||
if config.algorithm == "adaptivemaxcog":
|
||||
algorithm_params.active = 0
|
||||
threshold_textinput.value = config.threshold
|
||||
shell_textinput.value = config.shell
|
||||
steepness_textinput.value = config.steepness
|
||||
@ -57,6 +63,7 @@ def create():
|
||||
aps_window_textinput.value = str(tuple(map(int, config.aps_window.values())))
|
||||
|
||||
elif config.algorithm == "adaptivedynamic":
|
||||
algorithm_params.active = 1
|
||||
adm_window_textinput.value = str(tuple(map(int, config.adm_window.values())))
|
||||
border_textinput.value = str(tuple(map(int, config.border.values())))
|
||||
minWindow_textinput.value = str(tuple(map(int, config.minWindow.values())))
|
||||
@ -66,45 +73,16 @@ def create():
|
||||
loop_textinput.value = config.loop
|
||||
minPeakCount_textinput.value = config.minPeakCount
|
||||
displacementCurve_textinput.value = "\n".join(map(str, config.displacementCurve))
|
||||
|
||||
else:
|
||||
raise ValueError("Unknown processing mode.")
|
||||
|
||||
def set_active_widgets(implementation):
|
||||
if implementation == "adaptivemaxcog":
|
||||
mode_radio_button_group.active = 0
|
||||
disable_adaptivemaxcog = False
|
||||
disable_adaptivedynamic = True
|
||||
|
||||
elif implementation == "adaptivedynamic":
|
||||
mode_radio_button_group.active = 1
|
||||
disable_adaptivemaxcog = True
|
||||
disable_adaptivedynamic = False
|
||||
else:
|
||||
raise ValueError("Implementation can be either 'adaptivemaxcog' or 'adaptivedynamic'")
|
||||
|
||||
threshold_textinput.disabled = disable_adaptivemaxcog
|
||||
shell_textinput.disabled = disable_adaptivemaxcog
|
||||
steepness_textinput.disabled = disable_adaptivemaxcog
|
||||
duplicateDistance_textinput.disabled = disable_adaptivemaxcog
|
||||
maxequal_textinput.disabled = disable_adaptivemaxcog
|
||||
aps_window_textinput.disabled = disable_adaptivemaxcog
|
||||
|
||||
adm_window_textinput.disabled = disable_adaptivedynamic
|
||||
border_textinput.disabled = disable_adaptivedynamic
|
||||
minWindow_textinput.disabled = disable_adaptivedynamic
|
||||
reflectionFile_textinput.disabled = disable_adaptivedynamic
|
||||
targetMonitor_textinput.disabled = disable_adaptivedynamic
|
||||
smoothSize_textinput.disabled = disable_adaptivedynamic
|
||||
loop_textinput.disabled = disable_adaptivedynamic
|
||||
minPeakCount_textinput.disabled = disable_adaptivedynamic
|
||||
displacementCurve_textinput.disabled = disable_adaptivedynamic
|
||||
|
||||
def upload_button_callback(_attr, _old, new):
|
||||
with io.BytesIO(base64.b64decode(new)) as file:
|
||||
_load_config_file(file)
|
||||
|
||||
upload_div = Div(text="Open XML configuration file:")
|
||||
upload_button = FileInput(accept=".xml")
|
||||
upload_div = Div(text="Open .xml config:")
|
||||
upload_button = FileInput(accept=".xml", width=200)
|
||||
upload_button.on_change("value", upload_button_callback)
|
||||
|
||||
# General parameters
|
||||
@ -112,16 +90,14 @@ def create():
|
||||
def logfile_textinput_callback(_attr, _old, new):
|
||||
config.logfile = new
|
||||
|
||||
logfile_textinput = TextInput(title="Logfile:", value="logfile.log", width=320)
|
||||
logfile_textinput = TextInput(title="Logfile:", value="logfile.log")
|
||||
logfile_textinput.on_change("value", logfile_textinput_callback)
|
||||
|
||||
def logfile_verbosity_select_callback(_attr, _old, new):
|
||||
def logfile_verbosity_callback(_attr, _old, new):
|
||||
config.logfile_verbosity = new
|
||||
|
||||
logfile_verbosity_select = Select(
|
||||
title="verbosity:", options=["0", "5", "10", "15", "30"], width=70
|
||||
)
|
||||
logfile_verbosity_select.on_change("value", logfile_verbosity_select_callback)
|
||||
logfile_verbosity = TextInput(title="verbosity:", width=70)
|
||||
logfile_verbosity.on_change("value", logfile_verbosity_callback)
|
||||
|
||||
# ---- FileList
|
||||
def filelist_type_callback(_attr, _old, new):
|
||||
@ -148,20 +124,20 @@ def create():
|
||||
ranges.append(re.findall(r"\b\d+\b", line))
|
||||
config.filelist_ranges = ranges
|
||||
|
||||
filelist_ranges_textareainput = TextAreaInput(title="ranges:", height=100)
|
||||
filelist_ranges_textareainput = TextAreaInput(title="ranges:", rows=1)
|
||||
filelist_ranges_textareainput.on_change("value", filelist_ranges_textareainput_callback)
|
||||
|
||||
# ---- crystal
|
||||
def crystal_sample_textinput_callback(_attr, _old, new):
|
||||
config.crystal_sample = new
|
||||
|
||||
crystal_sample_textinput = TextInput(title="Sample Name:")
|
||||
crystal_sample_textinput = TextInput(title="Sample Name:", width=290)
|
||||
crystal_sample_textinput.on_change("value", crystal_sample_textinput_callback)
|
||||
|
||||
def lambda_textinput_callback(_attr, _old, new):
|
||||
config.crystal_lambda = new
|
||||
|
||||
lambda_textinput = TextInput(title="lambda:", width=145)
|
||||
lambda_textinput = TextInput(title="lambda:", width=100)
|
||||
lambda_textinput.on_change("value", lambda_textinput_callback)
|
||||
|
||||
def ub_textareainput_callback(_attr, _old, new):
|
||||
@ -173,19 +149,19 @@ def create():
|
||||
def zeroOM_textinput_callback(_attr, _old, new):
|
||||
config.crystal_zeroOM = new
|
||||
|
||||
zeroOM_textinput = TextInput(title="zeroOM:", width=145)
|
||||
zeroOM_textinput = TextInput(title="zeroOM:", width=100)
|
||||
zeroOM_textinput.on_change("value", zeroOM_textinput_callback)
|
||||
|
||||
def zeroSTT_textinput_callback(_attr, _old, new):
|
||||
config.crystal_zeroSTT = new
|
||||
|
||||
zeroSTT_textinput = TextInput(title="zeroSTT:", width=145)
|
||||
zeroSTT_textinput = TextInput(title="zeroSTT:", width=100)
|
||||
zeroSTT_textinput.on_change("value", zeroSTT_textinput_callback)
|
||||
|
||||
def zeroCHI_textinput_callback(_attr, _old, new):
|
||||
config.crystal_zeroCHI = new
|
||||
|
||||
zeroCHI_textinput = TextInput(title="zeroCHI:", width=145)
|
||||
zeroCHI_textinput = TextInput(title="zeroCHI:", width=100)
|
||||
zeroCHI_textinput.on_change("value", zeroCHI_textinput_callback)
|
||||
|
||||
# ---- DataFactory
|
||||
@ -200,9 +176,21 @@ def create():
|
||||
def dataFactory_dist1_textinput_callback(_attr, _old, new):
|
||||
config.dataFactory_dist1 = new
|
||||
|
||||
dataFactory_dist1_textinput = TextInput(title="dist1:", width=145)
|
||||
dataFactory_dist1_textinput = TextInput(title="dist1:", width=75)
|
||||
dataFactory_dist1_textinput.on_change("value", dataFactory_dist1_textinput_callback)
|
||||
|
||||
def dataFactory_dist2_textinput_callback(_attr, _old, new):
|
||||
config.dataFactory_dist2 = new
|
||||
|
||||
dataFactory_dist2_textinput = TextInput(title="dist2:", width=75)
|
||||
dataFactory_dist2_textinput.on_change("value", dataFactory_dist2_textinput_callback)
|
||||
|
||||
def dataFactory_dist3_textinput_callback(_attr, _old, new):
|
||||
config.dataFactory_dist3 = new
|
||||
|
||||
dataFactory_dist3_textinput = TextInput(title="dist3:", width=75)
|
||||
dataFactory_dist3_textinput.on_change("value", dataFactory_dist3_textinput_callback)
|
||||
|
||||
# ---- BackgroundProcessor
|
||||
|
||||
# ---- DetectorEfficency
|
||||
@ -221,42 +209,42 @@ def create():
|
||||
def threshold_textinput_callback(_attr, _old, new):
|
||||
config.threshold = new
|
||||
|
||||
threshold_textinput = TextInput(title="Threshold:")
|
||||
threshold_textinput = TextInput(title="Threshold:", width=145)
|
||||
threshold_textinput.on_change("value", threshold_textinput_callback)
|
||||
|
||||
# ---- shell
|
||||
def shell_textinput_callback(_attr, _old, new):
|
||||
config.shell = new
|
||||
|
||||
shell_textinput = TextInput(title="Shell:")
|
||||
shell_textinput = TextInput(title="Shell:", width=145)
|
||||
shell_textinput.on_change("value", shell_textinput_callback)
|
||||
|
||||
# ---- steepness
|
||||
def steepness_textinput_callback(_attr, _old, new):
|
||||
config.steepness = new
|
||||
|
||||
steepness_textinput = TextInput(title="Steepness:")
|
||||
steepness_textinput = TextInput(title="Steepness:", width=145)
|
||||
steepness_textinput.on_change("value", steepness_textinput_callback)
|
||||
|
||||
# ---- duplicateDistance
|
||||
def duplicateDistance_textinput_callback(_attr, _old, new):
|
||||
config.duplicateDistance = new
|
||||
|
||||
duplicateDistance_textinput = TextInput(title="Duplicate Distance:")
|
||||
duplicateDistance_textinput = TextInput(title="Duplicate Distance:", width=145)
|
||||
duplicateDistance_textinput.on_change("value", duplicateDistance_textinput_callback)
|
||||
|
||||
# ---- maxequal
|
||||
def maxequal_textinput_callback(_attr, _old, new):
|
||||
config.maxequal = new
|
||||
|
||||
maxequal_textinput = TextInput(title="Max Equal:")
|
||||
maxequal_textinput = TextInput(title="Max Equal:", width=145)
|
||||
maxequal_textinput.on_change("value", maxequal_textinput_callback)
|
||||
|
||||
# ---- window
|
||||
def aps_window_textinput_callback(_attr, _old, new):
|
||||
config.aps_window = dict(zip(("x", "y", "z"), re.findall(r"\b\d+\b", new)))
|
||||
|
||||
aps_window_textinput = TextInput(title="Window (x, y, z):")
|
||||
aps_window_textinput = TextInput(title="Window (x, y, z):", width=145)
|
||||
aps_window_textinput.on_change("value", aps_window_textinput_callback)
|
||||
|
||||
# Adaptive Dynamic Mask Integration (adaptivedynamic)
|
||||
@ -264,56 +252,56 @@ def create():
|
||||
def adm_window_textinput_callback(_attr, _old, new):
|
||||
config.adm_window = dict(zip(("x", "y", "z"), re.findall(r"\b\d+\b", new)))
|
||||
|
||||
adm_window_textinput = TextInput(title="Window (x, y, z):")
|
||||
adm_window_textinput = TextInput(title="Window (x, y, z):", width=145)
|
||||
adm_window_textinput.on_change("value", adm_window_textinput_callback)
|
||||
|
||||
# ---- border
|
||||
def border_textinput_callback(_attr, _old, new):
|
||||
config.border = dict(zip(("x", "y", "z"), re.findall(r"\b\d+\b", new)))
|
||||
|
||||
border_textinput = TextInput(title="Border (x, y, z):")
|
||||
border_textinput = TextInput(title="Border (x, y, z):", width=145)
|
||||
border_textinput.on_change("value", border_textinput_callback)
|
||||
|
||||
# ---- minWindow
|
||||
def minWindow_textinput_callback(_attr, _old, new):
|
||||
config.minWindow = dict(zip(("x", "y", "z"), re.findall(r"\b\d+\b", new)))
|
||||
|
||||
minWindow_textinput = TextInput(title="Min Window (x, y, z):")
|
||||
minWindow_textinput = TextInput(title="Min Window (x, y, z):", width=145)
|
||||
minWindow_textinput.on_change("value", minWindow_textinput_callback)
|
||||
|
||||
# ---- reflectionFile
|
||||
def reflectionFile_textinput_callback(_attr, _old, new):
|
||||
config.reflectionFile = new
|
||||
|
||||
reflectionFile_textinput = TextInput(title="Reflection File:")
|
||||
reflectionFile_textinput = TextInput(title="Reflection File:", width=145)
|
||||
reflectionFile_textinput.on_change("value", reflectionFile_textinput_callback)
|
||||
|
||||
# ---- targetMonitor
|
||||
def targetMonitor_textinput_callback(_attr, _old, new):
|
||||
config.targetMonitor = new
|
||||
|
||||
targetMonitor_textinput = TextInput(title="Target Monitor:")
|
||||
targetMonitor_textinput = TextInput(title="Target Monitor:", width=145)
|
||||
targetMonitor_textinput.on_change("value", targetMonitor_textinput_callback)
|
||||
|
||||
# ---- smoothSize
|
||||
def smoothSize_textinput_callback(_attr, _old, new):
|
||||
config.smoothSize = new
|
||||
|
||||
smoothSize_textinput = TextInput(title="Smooth Size:")
|
||||
smoothSize_textinput = TextInput(title="Smooth Size:", width=145)
|
||||
smoothSize_textinput.on_change("value", smoothSize_textinput_callback)
|
||||
|
||||
# ---- loop
|
||||
def loop_textinput_callback(_attr, _old, new):
|
||||
config.loop = new
|
||||
|
||||
loop_textinput = TextInput(title="Loop:")
|
||||
loop_textinput = TextInput(title="Loop:", width=145)
|
||||
loop_textinput.on_change("value", loop_textinput_callback)
|
||||
|
||||
# ---- minPeakCount
|
||||
def minPeakCount_textinput_callback(_attr, _old, new):
|
||||
config.minPeakCount = new
|
||||
|
||||
minPeakCount_textinput = TextInput(title="Min Peak Count:")
|
||||
minPeakCount_textinput = TextInput(title="Min Peak Count:", width=145)
|
||||
minPeakCount_textinput.on_change("value", minPeakCount_textinput_callback)
|
||||
|
||||
# ---- displacementCurve
|
||||
@ -324,95 +312,82 @@ def create():
|
||||
config.displacementCurve = maps
|
||||
|
||||
displacementCurve_textinput = TextAreaInput(
|
||||
title="Displacement Curve (twotheta, x, y):", height=100
|
||||
title="Displ. Curve (2θ, x, y):", width=145, height=100
|
||||
)
|
||||
displacementCurve_textinput.on_change("value", displacementCurve_textinput_callback)
|
||||
|
||||
def mode_radio_button_group_callback(active):
|
||||
if active == 0:
|
||||
def algorithm_tabs_callback(_attr, _old, new):
|
||||
if new == 0:
|
||||
config.algorithm = "adaptivemaxcog"
|
||||
set_active_widgets("adaptivemaxcog")
|
||||
else:
|
||||
config.algorithm = "adaptivedynamic"
|
||||
set_active_widgets("adaptivedynamic")
|
||||
|
||||
mode_radio_button_group = RadioButtonGroup(
|
||||
labels=["Adaptive Peak Detection", "Adaptive Dynamic Integration"], active=0
|
||||
algorithm_params = Tabs(
|
||||
tabs=[
|
||||
Panel(
|
||||
child=column(
|
||||
row(threshold_textinput, shell_textinput, steepness_textinput),
|
||||
row(duplicateDistance_textinput, maxequal_textinput, aps_window_textinput),
|
||||
),
|
||||
title="Peak Search",
|
||||
),
|
||||
Panel(
|
||||
child=column(
|
||||
row(adm_window_textinput, border_textinput, minWindow_textinput),
|
||||
row(reflectionFile_textinput, targetMonitor_textinput, smoothSize_textinput),
|
||||
row(loop_textinput, minPeakCount_textinput, displacementCurve_textinput),
|
||||
),
|
||||
title="Dynamic Integration",
|
||||
),
|
||||
]
|
||||
)
|
||||
mode_radio_button_group.on_click(mode_radio_button_group_callback)
|
||||
set_active_widgets("adaptivemaxcog")
|
||||
algorithm_params.on_change("active", algorithm_tabs_callback)
|
||||
|
||||
def process_button_callback():
|
||||
with tempfile.TemporaryDirectory() as temp_dir:
|
||||
temp_file = temp_dir + "/temp.xml"
|
||||
temp_file = temp_dir + "/config.xml"
|
||||
config.save_as(temp_file)
|
||||
if doc.anatric_path:
|
||||
pyzebra.anatric(temp_file, anatric_path=doc.anatric_path)
|
||||
else:
|
||||
pyzebra.anatric(temp_file)
|
||||
pyzebra.anatric(temp_file, anatric_path=doc.anatric_path, cwd=temp_dir)
|
||||
|
||||
with open(config.logfile) as f_log:
|
||||
with open(os.path.join(temp_dir, config.logfile)) as f_log:
|
||||
output_log.value = f_log.read()
|
||||
|
||||
with open(os.path.join(temp_dir, config.reflectionPrinter_file)) as f_res:
|
||||
output_res.value = f_res.read()
|
||||
|
||||
process_button = Button(label="Process", button_type="primary")
|
||||
process_button.on_click(process_button_callback)
|
||||
|
||||
output_log = TextAreaInput(title="Logfile output:", height=600, disabled=True)
|
||||
output_config = TextAreaInput(title="Current config:", height=600, width=400, disabled=True)
|
||||
output_log = TextAreaInput(title="Logfile output:", height=320, width=465, disabled=True)
|
||||
output_res = TextAreaInput(title="Result output:", height=320, width=465, disabled=True)
|
||||
output_config = TextAreaInput(title="Current config:", height=320, width=465, disabled=True)
|
||||
|
||||
general_params_layout = column(
|
||||
row(logfile_textinput, logfile_verbosity_select),
|
||||
row(column(Spacer(height=2), upload_div), upload_button),
|
||||
row(logfile_textinput, logfile_verbosity),
|
||||
row(filelist_type, filelist_format_textinput),
|
||||
filelist_datapath_textinput,
|
||||
filelist_ranges_textareainput,
|
||||
crystal_sample_textinput,
|
||||
row(lambda_textinput, zeroOM_textinput),
|
||||
row(zeroSTT_textinput, zeroCHI_textinput),
|
||||
row(crystal_sample_textinput, lambda_textinput),
|
||||
ub_textareainput,
|
||||
row(dataFactory_implementation_select, dataFactory_dist1_textinput),
|
||||
reflectionPrinter_format_select,
|
||||
row(zeroOM_textinput, zeroSTT_textinput, zeroCHI_textinput),
|
||||
row(
|
||||
dataFactory_implementation_select,
|
||||
dataFactory_dist1_textinput,
|
||||
dataFactory_dist2_textinput,
|
||||
dataFactory_dist3_textinput,
|
||||
),
|
||||
row(reflectionPrinter_format_select),
|
||||
)
|
||||
|
||||
algorithm_params_layout = column(
|
||||
mode_radio_button_group,
|
||||
row(
|
||||
column(
|
||||
threshold_textinput,
|
||||
shell_textinput,
|
||||
steepness_textinput,
|
||||
duplicateDistance_textinput,
|
||||
maxequal_textinput,
|
||||
aps_window_textinput,
|
||||
),
|
||||
column(
|
||||
adm_window_textinput,
|
||||
border_textinput,
|
||||
minWindow_textinput,
|
||||
reflectionFile_textinput,
|
||||
targetMonitor_textinput,
|
||||
smoothSize_textinput,
|
||||
loop_textinput,
|
||||
minPeakCount_textinput,
|
||||
displacementCurve_textinput,
|
||||
),
|
||||
),
|
||||
)
|
||||
|
||||
tab_layout = column(
|
||||
row(column(Spacer(height=2), upload_div), upload_button),
|
||||
row(
|
||||
general_params_layout,
|
||||
algorithm_params_layout,
|
||||
column(row(output_config, output_log), row(process_button)),
|
||||
),
|
||||
tab_layout = row(
|
||||
general_params_layout,
|
||||
column(output_config, algorithm_params, row(process_button)),
|
||||
column(output_log, output_res),
|
||||
)
|
||||
|
||||
async def update_config():
|
||||
with tempfile.TemporaryDirectory() as temp_dir:
|
||||
temp_file = temp_dir + "/debug.xml"
|
||||
config.save_as(temp_file)
|
||||
with open(temp_file) as f_config:
|
||||
output_config.value = f_config.read()
|
||||
output_config.value = config.tostring()
|
||||
|
||||
doc.add_periodic_callback(update_config, 1000)
|
||||
|
||||
|
582
pyzebra/app/panel_hdf_param_study.py
Normal file
582
pyzebra/app/panel_hdf_param_study.py
Normal file
@ -0,0 +1,582 @@
|
||||
import base64
|
||||
import io
|
||||
import os
|
||||
|
||||
import numpy as np
|
||||
from bokeh.io import curdoc
|
||||
from bokeh.layouts import column, gridplot, row
|
||||
from bokeh.models import (
|
||||
BasicTicker,
|
||||
BoxZoomTool,
|
||||
Button,
|
||||
CellEditor,
|
||||
CheckboxGroup,
|
||||
ColumnDataSource,
|
||||
DataRange1d,
|
||||
DataTable,
|
||||
Div,
|
||||
FileInput,
|
||||
Grid,
|
||||
MultiSelect,
|
||||
NumberEditor,
|
||||
NumberFormatter,
|
||||
Image,
|
||||
LinearAxis,
|
||||
LinearColorMapper,
|
||||
Panel,
|
||||
PanTool,
|
||||
Plot,
|
||||
Range1d,
|
||||
ResetTool,
|
||||
Scatter,
|
||||
Select,
|
||||
Spinner,
|
||||
TableColumn,
|
||||
Tabs,
|
||||
Title,
|
||||
WheelZoomTool,
|
||||
)
|
||||
from bokeh.palettes import Cividis256, Greys256, Plasma256 # pylint: disable=E0611
|
||||
|
||||
import pyzebra
|
||||
|
||||
IMAGE_W = 256
|
||||
IMAGE_H = 128
|
||||
IMAGE_PLOT_W = int(IMAGE_W * 2) + 52
|
||||
IMAGE_PLOT_H = int(IMAGE_H * 2) + 27
|
||||
|
||||
|
||||
def create():
|
||||
doc = curdoc()
|
||||
zebra_data = []
|
||||
det_data = {}
|
||||
cami_meta = {}
|
||||
|
||||
num_formatter = NumberFormatter(format="0.00", nan_format="")
|
||||
|
||||
def file_select_update():
|
||||
if data_source.value == "proposal number":
|
||||
proposal_path = proposal_textinput.name
|
||||
if proposal_path:
|
||||
file_list = []
|
||||
for file in os.listdir(proposal_path):
|
||||
if file.endswith(".hdf"):
|
||||
file_list.append((os.path.join(proposal_path, file), file))
|
||||
file_select.options = file_list
|
||||
else:
|
||||
file_select.options = []
|
||||
|
||||
else: # "cami file"
|
||||
if not cami_meta:
|
||||
file_select.options = []
|
||||
return
|
||||
|
||||
file_list = cami_meta["filelist"]
|
||||
file_select.options = [(entry, os.path.basename(entry)) for entry in file_list]
|
||||
|
||||
def data_source_callback(_attr, _old, _new):
|
||||
file_select_update()
|
||||
|
||||
data_source = Select(
|
||||
title="Data Source:",
|
||||
value="proposal number",
|
||||
options=["proposal number", "cami file"],
|
||||
width=210,
|
||||
)
|
||||
data_source.on_change("value", data_source_callback)
|
||||
|
||||
doc.add_periodic_callback(file_select_update, 5000)
|
||||
|
||||
def proposal_textinput_callback(_attr, _old, _new):
|
||||
file_select_update()
|
||||
|
||||
proposal_textinput = doc.proposal_textinput
|
||||
proposal_textinput.on_change("name", proposal_textinput_callback)
|
||||
|
||||
def upload_button_callback(_attr, _old, new):
|
||||
nonlocal cami_meta
|
||||
with io.StringIO(base64.b64decode(new).decode()) as file:
|
||||
cami_meta = pyzebra.parse_h5meta(file)
|
||||
data_source.value = "cami file"
|
||||
file_select_update()
|
||||
|
||||
upload_div = Div(text="or upload .cami file:", margin=(5, 5, 0, 5))
|
||||
upload_button = FileInput(accept=".cami", width=200)
|
||||
upload_button.on_change("value", upload_button_callback)
|
||||
|
||||
file_select = MultiSelect(title="Available .hdf files:", width=210, height=320)
|
||||
|
||||
def _init_datatable():
|
||||
file_list = []
|
||||
for scan in zebra_data:
|
||||
file_list.append(os.path.basename(scan["original_filename"]))
|
||||
|
||||
scan_table_source.data.update(
|
||||
file=file_list,
|
||||
param=[None] * len(zebra_data),
|
||||
frame=[None] * len(zebra_data),
|
||||
x_pos=[None] * len(zebra_data),
|
||||
y_pos=[None] * len(zebra_data),
|
||||
)
|
||||
scan_table_source.selected.indices = []
|
||||
scan_table_source.selected.indices = [0]
|
||||
|
||||
param_select.value = "user defined"
|
||||
|
||||
def _update_table():
|
||||
frame = []
|
||||
x_pos = []
|
||||
y_pos = []
|
||||
for scan in zebra_data:
|
||||
if "fit" in scan:
|
||||
framei = scan["fit"]["frame"]
|
||||
x_posi = scan["fit"]["x_pos"]
|
||||
y_posi = scan["fit"]["y_pos"]
|
||||
else:
|
||||
framei = x_posi = y_posi = None
|
||||
|
||||
frame.append(framei)
|
||||
x_pos.append(x_posi)
|
||||
y_pos.append(y_posi)
|
||||
|
||||
scan_table_source.data.update(frame=frame, x_pos=x_pos, y_pos=y_pos)
|
||||
|
||||
def file_open_button_callback():
|
||||
nonlocal zebra_data
|
||||
zebra_data = []
|
||||
for f_name in file_select.value:
|
||||
zebra_data.append(pyzebra.read_detector_data(f_name))
|
||||
|
||||
_init_datatable()
|
||||
|
||||
file_open_button = Button(label="Open New", width=100)
|
||||
file_open_button.on_click(file_open_button_callback)
|
||||
|
||||
def file_append_button_callback():
|
||||
for f_name in file_select.value:
|
||||
zebra_data.append(pyzebra.read_detector_data(f_name))
|
||||
|
||||
_init_datatable()
|
||||
|
||||
file_append_button = Button(label="Append", width=100)
|
||||
file_append_button.on_click(file_append_button_callback)
|
||||
|
||||
# Scan select
|
||||
def scan_table_select_callback(_attr, old, new):
|
||||
nonlocal det_data
|
||||
|
||||
if not new:
|
||||
# skip empty selections
|
||||
return
|
||||
|
||||
# Avoid selection of multiple indicies (via Shift+Click or Ctrl+Click)
|
||||
if len(new) > 1:
|
||||
# drop selection to the previous one
|
||||
scan_table_source.selected.indices = old
|
||||
return
|
||||
|
||||
if len(old) > 1:
|
||||
# skip unnecessary update caused by selection drop
|
||||
return
|
||||
|
||||
det_data = zebra_data[new[0]]
|
||||
|
||||
zebra_mode = det_data["zebra_mode"]
|
||||
if zebra_mode == "nb":
|
||||
metadata_table_source.data.update(geom=["normal beam"])
|
||||
else: # zebra_mode == "bi"
|
||||
metadata_table_source.data.update(geom=["bisecting"])
|
||||
|
||||
if "mf" in det_data:
|
||||
metadata_table_source.data.update(mf=[det_data["mf"][0]])
|
||||
else:
|
||||
metadata_table_source.data.update(mf=[None])
|
||||
|
||||
if "temp" in det_data:
|
||||
metadata_table_source.data.update(temp=[det_data["temp"][0]])
|
||||
else:
|
||||
metadata_table_source.data.update(temp=[None])
|
||||
|
||||
update_overview_plot()
|
||||
|
||||
def scan_table_source_callback(_attr, _old, _new):
|
||||
pass
|
||||
|
||||
scan_table_source = ColumnDataSource(dict(file=[], param=[], frame=[], x_pos=[], y_pos=[]))
|
||||
scan_table_source.selected.on_change("indices", scan_table_select_callback)
|
||||
scan_table_source.on_change("data", scan_table_source_callback)
|
||||
|
||||
scan_table = DataTable(
|
||||
source=scan_table_source,
|
||||
columns=[
|
||||
TableColumn(field="file", title="file", editor=CellEditor(), width=150),
|
||||
TableColumn(
|
||||
field="param",
|
||||
title="param",
|
||||
formatter=num_formatter,
|
||||
editor=NumberEditor(),
|
||||
width=50,
|
||||
),
|
||||
TableColumn(
|
||||
field="frame", title="Frame", formatter=num_formatter, editor=CellEditor(), width=70
|
||||
),
|
||||
TableColumn(
|
||||
field="x_pos", title="X", formatter=num_formatter, editor=CellEditor(), width=70
|
||||
),
|
||||
TableColumn(
|
||||
field="y_pos", title="Y", formatter=num_formatter, editor=CellEditor(), width=70
|
||||
),
|
||||
],
|
||||
width=470, # +60 because of the index column
|
||||
height=420,
|
||||
editable=True,
|
||||
autosize_mode="none",
|
||||
)
|
||||
|
||||
def param_select_callback(_attr, _old, new):
|
||||
if new == "user defined":
|
||||
param = [None] * len(zebra_data)
|
||||
else:
|
||||
# TODO: which value to take?
|
||||
param = [scan[new][0] for scan in zebra_data]
|
||||
|
||||
scan_table_source.data["param"] = param
|
||||
_update_param_plot()
|
||||
|
||||
param_select = Select(
|
||||
title="Parameter:",
|
||||
options=["user defined", "temp", "mf", "h", "k", "l"],
|
||||
value="user defined",
|
||||
width=145,
|
||||
)
|
||||
param_select.on_change("value", param_select_callback)
|
||||
|
||||
def update_overview_plot():
|
||||
h5_data = det_data["data"]
|
||||
n_im, n_y, n_x = h5_data.shape
|
||||
overview_x = np.mean(h5_data, axis=1)
|
||||
overview_y = np.mean(h5_data, axis=2)
|
||||
|
||||
# normalize for simpler colormapping
|
||||
overview_max_val = max(np.max(overview_x), np.max(overview_y))
|
||||
overview_x = 1000 * overview_x / overview_max_val
|
||||
overview_y = 1000 * overview_y / overview_max_val
|
||||
|
||||
overview_plot_x_image_source.data.update(image=[overview_x], dw=[n_x], dh=[n_im])
|
||||
overview_plot_y_image_source.data.update(image=[overview_y], dw=[n_y], dh=[n_im])
|
||||
|
||||
if proj_auto_checkbox.active:
|
||||
im_min = min(np.min(overview_x), np.min(overview_y))
|
||||
im_max = max(np.max(overview_x), np.max(overview_y))
|
||||
|
||||
proj_display_min_spinner.value = im_min
|
||||
proj_display_max_spinner.value = im_max
|
||||
|
||||
overview_plot_x_image_glyph.color_mapper.low = im_min
|
||||
overview_plot_y_image_glyph.color_mapper.low = im_min
|
||||
overview_plot_x_image_glyph.color_mapper.high = im_max
|
||||
overview_plot_y_image_glyph.color_mapper.high = im_max
|
||||
|
||||
frame_range.start = 0
|
||||
frame_range.end = n_im
|
||||
frame_range.reset_start = 0
|
||||
frame_range.reset_end = n_im
|
||||
frame_range.bounds = (0, n_im)
|
||||
|
||||
scan_motor = det_data["scan_motor"]
|
||||
overview_plot_y.axis[1].axis_label = f"Scanning motor, {scan_motor}"
|
||||
|
||||
var = det_data[scan_motor]
|
||||
var_start = var[0]
|
||||
var_end = var[-1] + (var[-1] - var[0]) / (n_im - 1)
|
||||
|
||||
scanning_motor_range.start = var_start
|
||||
scanning_motor_range.end = var_end
|
||||
scanning_motor_range.reset_start = var_start
|
||||
scanning_motor_range.reset_end = var_end
|
||||
# handle both, ascending and descending sequences
|
||||
scanning_motor_range.bounds = (min(var_start, var_end), max(var_start, var_end))
|
||||
|
||||
# shared frame ranges
|
||||
frame_range = Range1d(0, 1, bounds=(0, 1))
|
||||
scanning_motor_range = Range1d(0, 1, bounds=(0, 1))
|
||||
|
||||
det_x_range = Range1d(0, IMAGE_W, bounds=(0, IMAGE_W))
|
||||
overview_plot_x = Plot(
|
||||
title=Title(text="Projections on X-axis"),
|
||||
x_range=det_x_range,
|
||||
y_range=frame_range,
|
||||
extra_y_ranges={"scanning_motor": scanning_motor_range},
|
||||
plot_height=400,
|
||||
plot_width=IMAGE_PLOT_W - 3,
|
||||
)
|
||||
|
||||
# ---- tools
|
||||
wheelzoomtool = WheelZoomTool(maintain_focus=False)
|
||||
overview_plot_x.toolbar.logo = None
|
||||
overview_plot_x.add_tools(
|
||||
PanTool(), BoxZoomTool(), wheelzoomtool, ResetTool(),
|
||||
)
|
||||
overview_plot_x.toolbar.active_scroll = wheelzoomtool
|
||||
|
||||
# ---- axes
|
||||
overview_plot_x.add_layout(LinearAxis(axis_label="Coordinate X, pix"), place="below")
|
||||
overview_plot_x.add_layout(
|
||||
LinearAxis(axis_label="Frame", major_label_orientation="vertical"), place="left"
|
||||
)
|
||||
|
||||
# ---- grid lines
|
||||
overview_plot_x.add_layout(Grid(dimension=0, ticker=BasicTicker()))
|
||||
overview_plot_x.add_layout(Grid(dimension=1, ticker=BasicTicker()))
|
||||
|
||||
# ---- rgba image glyph
|
||||
overview_plot_x_image_source = ColumnDataSource(
|
||||
dict(image=[np.zeros((1, 1), dtype="float32")], x=[0], y=[0], dw=[IMAGE_W], dh=[1])
|
||||
)
|
||||
|
||||
overview_plot_x_image_glyph = Image(image="image", x="x", y="y", dw="dw", dh="dh")
|
||||
overview_plot_x.add_glyph(
|
||||
overview_plot_x_image_source, overview_plot_x_image_glyph, name="image_glyph"
|
||||
)
|
||||
|
||||
det_y_range = Range1d(0, IMAGE_H, bounds=(0, IMAGE_H))
|
||||
overview_plot_y = Plot(
|
||||
title=Title(text="Projections on Y-axis"),
|
||||
x_range=det_y_range,
|
||||
y_range=frame_range,
|
||||
extra_y_ranges={"scanning_motor": scanning_motor_range},
|
||||
plot_height=400,
|
||||
plot_width=IMAGE_PLOT_H + 22,
|
||||
)
|
||||
|
||||
# ---- tools
|
||||
wheelzoomtool = WheelZoomTool(maintain_focus=False)
|
||||
overview_plot_y.toolbar.logo = None
|
||||
overview_plot_y.add_tools(
|
||||
PanTool(), BoxZoomTool(), wheelzoomtool, ResetTool(),
|
||||
)
|
||||
overview_plot_y.toolbar.active_scroll = wheelzoomtool
|
||||
|
||||
# ---- axes
|
||||
overview_plot_y.add_layout(LinearAxis(axis_label="Coordinate Y, pix"), place="below")
|
||||
overview_plot_y.add_layout(
|
||||
LinearAxis(
|
||||
y_range_name="scanning_motor",
|
||||
axis_label="Scanning motor",
|
||||
major_label_orientation="vertical",
|
||||
),
|
||||
place="right",
|
||||
)
|
||||
|
||||
# ---- grid lines
|
||||
overview_plot_y.add_layout(Grid(dimension=0, ticker=BasicTicker()))
|
||||
overview_plot_y.add_layout(Grid(dimension=1, ticker=BasicTicker()))
|
||||
|
||||
# ---- rgba image glyph
|
||||
overview_plot_y_image_source = ColumnDataSource(
|
||||
dict(image=[np.zeros((1, 1), dtype="float32")], x=[0], y=[0], dw=[IMAGE_H], dh=[1])
|
||||
)
|
||||
|
||||
overview_plot_y_image_glyph = Image(image="image", x="x", y="y", dw="dw", dh="dh")
|
||||
overview_plot_y.add_glyph(
|
||||
overview_plot_y_image_source, overview_plot_y_image_glyph, name="image_glyph"
|
||||
)
|
||||
|
||||
cmap_dict = {
|
||||
"gray": Greys256,
|
||||
"gray_reversed": Greys256[::-1],
|
||||
"plasma": Plasma256,
|
||||
"cividis": Cividis256,
|
||||
}
|
||||
|
||||
def colormap_callback(_attr, _old, new):
|
||||
overview_plot_x_image_glyph.color_mapper = LinearColorMapper(palette=cmap_dict[new])
|
||||
overview_plot_y_image_glyph.color_mapper = LinearColorMapper(palette=cmap_dict[new])
|
||||
|
||||
colormap = Select(title="Colormap:", options=list(cmap_dict.keys()), width=210)
|
||||
colormap.on_change("value", colormap_callback)
|
||||
colormap.value = "plasma"
|
||||
|
||||
PROJ_STEP = 1
|
||||
|
||||
def proj_auto_checkbox_callback(state):
|
||||
if state:
|
||||
proj_display_min_spinner.disabled = True
|
||||
proj_display_max_spinner.disabled = True
|
||||
else:
|
||||
proj_display_min_spinner.disabled = False
|
||||
proj_display_max_spinner.disabled = False
|
||||
|
||||
update_overview_plot()
|
||||
|
||||
proj_auto_checkbox = CheckboxGroup(
|
||||
labels=["Projections Intensity Range"], active=[0], width=145, margin=[10, 5, 0, 5]
|
||||
)
|
||||
proj_auto_checkbox.on_click(proj_auto_checkbox_callback)
|
||||
|
||||
def proj_display_max_spinner_callback(_attr, _old_value, new_value):
|
||||
proj_display_min_spinner.high = new_value - PROJ_STEP
|
||||
overview_plot_x_image_glyph.color_mapper.high = new_value
|
||||
overview_plot_y_image_glyph.color_mapper.high = new_value
|
||||
|
||||
proj_display_max_spinner = Spinner(
|
||||
low=0 + PROJ_STEP,
|
||||
value=1,
|
||||
step=PROJ_STEP,
|
||||
disabled=bool(proj_auto_checkbox.active),
|
||||
width=100,
|
||||
height=31,
|
||||
)
|
||||
proj_display_max_spinner.on_change("value", proj_display_max_spinner_callback)
|
||||
|
||||
def proj_display_min_spinner_callback(_attr, _old_value, new_value):
|
||||
proj_display_max_spinner.low = new_value + PROJ_STEP
|
||||
overview_plot_x_image_glyph.color_mapper.low = new_value
|
||||
overview_plot_y_image_glyph.color_mapper.low = new_value
|
||||
|
||||
proj_display_min_spinner = Spinner(
|
||||
low=0,
|
||||
high=1 - PROJ_STEP,
|
||||
value=0,
|
||||
step=PROJ_STEP,
|
||||
disabled=bool(proj_auto_checkbox.active),
|
||||
width=100,
|
||||
height=31,
|
||||
)
|
||||
proj_display_min_spinner.on_change("value", proj_display_min_spinner_callback)
|
||||
|
||||
metadata_table_source = ColumnDataSource(dict(geom=[""], temp=[None], mf=[None]))
|
||||
metadata_table = DataTable(
|
||||
source=metadata_table_source,
|
||||
columns=[
|
||||
TableColumn(field="geom", title="Geometry", width=100),
|
||||
TableColumn(field="temp", title="Temperature", formatter=num_formatter, width=100),
|
||||
TableColumn(field="mf", title="Magnetic Field", formatter=num_formatter, width=100),
|
||||
],
|
||||
width=300,
|
||||
height=50,
|
||||
autosize_mode="none",
|
||||
index_position=None,
|
||||
)
|
||||
|
||||
def _update_param_plot():
|
||||
x = []
|
||||
y = []
|
||||
fit_param = fit_param_select.value
|
||||
for s, p in zip(zebra_data, scan_table_source.data["param"]):
|
||||
if "fit" in s and fit_param:
|
||||
x.append(p)
|
||||
y.append(s["fit"][fit_param])
|
||||
param_plot_scatter_source.data.update(x=x, y=y)
|
||||
|
||||
# Parameter plot
|
||||
param_plot = Plot(x_range=DataRange1d(), y_range=DataRange1d(), plot_height=400, plot_width=700)
|
||||
|
||||
param_plot.add_layout(LinearAxis(axis_label="Fit parameter"), place="left")
|
||||
param_plot.add_layout(LinearAxis(axis_label="Parameter"), place="below")
|
||||
|
||||
param_plot.add_layout(Grid(dimension=0, ticker=BasicTicker()))
|
||||
param_plot.add_layout(Grid(dimension=1, ticker=BasicTicker()))
|
||||
|
||||
param_plot_scatter_source = ColumnDataSource(dict(x=[], y=[]))
|
||||
param_plot.add_glyph(param_plot_scatter_source, Scatter(x="x", y="y"))
|
||||
|
||||
param_plot.add_tools(PanTool(), WheelZoomTool(), ResetTool())
|
||||
param_plot.toolbar.logo = None
|
||||
|
||||
def fit_param_select_callback(_attr, _old, _new):
|
||||
_update_param_plot()
|
||||
|
||||
fit_param_select = Select(title="Fit parameter", options=[], width=145)
|
||||
fit_param_select.on_change("value", fit_param_select_callback)
|
||||
|
||||
def proc_all_button_callback():
|
||||
for scan in zebra_data:
|
||||
pyzebra.fit_event(
|
||||
scan,
|
||||
int(np.floor(frame_range.start)),
|
||||
int(np.ceil(frame_range.end)),
|
||||
int(np.floor(det_y_range.start)),
|
||||
int(np.ceil(det_y_range.end)),
|
||||
int(np.floor(det_x_range.start)),
|
||||
int(np.ceil(det_x_range.end)),
|
||||
)
|
||||
|
||||
_update_table()
|
||||
|
||||
for scan in zebra_data:
|
||||
if "fit" in scan:
|
||||
options = list(scan["fit"].keys())
|
||||
fit_param_select.options = options
|
||||
fit_param_select.value = options[0]
|
||||
break
|
||||
|
||||
_update_param_plot()
|
||||
|
||||
proc_all_button = Button(label="Process All", button_type="primary", width=145)
|
||||
proc_all_button.on_click(proc_all_button_callback)
|
||||
|
||||
def proc_button_callback():
|
||||
pyzebra.fit_event(
|
||||
det_data,
|
||||
int(np.floor(frame_range.start)),
|
||||
int(np.ceil(frame_range.end)),
|
||||
int(np.floor(det_y_range.start)),
|
||||
int(np.ceil(det_y_range.end)),
|
||||
int(np.floor(det_x_range.start)),
|
||||
int(np.ceil(det_x_range.end)),
|
||||
)
|
||||
|
||||
_update_table()
|
||||
|
||||
for scan in zebra_data:
|
||||
if "fit" in scan:
|
||||
options = list(scan["fit"].keys())
|
||||
fit_param_select.options = options
|
||||
fit_param_select.value = options[0]
|
||||
break
|
||||
|
||||
_update_param_plot()
|
||||
|
||||
proc_button = Button(label="Process Current", width=145)
|
||||
proc_button.on_click(proc_button_callback)
|
||||
|
||||
layout_controls = row(
|
||||
colormap,
|
||||
column(proj_auto_checkbox, row(proj_display_min_spinner, proj_display_max_spinner)),
|
||||
proc_button,
|
||||
proc_all_button,
|
||||
)
|
||||
|
||||
layout_overview = column(
|
||||
gridplot(
|
||||
[[overview_plot_x, overview_plot_y]],
|
||||
toolbar_options=dict(logo=None),
|
||||
merge_tools=True,
|
||||
toolbar_location="left",
|
||||
),
|
||||
layout_controls,
|
||||
)
|
||||
|
||||
# Plot tabs
|
||||
plots = Tabs(
|
||||
tabs=[
|
||||
Panel(child=layout_overview, title="single scan"),
|
||||
Panel(child=column(param_plot, row(fit_param_select)), title="parameter plot"),
|
||||
]
|
||||
)
|
||||
|
||||
# Final layout
|
||||
import_layout = column(
|
||||
data_source,
|
||||
upload_div,
|
||||
upload_button,
|
||||
file_select,
|
||||
row(file_open_button, file_append_button),
|
||||
)
|
||||
|
||||
scan_layout = column(scan_table, row(param_select, metadata_table))
|
||||
|
||||
tab_layout = column(row(import_layout, scan_layout, plots))
|
||||
|
||||
return Panel(child=tab_layout, title="hdf param study")
|
@ -3,17 +3,23 @@ import io
|
||||
import os
|
||||
|
||||
import numpy as np
|
||||
from bokeh.events import MouseEnter
|
||||
from bokeh.io import curdoc
|
||||
from bokeh.layouts import column, gridplot, row
|
||||
from bokeh.models import (
|
||||
BasicTicker,
|
||||
BoxEditTool,
|
||||
BoxZoomTool,
|
||||
Button,
|
||||
CheckboxGroup,
|
||||
ColumnDataSource,
|
||||
DataRange1d,
|
||||
DataTable,
|
||||
Div,
|
||||
FileInput,
|
||||
Grid,
|
||||
MultiSelect,
|
||||
NumberFormatter,
|
||||
HoverTool,
|
||||
Image,
|
||||
Line,
|
||||
@ -22,17 +28,16 @@ from bokeh.models import (
|
||||
Panel,
|
||||
PanTool,
|
||||
Plot,
|
||||
RadioButtonGroup,
|
||||
Range1d,
|
||||
Rect,
|
||||
ResetTool,
|
||||
Select,
|
||||
Slider,
|
||||
Spacer,
|
||||
Spinner,
|
||||
TextAreaInput,
|
||||
TextInput,
|
||||
TableColumn,
|
||||
Tabs,
|
||||
Title,
|
||||
Toggle,
|
||||
WheelZoomTool,
|
||||
)
|
||||
from bokeh.palettes import Cividis256, Greys256, Plasma256 # pylint: disable=E0611
|
||||
@ -41,38 +46,102 @@ import pyzebra
|
||||
|
||||
IMAGE_W = 256
|
||||
IMAGE_H = 128
|
||||
IMAGE_PLOT_W = int(IMAGE_W * 2.5)
|
||||
IMAGE_PLOT_H = int(IMAGE_H * 2.5)
|
||||
|
||||
PROPOSAL_PATH = "/afs/psi.ch/project/sinqdata/2020/zebra/"
|
||||
IMAGE_PLOT_W = int(IMAGE_W * 2) + 52
|
||||
IMAGE_PLOT_H = int(IMAGE_H * 2) + 27
|
||||
|
||||
|
||||
def create():
|
||||
doc = curdoc()
|
||||
det_data = {}
|
||||
roi_selection = {}
|
||||
cami_meta = {}
|
||||
|
||||
def proposal_textinput_callback(_attr, _old, new):
|
||||
full_proposal_path = os.path.join(PROPOSAL_PATH, new.strip())
|
||||
file_list = []
|
||||
for file in os.listdir(full_proposal_path):
|
||||
if file.endswith(".hdf"):
|
||||
file_list.append((os.path.join(full_proposal_path, file), file))
|
||||
filelist.options = file_list
|
||||
filelist.value = file_list[0][0]
|
||||
num_formatter = NumberFormatter(format="0.00", nan_format="")
|
||||
|
||||
proposal_textinput = TextInput(title="Enter proposal number:", default_size=145)
|
||||
proposal_textinput.on_change("value", proposal_textinput_callback)
|
||||
def file_select_update():
|
||||
if data_source.value == "proposal number":
|
||||
proposal_path = proposal_textinput.name
|
||||
if proposal_path:
|
||||
file_list = []
|
||||
for file in os.listdir(proposal_path):
|
||||
if file.endswith(".hdf"):
|
||||
file_list.append((os.path.join(proposal_path, file), file))
|
||||
file_select.options = file_list
|
||||
else:
|
||||
file_select.options = []
|
||||
|
||||
def upload_button_callback(_attr, _old, new):
|
||||
else: # "cami file"
|
||||
if not cami_meta:
|
||||
file_select.options = []
|
||||
return
|
||||
|
||||
file_list = cami_meta["filelist"]
|
||||
file_select.options = [(entry, os.path.basename(entry)) for entry in file_list]
|
||||
|
||||
def data_source_callback(_attr, _old, _new):
|
||||
file_select_update()
|
||||
|
||||
data_source = Select(
|
||||
title="Data Source:",
|
||||
value="proposal number",
|
||||
options=["proposal number", "cami file"],
|
||||
width=210,
|
||||
)
|
||||
data_source.on_change("value", data_source_callback)
|
||||
|
||||
doc.add_periodic_callback(file_select_update, 5000)
|
||||
|
||||
def proposal_textinput_callback(_attr, _old, _new):
|
||||
file_select_update()
|
||||
|
||||
proposal_textinput = doc.proposal_textinput
|
||||
proposal_textinput.on_change("name", proposal_textinput_callback)
|
||||
|
||||
def upload_cami_button_callback(_attr, _old, new):
|
||||
nonlocal cami_meta
|
||||
with io.StringIO(base64.b64decode(new).decode()) as file:
|
||||
h5meta_list = pyzebra.parse_h5meta(file)
|
||||
file_list = h5meta_list["filelist"]
|
||||
filelist.options = [(entry, os.path.basename(entry)) for entry in file_list]
|
||||
filelist.value = file_list[0]
|
||||
cami_meta = pyzebra.parse_h5meta(file)
|
||||
data_source.value = "cami file"
|
||||
file_select_update()
|
||||
|
||||
upload_div = Div(text="or upload .cami file:", margin=(5, 5, 0, 5))
|
||||
upload_button = FileInput(accept=".cami")
|
||||
upload_button.on_change("value", upload_button_callback)
|
||||
upload_cami_div = Div(text="or upload .cami file:", margin=(5, 5, 0, 5))
|
||||
upload_cami_button = FileInput(accept=".cami", width=200)
|
||||
upload_cami_button.on_change("value", upload_cami_button_callback)
|
||||
|
||||
def _open_file(file, cami_meta):
|
||||
nonlocal det_data
|
||||
det_data = pyzebra.read_detector_data(file, cami_meta)
|
||||
|
||||
index_spinner.value = 0
|
||||
index_spinner.high = det_data["data"].shape[0] - 1
|
||||
index_slider.end = det_data["data"].shape[0] - 1
|
||||
|
||||
zebra_mode = det_data["zebra_mode"]
|
||||
if zebra_mode == "nb":
|
||||
metadata_table_source.data.update(geom=["normal beam"])
|
||||
else: # zebra_mode == "bi"
|
||||
metadata_table_source.data.update(geom=["bisecting"])
|
||||
|
||||
update_image(0)
|
||||
update_overview_plot()
|
||||
|
||||
def upload_hdf_button_callback(_attr, _old, new):
|
||||
_open_file(io.BytesIO(base64.b64decode(new)), None)
|
||||
|
||||
upload_hdf_div = Div(text="or upload .hdf file:", margin=(5, 5, 0, 5))
|
||||
upload_hdf_button = FileInput(accept=".hdf", width=200)
|
||||
upload_hdf_button.on_change("value", upload_hdf_button_callback)
|
||||
|
||||
def file_open_button_callback():
|
||||
if not file_select.value:
|
||||
return
|
||||
|
||||
if data_source.value == "proposal number":
|
||||
_open_file(file_select.value[0], None)
|
||||
else:
|
||||
_open_file(file_select.value[0], cami_meta)
|
||||
|
||||
file_open_button = Button(label="Open New", width=100)
|
||||
file_open_button.on_click(file_open_button_callback)
|
||||
|
||||
def update_image(index=None):
|
||||
if index is None:
|
||||
@ -91,7 +160,7 @@ def create():
|
||||
)
|
||||
image_source.data.update(image=[current_image])
|
||||
|
||||
if auto_toggle.active:
|
||||
if main_auto_checkbox.active:
|
||||
im_min = np.min(current_image)
|
||||
im_max = np.max(current_image)
|
||||
|
||||
@ -102,29 +171,62 @@ def create():
|
||||
image_glyph.color_mapper.high = im_max
|
||||
|
||||
if "mf" in det_data:
|
||||
mf_spinner.value = det_data["mf"][index]
|
||||
metadata_table_source.data.update(mf=[det_data["mf"][index]])
|
||||
else:
|
||||
mf_spinner.value = None
|
||||
metadata_table_source.data.update(mf=[None])
|
||||
|
||||
if "temp" in det_data:
|
||||
temp_spinner.value = det_data["temp"][index]
|
||||
metadata_table_source.data.update(temp=[det_data["temp"][index]])
|
||||
else:
|
||||
temp_spinner.value = None
|
||||
metadata_table_source.data.update(temp=[None])
|
||||
|
||||
gamma, nu = calculate_pol(det_data, index)
|
||||
omega = np.ones((IMAGE_H, IMAGE_W)) * det_data["omega"][index]
|
||||
image_source.data.update(gamma=[gamma], nu=[nu], omega=[omega])
|
||||
|
||||
# update detector center angles
|
||||
det_c_x = int(IMAGE_W / 2)
|
||||
det_c_y = int(IMAGE_H / 2)
|
||||
if det_data["zebra_mode"] == "nb":
|
||||
gamma_c = gamma[det_c_y, det_c_x]
|
||||
nu_c = nu[det_c_y, det_c_x]
|
||||
omega_c = omega[det_c_y, det_c_x]
|
||||
chi_c = None
|
||||
phi_c = None
|
||||
|
||||
else: # zebra_mode == "bi"
|
||||
wave = det_data["wave"]
|
||||
ddist = det_data["ddist"]
|
||||
gammad = det_data["gamma"][index]
|
||||
om = det_data["omega"][index]
|
||||
ch = det_data["chi"][index]
|
||||
ph = det_data["phi"][index]
|
||||
nud = det_data["nu"]
|
||||
|
||||
nu_c = 0
|
||||
chi_c, phi_c, gamma_c, omega_c = pyzebra.ang_proc(
|
||||
wave, ddist, gammad, om, ch, ph, nud, det_c_x, det_c_y
|
||||
)
|
||||
|
||||
detcenter_table_source.data.update(
|
||||
gamma=[gamma_c], nu=[nu_c], omega=[omega_c], chi=[chi_c], phi=[phi_c],
|
||||
)
|
||||
|
||||
def update_overview_plot():
|
||||
h5_data = det_data["data"]
|
||||
n_im, n_y, n_x = h5_data.shape
|
||||
overview_x = np.mean(h5_data, axis=1)
|
||||
overview_y = np.mean(h5_data, axis=2)
|
||||
|
||||
overview_plot_x_image_source.data.update(image=[overview_x], dw=[n_x])
|
||||
overview_plot_y_image_source.data.update(image=[overview_y], dw=[n_y])
|
||||
# normalize for simpler colormapping
|
||||
overview_max_val = max(np.max(overview_x), np.max(overview_y))
|
||||
overview_x = 1000 * overview_x / overview_max_val
|
||||
overview_y = 1000 * overview_y / overview_max_val
|
||||
|
||||
if proj_auto_toggle.active:
|
||||
overview_plot_x_image_source.data.update(image=[overview_x], dw=[n_x], dh=[n_im])
|
||||
overview_plot_y_image_source.data.update(image=[overview_y], dw=[n_y], dh=[n_im])
|
||||
|
||||
if proj_auto_checkbox.active:
|
||||
im_min = min(np.min(overview_x), np.min(overview_y))
|
||||
im_max = max(np.max(overview_x), np.max(overview_y))
|
||||
|
||||
@ -136,48 +238,76 @@ def create():
|
||||
overview_plot_x_image_glyph.color_mapper.high = im_max
|
||||
overview_plot_y_image_glyph.color_mapper.high = im_max
|
||||
|
||||
if frame_button_group.active == 0: # Frame
|
||||
overview_plot_x.axis[1].axis_label = "Frame"
|
||||
overview_plot_y.axis[1].axis_label = "Frame"
|
||||
frame_range.start = 0
|
||||
frame_range.end = n_im
|
||||
frame_range.reset_start = 0
|
||||
frame_range.reset_end = n_im
|
||||
frame_range.bounds = (0, n_im)
|
||||
|
||||
overview_plot_x_image_source.data.update(y=[0], dh=[n_im])
|
||||
overview_plot_y_image_source.data.update(y=[0], dh=[n_im])
|
||||
scan_motor = det_data["scan_motor"]
|
||||
overview_plot_y.axis[1].axis_label = f"Scanning motor, {scan_motor}"
|
||||
|
||||
elif frame_button_group.active == 1: # Variable angle
|
||||
scan_motor = det_data["scan_motor"]
|
||||
overview_plot_x.axis[1].axis_label = scan_motor
|
||||
overview_plot_y.axis[1].axis_label = scan_motor
|
||||
var = det_data[scan_motor]
|
||||
var_start = var[0]
|
||||
var_end = var[-1] + (var[-1] - var[0]) / (n_im - 1)
|
||||
|
||||
var = det_data[scan_motor]
|
||||
var_start = var[0]
|
||||
var_end = (var[-1] - var[0]) * n_im / (n_im - 1)
|
||||
overview_plot_x_image_source.data.update(y=[var_start], dh=[var_end])
|
||||
overview_plot_y_image_source.data.update(y=[var_start], dh=[var_end])
|
||||
scanning_motor_range.start = var_start
|
||||
scanning_motor_range.end = var_end
|
||||
scanning_motor_range.reset_start = var_start
|
||||
scanning_motor_range.reset_end = var_end
|
||||
# handle both, ascending and descending sequences
|
||||
scanning_motor_range.bounds = (min(var_start, var_end), max(var_start, var_end))
def filelist_callback(_attr, _old, new):
nonlocal det_data
det_data = pyzebra.read_detector_data(new)
gamma = image_source.data["gamma"][0]
gamma_start = gamma[0, 0]
gamma_end = gamma[0, -1]

index_spinner.value = 0
index_spinner.high = det_data["data"].shape[0] - 1
gamma_range.start = gamma_start
gamma_range.end = gamma_end
gamma_range.reset_start = gamma_start
gamma_range.reset_end = gamma_end
gamma_range.bounds = (min(gamma_start, gamma_end), max(gamma_start, gamma_end))

zebra_mode = det_data["zebra_mode"]
if zebra_mode == "nb":
geometry_textinput.value = "normal beam"
else: # zebra_mode == "bi"
geometry_textinput.value = "bisecting"
nu = image_source.data["nu"][0]
nu_start = nu[0, 0]
nu_end = nu[-1, 0]

update_image(0)
update_overview_plot()
nu_range.start = nu_start
nu_range.end = nu_end
nu_range.reset_start = nu_start
nu_range.reset_end = nu_end
nu_range.bounds = (min(nu_start, nu_end), max(nu_start, nu_end))

filelist = Select(title="Available .hdf files:")
filelist.on_change("value", filelist_callback)
def file_select_callback(_attr, old, new):
if not new:
# skip empty selections
return

def index_spinner_callback(_attr, _old, new):
# Avoid selection of multiple indices (via Shift+Click or Ctrl+Click)
if len(new) > 1:
# drop selection to the previous one
file_select.value = old
return

if len(old) > 1:
# skip unnecessary update caused by selection drop
return

file_open_button_callback()

file_select = MultiSelect(title="Available .hdf files:", width=210, height=250)
file_select.on_change("value", file_select_callback)

def index_callback(_attr, _old, new):
update_image(new)

index_spinner = Spinner(title="Image index:", value=0, low=0)
index_spinner.on_change("value", index_spinner_callback)
index_slider = Slider(value=0, start=0, end=1, show_value=False, width=400)

index_spinner = Spinner(title="Image index:", value=0, low=0, width=100)
index_spinner.on_change("value", index_callback)

index_slider.js_link("value_throttled", index_spinner, "value")
index_spinner.js_link("value", index_slider, "value")
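The slider and spinner are kept in sync purely on the client via `js_link`, so dragging one updates the other without a server round trip. A stand-alone sketch of that pattern (not part of the diff; widget ranges and sizes here are arbitrary):

```python
from bokeh.io import curdoc
from bokeh.layouts import column
from bokeh.models import Slider, Spinner

spinner = Spinner(title="Image index:", value=0, low=0, high=99, width=100)
slider = Slider(value=0, start=0, end=99, show_value=False, width=400)

# slider -> spinner only on release (throttled), spinner -> slider immediately
slider.js_link("value_throttled", spinner, "value")
spinner.js_link("value", slider, "value")

curdoc().add_root(column(spinner, slider))
```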
plot = Plot(
x_range=Range1d(0, IMAGE_W, bounds=(0, IMAGE_W)),
@@ -232,6 +362,15 @@ def create():
image_glyph = Image(image="image", x="x", y="y", dw="dw", dh="dh")
plot.add_glyph(image_source, image_glyph, name="image_glyph")

# calculate hkl-indices of first mouse entry
def mouse_enter_callback(_event):
if det_data and np.array_equal(image_source.data["h"][0], np.zeros((1, 1))):
index = index_spinner.value
h, k, l = calculate_hkl(det_data, index)
image_source.data.update(h=[h], k=[k], l=[l])

plot.on_event(MouseEnter, mouse_enter_callback)

# ---- projections
proj_v = Plot(
x_range=plot.x_range,
@@ -310,15 +449,20 @@ def create():
)
plot.toolbar.active_scroll = wheelzoomtool

# shared frame range
frame_range = DataRange1d()
# shared frame ranges
frame_range = Range1d(0, 1, bounds=(0, 1))
scanning_motor_range = Range1d(0, 1, bounds=(0, 1))

det_x_range = Range1d(0, IMAGE_W, bounds=(0, IMAGE_W))
gamma_range = Range1d(0, 1, bounds=(0, 1))
overview_plot_x = Plot(
title=Title(text="Projections on X-axis"),
x_range=det_x_range,
y_range=frame_range,
plot_height=400,
plot_width=IMAGE_PLOT_W,
extra_x_ranges={"gamma": gamma_range},
extra_y_ranges={"scanning_motor": scanning_motor_range},
plot_height=450,
plot_width=IMAGE_PLOT_W - 3,
)

# ---- tools
@@ -331,6 +475,9 @@ def create():

# ---- axes
overview_plot_x.add_layout(LinearAxis(axis_label="Coordinate X, pix"), place="below")
overview_plot_x.add_layout(
LinearAxis(x_range_name="gamma", axis_label="Gamma, deg"), place="above"
)
overview_plot_x.add_layout(
LinearAxis(axis_label="Frame", major_label_orientation="vertical"), place="left"
)
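The overview plots carry two coordinate systems per axis (detector pixels plus gamma/nu or the scanning motor). That is done with `extra_x_ranges`/`extra_y_ranges` and a second `LinearAxis` bound to the named range. A minimal self-contained sketch of the technique with the Bokeh 2.x API used in this repo (the ranges and labels are placeholders, not the real detector geometry):

```python
from bokeh.io import curdoc
from bokeh.models import LinearAxis, Range1d
from bokeh.plotting import figure

p = figure(x_range=Range1d(0, 256), plot_height=300, plot_width=500)
p.line([0, 256], [0, 10])

# second x-range, e.g. a detector angle spanning the same pixel extent
p.extra_x_ranges = {"gamma": Range1d(20.0, 50.0)}
p.add_layout(LinearAxis(x_range_name="gamma", axis_label="Gamma, deg"), place="above")

curdoc().add_root(p)
```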
@@ -350,12 +497,15 @@ def create():
)

det_y_range = Range1d(0, IMAGE_H, bounds=(0, IMAGE_H))
nu_range = Range1d(0, 1, bounds=(0, 1))
overview_plot_y = Plot(
title=Title(text="Projections on Y-axis"),
x_range=det_y_range,
y_range=frame_range,
plot_height=400,
plot_width=IMAGE_PLOT_H,
extra_x_ranges={"nu": nu_range},
extra_y_ranges={"scanning_motor": scanning_motor_range},
plot_height=450,
plot_width=IMAGE_PLOT_H + 22,
)

# ---- tools
@@ -368,8 +518,14 @@ def create():

# ---- axes
overview_plot_y.add_layout(LinearAxis(axis_label="Coordinate Y, pix"), place="below")
overview_plot_y.add_layout(LinearAxis(x_range_name="nu", axis_label="Nu, deg"), place="above")
overview_plot_y.add_layout(
LinearAxis(axis_label="Frame", major_label_orientation="vertical"), place="left"
LinearAxis(
y_range_name="scanning_motor",
axis_label="Scanning motor",
major_label_orientation="vertical",
),
place="right",
)

# ---- grid lines
@@ -386,16 +542,10 @@ def create():
overview_plot_y_image_source, overview_plot_y_image_glyph, name="image_glyph"
)

def frame_button_group_callback(_active):
update_overview_plot()

frame_button_group = RadioButtonGroup(labels=["Frames", "Variable Angle"], active=0)
frame_button_group.on_click(frame_button_group_callback)

roi_avg_plot = Plot(
x_range=DataRange1d(),
y_range=DataRange1d(),
plot_height=200,
plot_height=150,
plot_width=IMAGE_PLOT_W,
toolbar_location="left",
)
@@ -426,13 +576,13 @@ def create():
overview_plot_x_image_glyph.color_mapper = LinearColorMapper(palette=cmap_dict[new])
overview_plot_y_image_glyph.color_mapper = LinearColorMapper(palette=cmap_dict[new])

colormap = Select(title="Colormap:", options=list(cmap_dict.keys()), default_size=145)
colormap = Select(title="Colormap:", options=list(cmap_dict.keys()), width=210)
colormap.on_change("value", colormap_callback)
colormap.value = "plasma"

STEP = 1
# ---- colormap auto toggle button
def auto_toggle_callback(state):

def main_auto_checkbox_callback(state):
if state:
display_min_spinner.disabled = True
display_max_spinner.disabled = True
@@ -442,45 +592,43 @@ def create():

update_image()

auto_toggle = Toggle(
label="Main Auto Range", active=True, button_type="default", default_size=125
main_auto_checkbox = CheckboxGroup(
labels=["Frame Intensity Range"], active=[0], width=145, margin=[10, 5, 0, 5]
)
auto_toggle.on_click(auto_toggle_callback)
main_auto_checkbox.on_click(main_auto_checkbox_callback)

# ---- colormap display max value
def display_max_spinner_callback(_attr, _old_value, new_value):
display_min_spinner.high = new_value - STEP
image_glyph.color_mapper.high = new_value

display_max_spinner = Spinner(
title="Max Value:",
low=0 + STEP,
value=1,
step=STEP,
disabled=auto_toggle.active,
default_size=80,
disabled=bool(main_auto_checkbox.active),
width=100,
height=31,
)
display_max_spinner.on_change("value", display_max_spinner_callback)

# ---- colormap display min value
def display_min_spinner_callback(_attr, _old_value, new_value):
display_max_spinner.low = new_value + STEP
image_glyph.color_mapper.low = new_value

display_min_spinner = Spinner(
title="Min Value:",
low=0,
high=1 - STEP,
value=0,
step=STEP,
disabled=auto_toggle.active,
default_size=80,
disabled=bool(main_auto_checkbox.active),
width=100,
height=31,
)
display_min_spinner.on_change("value", display_min_spinner_callback)
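The display min/max spinners constrain each other so the minimum can never reach the maximum: each callback shrinks the allowed range of the other spinner by one STEP before touching the color mapper. The same pattern in isolation (a sketch without the color mapper, with STEP=1 as above):

```python
from bokeh.io import curdoc
from bokeh.layouts import row
from bokeh.models import Spinner

STEP = 1
max_spinner = Spinner(title="Max Value:", low=0 + STEP, value=10, step=STEP, width=100)
min_spinner = Spinner(title="Min Value:", low=0, high=10 - STEP, value=0, step=STEP, width=100)

def max_callback(_attr, _old, new):
    # keep the minimum strictly below the new maximum
    min_spinner.high = new - STEP

def min_callback(_attr, _old, new):
    # keep the maximum strictly above the new minimum
    max_spinner.low = new + STEP

max_spinner.on_change("value", max_callback)
min_spinner.on_change("value", min_callback)

curdoc().add_root(row(min_spinner, max_spinner))
```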
PROJ_STEP = 0.1
# ---- proj colormap auto toggle button
def proj_auto_toggle_callback(state):
PROJ_STEP = 1

def proj_auto_checkbox_callback(state):
if state:
proj_display_min_spinner.disabled = True
proj_display_max_spinner.disabled = True
@@ -490,102 +638,221 @@ def create():

update_overview_plot()

proj_auto_toggle = Toggle(
label="Proj Auto Range", active=True, button_type="default", default_size=125
proj_auto_checkbox = CheckboxGroup(
labels=["Projections Intensity Range"], active=[0], width=145, margin=[10, 5, 0, 5]
)
proj_auto_toggle.on_click(proj_auto_toggle_callback)
proj_auto_checkbox.on_click(proj_auto_checkbox_callback)

# ---- proj colormap display max value
def proj_display_max_spinner_callback(_attr, _old_value, new_value):
proj_display_min_spinner.high = new_value - PROJ_STEP
overview_plot_x_image_glyph.color_mapper.high = new_value
overview_plot_y_image_glyph.color_mapper.high = new_value

proj_display_max_spinner = Spinner(
title="Max Value:",
low=0 + PROJ_STEP,
value=1,
step=PROJ_STEP,
disabled=proj_auto_toggle.active,
default_size=80,
disabled=bool(proj_auto_checkbox.active),
width=100,
height=31,
)
proj_display_max_spinner.on_change("value", proj_display_max_spinner_callback)

# ---- proj colormap display min value
def proj_display_min_spinner_callback(_attr, _old_value, new_value):
proj_display_max_spinner.low = new_value + PROJ_STEP
overview_plot_x_image_glyph.color_mapper.low = new_value
overview_plot_y_image_glyph.color_mapper.low = new_value

proj_display_min_spinner = Spinner(
title="Min Value:",
low=0,
high=1 - PROJ_STEP,
value=0,
step=PROJ_STEP,
disabled=proj_auto_toggle.active,
default_size=80,
disabled=bool(proj_auto_checkbox.active),
width=100,
height=31,
)
proj_display_min_spinner.on_change("value", proj_display_min_spinner_callback)

def hkl_button_callback():
index = index_spinner.value
h, k, l = calculate_hkl(det_data, index)
image_source.data.update(h=[h], k=[k], l=[l])
events_data = dict(
wave=[],
ddist=[],
cell=[],
frame=[],
x_pos=[],
y_pos=[],
intensity=[],
snr_cnts=[],
gamma=[],
omega=[],
chi=[],
phi=[],
nu=[],
)
doc.events_data = events_data

hkl_button = Button(label="Calculate hkl (slow)")
hkl_button.on_click(hkl_button_callback)
events_table_source = ColumnDataSource(events_data)
events_table = DataTable(
source=events_table_source,
columns=[
TableColumn(field="frame", title="Frame", formatter=num_formatter, width=70),
TableColumn(field="x_pos", title="X", formatter=num_formatter, width=70),
TableColumn(field="y_pos", title="Y", formatter=num_formatter, width=70),
TableColumn(field="intensity", title="Intensity", formatter=num_formatter, width=70),
TableColumn(field="gamma", title="Gamma", formatter=num_formatter, width=70),
TableColumn(field="omega", title="Omega", formatter=num_formatter, width=70),
TableColumn(field="chi", title="Chi", formatter=num_formatter, width=70),
TableColumn(field="phi", title="Phi", formatter=num_formatter, width=70),
TableColumn(field="nu", title="Nu", formatter=num_formatter, width=70),
],
height=150,
width=630,
autosize_mode="none",
index_position=None,
)
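`num_formatter` is defined elsewhere in the module; it is presumably a shared `NumberFormatter` for the numeric columns. A hedged sketch of an equivalent table with the formatter spelled out (the two-decimal format string and the sample values are assumptions, not taken from the diff):

```python
from bokeh.io import curdoc
from bokeh.models import ColumnDataSource, DataTable, NumberFormatter, TableColumn

num_formatter = NumberFormatter(format="0.00")  # assumed definition
source = ColumnDataSource(dict(frame=[12.3], intensity=[456.7], gamma=[31.42]))

table = DataTable(
    source=source,
    columns=[
        TableColumn(field="frame", title="Frame", formatter=num_formatter, width=70),
        TableColumn(field="intensity", title="Intensity", formatter=num_formatter, width=70),
        TableColumn(field="gamma", title="Gamma", formatter=num_formatter, width=70),
    ],
    height=150,
    width=250,
    autosize_mode="none",
    index_position=None,
)
curdoc().add_root(table)
```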
selection_list = TextAreaInput(rows=7)
detcenter_table_source = ColumnDataSource(dict(gamma=[], omega=[], chi=[], phi=[], nu=[]))
detcenter_table = DataTable(
source=detcenter_table_source,
columns=[
TableColumn(field="gamma", title="Gamma", formatter=num_formatter, width=70),
TableColumn(field="omega", title="Omega", formatter=num_formatter, width=70),
TableColumn(field="chi", title="Chi", formatter=num_formatter, width=70),
TableColumn(field="phi", title="Phi", formatter=num_formatter, width=70),
TableColumn(field="nu", title="Nu", formatter=num_formatter, width=70),
],
height=150,
width=350,
autosize_mode="none",
index_position=None,
)

def selection_button_callback():
nonlocal roi_selection
selection = [
int(np.floor(det_x_range.start)),
int(np.ceil(det_x_range.end)),
int(np.floor(det_y_range.start)),
int(np.ceil(det_y_range.end)),
def add_event_button_callback():
pyzebra.fit_event(
det_data,
int(np.floor(frame_range.start)),
int(np.ceil(frame_range.end)),
]
int(np.floor(det_y_range.start)),
int(np.ceil(det_y_range.end)),
int(np.floor(det_x_range.start)),
int(np.ceil(det_x_range.end)),
)

filename_id = filelist.value[-8:-4]
if filename_id in roi_selection:
roi_selection[f"{filename_id}"].append(selection)
else:
roi_selection[f"{filename_id}"] = [selection]
wave = det_data["wave"]
ddist = det_data["ddist"]
cell = det_data["cell"]

selection_list.value = str(roi_selection)
gamma = det_data["gamma"][0]
omega = det_data["omega"][0]
nu = det_data["nu"][0]
chi = det_data["chi"][0]
phi = det_data["phi"][0]

selection_button = Button(label="Add selection")
selection_button.on_click(selection_button_callback)
scan_motor = det_data["scan_motor"]
var_angle = det_data[scan_motor]

mf_spinner = Spinner(
title="Magnetic field:", format="0.00", width=145, disabled=True
snr_cnts = det_data["fit"]["snr"]
frC = det_data["fit"]["frame"]

var_F = var_angle[int(np.floor(frC))]
var_C = var_angle[int(np.ceil(frC))]
frStep = frC - np.floor(frC)
var_step = var_C - var_F
var_p = var_F + var_step * frStep

if scan_motor == "gamma":
gamma = var_p
elif scan_motor == "omega":
omega = var_p
elif scan_motor == "nu":
nu = var_p
elif scan_motor == "chi":
chi = var_p
elif scan_motor == "phi":
phi = var_p

intensity = det_data["fit"]["intensity"]
x_pos = det_data["fit"]["x_pos"]
y_pos = det_data["fit"]["y_pos"]

if det_data["zebra_mode"] == "nb":
chi = None
phi = None

events_data["wave"].append(wave)
events_data["ddist"].append(ddist)
events_data["cell"].append(cell)
events_data["frame"].append(frC)
events_data["x_pos"].append(x_pos)
events_data["y_pos"].append(y_pos)
events_data["intensity"].append(intensity)
events_data["snr_cnts"].append(snr_cnts)
events_data["gamma"].append(gamma)
events_data["omega"].append(omega)
events_data["chi"].append(chi)
events_data["phi"].append(phi)
events_data["nu"].append(nu)

events_table_source.data = events_data

add_event_button = Button(label="Add peak center", width=145)
add_event_button.on_click(add_event_button_callback)
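The fitted peak centre lands at a fractional frame index `frC`, so the scanning-motor angle is linearly interpolated between the two neighbouring frames before it replaces gamma/omega/nu/chi/phi. The same arithmetic as a small standalone helper with a worked example (the numbers are illustrative only, not from the diff):

```python
import numpy as np

def angle_at_fractional_frame(var_angle, frC):
    """Linearly interpolate the scan-motor angle at a fractional frame index."""
    var_F = var_angle[int(np.floor(frC))]  # angle at the frame below
    var_C = var_angle[int(np.ceil(frC))]   # angle at the frame above
    fr_step = frC - np.floor(frC)          # fractional part of the frame index
    return var_F + (var_C - var_F) * fr_step

omega = np.array([10.0, 10.2, 10.4, 10.6])     # one motor angle per frame
print(angle_at_fractional_frame(omega, 1.75))  # -> 10.35
```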
def remove_event_button_callback():
ind2remove = events_table_source.selected.indices
for value in events_data.values():
for ind in reversed(ind2remove):
del value[ind]

events_table_source.data = events_data

remove_event_button = Button(label="Remove peak center", width=145)
remove_event_button.on_click(remove_event_button_callback)

metadata_table_source = ColumnDataSource(dict(geom=[""], temp=[None], mf=[None]))
metadata_table = DataTable(
source=metadata_table_source,
columns=[
TableColumn(field="geom", title="Geometry", width=100),
TableColumn(field="temp", title="Temperature", formatter=num_formatter, width=100),
TableColumn(field="mf", title="Magnetic Field", formatter=num_formatter, width=100),
],
width=300,
height=50,
autosize_mode="none",
index_position=None,
)
temp_spinner = Spinner(title="Temperature:", format="0.00", width=145, disabled=True)
geometry_textinput = TextInput(title="Geometry:", disabled=True)

# Final layout
peak_tables = Tabs(
tabs=[
Panel(child=events_table, title="Actual peak center"),
Panel(child=detcenter_table, title="Peak in the detector center"),
]
)

import_layout = column(
data_source,
upload_cami_div,
upload_cami_button,
upload_hdf_div,
upload_hdf_button,
file_select,
file_open_button,
)

layout_image = column(gridplot([[proj_v, None], [plot, proj_h]], merge_tools=False))
colormap_layout = column(
row(colormap),
row(column(Spacer(height=19), auto_toggle), display_max_spinner, display_min_spinner),
row(
column(Spacer(height=19), proj_auto_toggle),
proj_display_max_spinner,
proj_display_min_spinner,
),
colormap,
main_auto_checkbox,
row(display_min_spinner, display_max_spinner),
proj_auto_checkbox,
row(proj_display_min_spinner, proj_display_max_spinner),
)
hkl_layout = column(geometry_textinput, hkl_button)
params_layout = row(mf_spinner, temp_spinner)

layout_controls = row(
column(selection_button, selection_list),
Spacer(width=20),
column(frame_button_group, colormap_layout),
Spacer(width=20),
column(index_spinner, params_layout, hkl_layout),
layout_controls = column(
row(metadata_table, index_spinner, column(Spacer(height=25), index_slider)),
row(column(add_event_button, remove_event_button), peak_tables),
)

layout_overview = column(
@@ -598,13 +865,8 @@ def create():
)

tab_layout = row(
column(
row(
proposal_textinput, filelist, Spacer(width=100), column(upload_div, upload_button),
),
layout_overview,
layout_controls,
),
column(import_layout, colormap_layout),
column(layout_overview, layout_controls),
column(roi_avg_plot, layout_image),
)

@@ -643,15 +905,10 @@ def calculate_hkl(det_data, index):

def calculate_pol(det_data, index):
gamma = np.empty(shape=(IMAGE_H, IMAGE_W))
nu = np.empty(shape=(IMAGE_H, IMAGE_W))

ddist = det_data["ddist"]
gammad = det_data["gamma"][index]
nud = det_data["nu"]

for xi in np.arange(IMAGE_W):
for yi in np.arange(IMAGE_H):
gamma[yi, xi], nu[yi, xi] = pyzebra.det2pol(ddist, gammad, nud, xi, yi)
yi, xi = np.ogrid[:IMAGE_H, :IMAGE_W]
gamma, nu = pyzebra.det2pol(ddist, gammad, nud, xi, yi)

return gamma, nu
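The per-pixel double loop over the detector is replaced by `np.ogrid`, which yields a column vector of row indices and a row vector of column indices that broadcast against each other, so a single vectorized call covers the whole image. A toy sketch of the same broadcasting trick with a stand-in function (not the real `pyzebra.det2pol`, and the image size is arbitrary):

```python
import numpy as np

IMAGE_H, IMAGE_W = 4, 6

def fake_det2pol(xi, yi):
    # stand-in for det2pol: any elementwise function of the pixel coordinates
    return 0.1 * xi + yi, 0.1 * xi - yi

# looped version
gamma_loop = np.empty((IMAGE_H, IMAGE_W))
nu_loop = np.empty((IMAGE_H, IMAGE_W))
for xi in range(IMAGE_W):
    for yi in range(IMAGE_H):
        gamma_loop[yi, xi], nu_loop[yi, xi] = fake_det2pol(xi, yi)

# broadcast version: yi has shape (IMAGE_H, 1), xi has shape (1, IMAGE_W)
yi, xi = np.ogrid[:IMAGE_H, :IMAGE_W]
gamma, nu = fake_det2pol(xi, yi)

assert np.allclose(gamma, gamma_loop) and np.allclose(nu, nu_loop)
```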
@ -6,11 +6,14 @@ import tempfile
|
||||
import types
|
||||
|
||||
import numpy as np
|
||||
from bokeh.io import curdoc
|
||||
from bokeh.layouts import column, row
|
||||
from bokeh.models import (
|
||||
BasicTicker,
|
||||
Button,
|
||||
CellEditor,
|
||||
CheckboxEditor,
|
||||
CheckboxGroup,
|
||||
ColumnDataSource,
|
||||
CustomJS,
|
||||
DataRange1d,
|
||||
@ -20,6 +23,7 @@ from bokeh.models import (
|
||||
FileInput,
|
||||
Grid,
|
||||
HoverTool,
|
||||
Image,
|
||||
Legend,
|
||||
Line,
|
||||
LinearAxis,
|
||||
@ -29,7 +33,7 @@ from bokeh.models import (
|
||||
Panel,
|
||||
PanTool,
|
||||
Plot,
|
||||
RadioButtonGroup,
|
||||
RadioGroup,
|
||||
ResetTool,
|
||||
Scatter,
|
||||
Select,
|
||||
@ -39,35 +43,37 @@ from bokeh.models import (
|
||||
TableColumn,
|
||||
Tabs,
|
||||
TextAreaInput,
|
||||
TextInput,
|
||||
Toggle,
|
||||
WheelZoomTool,
|
||||
Whisker,
|
||||
)
|
||||
from bokeh.palettes import Category10, Turbo256
|
||||
from bokeh.transform import linear_cmap
|
||||
from scipy import interpolate
|
||||
|
||||
import pyzebra
|
||||
from pyzebra.ccl_io import AREA_METHODS
|
||||
from pyzebra.ccl_process import AREA_METHODS
|
||||
|
||||
javaScript = """
|
||||
let j = 0;
|
||||
for (let i = 0; i < js_data.data['fname'].length; i++) {
|
||||
if (js_data.data['content'][i] === "") continue;
|
||||
|
||||
const blob = new Blob([js_data.data['content'][i]], {type: 'text/plain'})
|
||||
const link = document.createElement('a');
|
||||
document.body.appendChild(link);
|
||||
const url = window.URL.createObjectURL(blob);
|
||||
link.href = url;
|
||||
link.download = js_data.data['fname'][i];
|
||||
link.click();
|
||||
window.URL.revokeObjectURL(url);
|
||||
document.body.removeChild(link);
|
||||
setTimeout(function() {
|
||||
const blob = new Blob([js_data.data['content'][i]], {type: 'text/plain'})
|
||||
const link = document.createElement('a');
|
||||
document.body.appendChild(link);
|
||||
const url = window.URL.createObjectURL(blob);
|
||||
link.href = url;
|
||||
link.download = js_data.data['fname'][i] + js_data.data['ext'][i];
|
||||
link.click();
|
||||
window.URL.revokeObjectURL(url);
|
||||
document.body.removeChild(link);
|
||||
}, 100 * j)
|
||||
|
||||
j++;
|
||||
}
|
||||
"""
|
||||
|
||||
PROPOSAL_PATH = "/afs/psi.ch/project/sinqdata/2020/zebra/"
|
||||
|
||||
|
||||
def color_palette(n_colors):
|
||||
palette = itertools.cycle(Category10[10])
|
||||
@ -75,139 +81,206 @@ def color_palette(n_colors):
|
||||
|
||||
|
||||
def create():
|
||||
doc = curdoc()
|
||||
det_data = []
|
||||
fit_params = {}
|
||||
js_data = ColumnDataSource(data=dict(content=["", ""], fname=["", ""]))
|
||||
js_data = ColumnDataSource(data=dict(content=[""], fname=[""], ext=[""]))
|
||||
|
||||
def proposal_textinput_callback(_attr, _old, new):
|
||||
full_proposal_path = os.path.join(PROPOSAL_PATH, new.strip())
|
||||
dat_file_list = []
|
||||
for file in os.listdir(full_proposal_path):
|
||||
if file.endswith(".dat"):
|
||||
dat_file_list.append((os.path.join(full_proposal_path, file), file))
|
||||
file_select.options = dat_file_list
|
||||
def file_select_update_for_proposal():
|
||||
proposal_path = proposal_textinput.name
|
||||
if proposal_path:
|
||||
file_list = []
|
||||
for file in os.listdir(proposal_path):
|
||||
if file.endswith((".ccl", ".dat")):
|
||||
file_list.append((os.path.join(proposal_path, file), file))
|
||||
file_select.options = file_list
|
||||
file_open_button.disabled = False
|
||||
file_append_button.disabled = False
|
||||
else:
|
||||
file_select.options = []
|
||||
file_open_button.disabled = True
|
||||
file_append_button.disabled = True
|
||||
|
||||
proposal_textinput = TextInput(title="Proposal number:", default_size=200)
|
||||
proposal_textinput.on_change("value", proposal_textinput_callback)
|
||||
doc.add_periodic_callback(file_select_update_for_proposal, 5000)
|
||||
|
||||
def proposal_textinput_callback(_attr, _old, _new):
|
||||
file_select_update_for_proposal()
|
||||
|
||||
proposal_textinput = doc.proposal_textinput
|
||||
proposal_textinput.on_change("name", proposal_textinput_callback)
|
||||
|
||||
def _init_datatable():
|
||||
scan_list = [s["idx"] for s in det_data]
|
||||
export = [s["export"] for s in det_data]
|
||||
if param_select.value == "user defined":
|
||||
param = [None] * len(det_data)
|
||||
else:
|
||||
param = [scan[param_select.value] for scan in det_data]
|
||||
|
||||
file_list = []
|
||||
for scan in det_data:
|
||||
file_list.append(os.path.basename(scan["original_filename"]))
|
||||
|
||||
scan_table_source.data.update(
|
||||
file=file_list,
|
||||
scan=scan_list,
|
||||
param=[None] * len(scan_list),
|
||||
fit=[0] * len(scan_list),
|
||||
export=[True] * len(scan_list),
|
||||
file=file_list, scan=scan_list, param=param, fit=[0] * len(scan_list), export=export,
|
||||
)
|
||||
scan_table_source.selected.indices = []
|
||||
scan_table_source.selected.indices = [0]
|
||||
|
||||
param_select.value = "user defined"
|
||||
scan_motor_select.options = det_data[0]["scan_motors"]
|
||||
scan_motor_select.value = det_data[0]["scan_motor"]
|
||||
|
||||
def file_select_callback(_attr, _old, _new):
|
||||
pass
|
||||
merge_options = [(str(i), f"{i} ({idx})") for i, idx in enumerate(scan_list)]
|
||||
merge_from_select.options = merge_options
|
||||
merge_from_select.value = merge_options[0][0]
|
||||
|
||||
file_select = MultiSelect(title="Available .dat files:", default_size=200, height=250)
|
||||
file_select.on_change("value", file_select_callback)
|
||||
file_select = MultiSelect(title="Available .ccl/.dat files:", width=210, height=250)
|
||||
|
||||
def file_open_button_callback():
|
||||
nonlocal det_data
|
||||
det_data = []
|
||||
for f_name in file_select.value:
|
||||
with open(f_name) as file:
|
||||
new_data = []
|
||||
for f_path in file_select.value:
|
||||
with open(f_path) as file:
|
||||
f_name = os.path.basename(f_path)
|
||||
base, ext = os.path.splitext(f_name)
|
||||
if det_data:
|
||||
append_data = pyzebra.parse_1D(file, ext)
|
||||
pyzebra.normalize_dataset(append_data, monitor_spinner.value)
|
||||
det_data.extend(append_data)
|
||||
else:
|
||||
det_data = pyzebra.parse_1D(file, ext)
|
||||
pyzebra.normalize_dataset(det_data, monitor_spinner.value)
|
||||
js_data.data.update(fname=[base + ".comm", base + ".incomm"])
|
||||
try:
|
||||
file_data = pyzebra.parse_1D(file, ext)
|
||||
except:
|
||||
print(f"Error loading {f_name}")
|
||||
continue
|
||||
|
||||
_init_datatable()
|
||||
pyzebra.normalize_dataset(file_data, monitor_spinner.value)
|
||||
|
||||
file_open_button = Button(label="Open New", default_size=100)
|
||||
if not new_data: # first file
|
||||
new_data = file_data
|
||||
pyzebra.merge_duplicates(new_data)
|
||||
js_data.data.update(fname=[base])
|
||||
else:
|
||||
pyzebra.merge_datasets(new_data, file_data)
|
||||
|
||||
if new_data:
|
||||
det_data = new_data
|
||||
_init_datatable()
|
||||
append_upload_button.disabled = False
|
||||
|
||||
file_open_button = Button(label="Open New", width=100, disabled=True)
|
||||
file_open_button.on_click(file_open_button_callback)
|
||||
|
||||
def file_append_button_callback():
|
||||
for f_name in file_select.value:
|
||||
with open(f_name) as file:
|
||||
file_data = []
|
||||
for f_path in file_select.value:
|
||||
with open(f_path) as file:
|
||||
f_name = os.path.basename(f_path)
|
||||
_, ext = os.path.splitext(f_name)
|
||||
append_data = pyzebra.parse_1D(file, ext)
|
||||
try:
|
||||
file_data = pyzebra.parse_1D(file, ext)
|
||||
except:
|
||||
print(f"Error loading {f_name}")
|
||||
continue
|
||||
|
||||
pyzebra.normalize_dataset(append_data, monitor_spinner.value)
|
||||
det_data.extend(append_data)
|
||||
pyzebra.normalize_dataset(file_data, monitor_spinner.value)
|
||||
pyzebra.merge_datasets(det_data, file_data)
|
||||
|
||||
_init_datatable()
|
||||
if file_data:
|
||||
_init_datatable()
|
||||
|
||||
file_append_button = Button(label="Append", default_size=100)
|
||||
file_append_button = Button(label="Append", width=100, disabled=True)
|
||||
file_append_button.on_click(file_append_button_callback)
|
||||
|
||||
def upload_button_callback(_attr, _old, new):
|
||||
def upload_button_callback(_attr, _old, _new):
|
||||
nonlocal det_data
|
||||
det_data = []
|
||||
for f_str, f_name in zip(new, upload_button.filename):
|
||||
new_data = []
|
||||
for f_str, f_name in zip(upload_button.value, upload_button.filename):
|
||||
with io.StringIO(base64.b64decode(f_str).decode()) as file:
|
||||
base, ext = os.path.splitext(f_name)
|
||||
if det_data:
|
||||
append_data = pyzebra.parse_1D(file, ext)
|
||||
pyzebra.normalize_dataset(append_data, monitor_spinner.value)
|
||||
det_data.extend(append_data)
|
||||
else:
|
||||
det_data = pyzebra.parse_1D(file, ext)
|
||||
pyzebra.normalize_dataset(det_data, monitor_spinner.value)
|
||||
js_data.data.update(fname=[base + ".comm", base + ".incomm"])
|
||||
try:
|
||||
file_data = pyzebra.parse_1D(file, ext)
|
||||
except:
|
||||
print(f"Error loading {f_name}")
|
||||
continue
|
||||
|
||||
_init_datatable()
|
||||
pyzebra.normalize_dataset(file_data, monitor_spinner.value)
|
||||
|
||||
upload_div = Div(text="or upload new .dat files:", margin=(5, 5, 0, 5))
|
||||
upload_button = FileInput(accept=".dat", multiple=True, default_size=200)
|
||||
upload_button.on_change("value", upload_button_callback)
|
||||
if not new_data: # first file
|
||||
new_data = file_data
|
||||
pyzebra.merge_duplicates(new_data)
|
||||
js_data.data.update(fname=[base])
|
||||
else:
|
||||
pyzebra.merge_datasets(new_data, file_data)
|
||||
|
||||
def append_upload_button_callback(_attr, _old, new):
|
||||
for f_str, f_name in zip(new, append_upload_button.filename):
|
||||
if new_data:
|
||||
det_data = new_data
|
||||
_init_datatable()
|
||||
append_upload_button.disabled = False
|
||||
|
||||
upload_div = Div(text="or upload new .ccl/.dat files:", margin=(5, 5, 0, 5))
|
||||
upload_button = FileInput(accept=".ccl,.dat", multiple=True, width=200)
|
||||
# for on_change("value", ...) or on_change("filename", ...),
|
||||
# see https://github.com/bokeh/bokeh/issues/11461
|
||||
upload_button.on_change("filename", upload_button_callback)
|
||||
|
||||
def append_upload_button_callback(_attr, _old, _new):
|
||||
file_data = []
|
||||
for f_str, f_name in zip(append_upload_button.value, append_upload_button.filename):
|
||||
with io.StringIO(base64.b64decode(f_str).decode()) as file:
|
||||
_, ext = os.path.splitext(f_name)
|
||||
append_data = pyzebra.parse_1D(file, ext)
|
||||
try:
|
||||
file_data = pyzebra.parse_1D(file, ext)
|
||||
except:
|
||||
print(f"Error loading {f_name}")
|
||||
continue
|
||||
|
||||
pyzebra.normalize_dataset(append_data, monitor_spinner.value)
|
||||
det_data.extend(append_data)
|
||||
pyzebra.normalize_dataset(file_data, monitor_spinner.value)
|
||||
pyzebra.merge_datasets(det_data, file_data)
|
||||
|
||||
_init_datatable()
|
||||
if file_data:
|
||||
_init_datatable()
|
||||
|
||||
append_upload_div = Div(text="append extra files:", margin=(5, 5, 0, 5))
|
||||
append_upload_button = FileInput(accept=".dat", multiple=True, default_size=200)
|
||||
append_upload_button.on_change("value", append_upload_button_callback)
|
||||
append_upload_button = FileInput(accept=".ccl,.dat", multiple=True, width=200, disabled=True)
|
||||
# for on_change("value", ...) or on_change("filename", ...),
|
||||
# see https://github.com/bokeh/bokeh/issues/11461
|
||||
append_upload_button.on_change("filename", append_upload_button_callback)
|
||||
|
||||
def monitor_spinner_callback(_attr, _old, new):
|
||||
if det_data:
|
||||
pyzebra.normalize_dataset(det_data, new)
|
||||
_update_plot()
|
||||
_update_single_scan_plot()
|
||||
_update_overview()
|
||||
|
||||
monitor_spinner = Spinner(title="Monitor:", mode="int", value=100_000, low=1, width=145)
|
||||
monitor_spinner.on_change("value", monitor_spinner_callback)
|
||||
|
||||
def scan_motor_select_callback(_attr, _old, new):
|
||||
if det_data:
|
||||
for scan in det_data:
|
||||
scan["scan_motor"] = new
|
||||
_update_single_scan_plot()
|
||||
_update_overview()
|
||||
|
||||
scan_motor_select = Select(title="Scan motor:", options=[], width=145)
|
||||
scan_motor_select.on_change("value", scan_motor_select_callback)
|
||||
|
||||
def _update_table():
|
||||
fit_ok = [(1 if "fit" in scan else 0) for scan in det_data]
|
||||
scan_table_source.data.update(fit=fit_ok)
|
||||
export = [scan["export"] for scan in det_data]
|
||||
if param_select.value == "user defined":
|
||||
param = [None] * len(det_data)
|
||||
else:
|
||||
param = [scan[param_select.value] for scan in det_data]
|
||||
|
||||
def _update_plot():
|
||||
_update_single_scan_plot(_get_selected_scan())
|
||||
_update_overview()
|
||||
scan_table_source.data.update(fit=fit_ok, export=export, param=param)
|
||||
|
||||
def _update_single_scan_plot(scan):
|
||||
def _update_single_scan_plot():
|
||||
scan = _get_selected_scan()
|
||||
scan_motor = scan["scan_motor"]
|
||||
|
||||
y = scan["Counts"]
|
||||
y = scan["counts"]
|
||||
y_err = scan["counts_err"]
|
||||
x = scan[scan_motor]
|
||||
|
||||
plot.axis[0].axis_label = scan_motor
|
||||
plot_scatter_source.data.update(x=x, y=y, y_upper=y + np.sqrt(y), y_lower=y - np.sqrt(y))
|
||||
plot_scatter_source.data.update(x=x, y=y, y_upper=y + y_err, y_lower=y - y_err)
|
||||
|
||||
fit = scan.get("fit")
|
||||
if fit is not None:
|
||||
@ -252,10 +325,10 @@ def create():
|
||||
scan_motor = scan["scan_motor"]
|
||||
xs.append(scan[scan_motor])
|
||||
x.extend(scan[scan_motor])
|
||||
ys.append(scan["Counts"])
|
||||
ys.append(scan["counts"])
|
||||
y.extend([float(p)] * len(scan[scan_motor]))
|
||||
param.append(float(p))
|
||||
par.extend(scan["Counts"])
|
||||
par.extend(scan["counts"])
|
||||
|
||||
if det_data:
|
||||
scan_motor = det_data[0]["scan_motor"]
|
||||
@ -269,6 +342,38 @@ def create():
|
||||
mapper["transform"].high = np.max([np.max(y) for y in ys])
|
||||
ov_param_plot_scatter_source.data.update(x=x, y=y, param=par)
|
||||
|
||||
try:
|
||||
interp_f = interpolate.interp2d(x, y, par)
|
||||
x1, x2 = min(x), max(x)
|
||||
y1, y2 = min(y), max(y)
|
||||
image = interp_f(
|
||||
np.linspace(x1, x2, ov_param_plot.inner_width // 10),
|
||||
np.linspace(y1, y2, ov_param_plot.inner_height // 10),
|
||||
assume_sorted=True,
|
||||
)
|
||||
ov_param_plot_image_source.data.update(
|
||||
image=[image], x=[x1], y=[y1], dw=[x2 - x1], dh=[y2 - y1]
|
||||
)
|
||||
except Exception:
|
||||
ov_param_plot_image_source.data.update(image=[], x=[], y=[], dw=[], dh=[])
|
||||
|
||||
def _update_param_plot():
|
||||
x = []
|
||||
y = []
|
||||
y_lower = []
|
||||
y_upper = []
|
||||
fit_param = fit_param_select.value
|
||||
for s, p in zip(det_data, scan_table_source.data["param"]):
|
||||
if "fit" in s and fit_param:
|
||||
x.append(p)
|
||||
param_fit_val = s["fit"].params[fit_param].value
|
||||
param_fit_std = s["fit"].params[fit_param].stderr
|
||||
y.append(param_fit_val)
|
||||
y_lower.append(param_fit_val - param_fit_std)
|
||||
y_upper.append(param_fit_val + param_fit_std)
|
||||
|
||||
param_plot_scatter_source.data.update(x=x, y=y, y_lower=y_lower, y_upper=y_upper)
|
||||
|
||||
# Main plot
|
||||
plot = Plot(
|
||||
x_range=DataRange1d(),
|
||||
@ -285,7 +390,7 @@ def create():
|
||||
|
||||
plot_scatter_source = ColumnDataSource(dict(x=[0], y=[0], y_upper=[0], y_lower=[0]))
|
||||
plot_scatter = plot.add_glyph(
|
||||
plot_scatter_source, Scatter(x="x", y="y", line_color="steelblue")
|
||||
plot_scatter_source, Scatter(x="x", y="y", line_color="steelblue", fill_color="steelblue")
|
||||
)
|
||||
plot.add_layout(Whisker(source=plot_scatter_source, base="x", upper="y_upper", lower="y_lower"))
|
||||
|
||||
@ -297,7 +402,7 @@ def create():
|
||||
plot_bkg_source, Line(x="x", y="y", line_color="green", line_dash="dashed")
|
||||
)
|
||||
|
||||
plot_peak_source = ColumnDataSource(dict(xs=[0], ys=[0]))
|
||||
plot_peak_source = ColumnDataSource(dict(xs=[[0]], ys=[[0]]))
|
||||
plot_peak = plot.add_glyph(
|
||||
plot_peak_source, MultiLine(xs="xs", ys="ys", line_color="red", line_dash="dashed")
|
||||
)
|
||||
@ -325,7 +430,7 @@ def create():
|
||||
plot.toolbar.logo = None
|
||||
|
||||
# Overview multilines plot
|
||||
ov_plot = Plot(x_range=DataRange1d(), y_range=DataRange1d(), plot_height=400, plot_width=700)
|
||||
ov_plot = Plot(x_range=DataRange1d(), y_range=DataRange1d(), plot_height=450, plot_width=700)
|
||||
|
||||
ov_plot.add_layout(LinearAxis(axis_label="Counts"), place="left")
|
||||
ov_plot.add_layout(LinearAxis(axis_label="Scan motor"), place="below")
|
||||
@ -344,7 +449,7 @@ def create():
|
||||
|
||||
# Overview params plot
|
||||
ov_param_plot = Plot(
|
||||
x_range=DataRange1d(), y_range=DataRange1d(), plot_height=400, plot_width=700
|
||||
x_range=DataRange1d(), y_range=DataRange1d(), plot_height=450, plot_width=700
|
||||
)
|
||||
|
||||
ov_param_plot.add_layout(LinearAxis(axis_label="Param"), place="left")
|
||||
@ -353,6 +458,11 @@ def create():
|
||||
ov_param_plot.add_layout(Grid(dimension=0, ticker=BasicTicker()))
|
||||
ov_param_plot.add_layout(Grid(dimension=1, ticker=BasicTicker()))
|
||||
|
||||
ov_param_plot_image_source = ColumnDataSource(dict(image=[], x=[], y=[], dw=[], dh=[]))
|
||||
ov_param_plot.add_glyph(
|
||||
ov_param_plot_image_source, Image(image="image", x="x", y="y", dw="dw", dh="dh")
|
||||
)
|
||||
|
||||
ov_param_plot_scatter_source = ColumnDataSource(dict(x=[], y=[], param=[]))
|
||||
mapper = linear_cmap(field_name="param", palette=Turbo256, low=0, high=50)
|
||||
ov_param_plot.add_glyph(
|
||||
@ -363,12 +473,37 @@ def create():
|
||||
ov_param_plot.add_tools(PanTool(), WheelZoomTool(), ResetTool())
|
||||
ov_param_plot.toolbar.logo = None
|
||||
|
||||
# Parameter plot
|
||||
param_plot = Plot(x_range=DataRange1d(), y_range=DataRange1d(), plot_height=400, plot_width=700)
|
||||
|
||||
param_plot.add_layout(LinearAxis(axis_label="Fit parameter"), place="left")
|
||||
param_plot.add_layout(LinearAxis(axis_label="Parameter"), place="below")
|
||||
|
||||
param_plot.add_layout(Grid(dimension=0, ticker=BasicTicker()))
|
||||
param_plot.add_layout(Grid(dimension=1, ticker=BasicTicker()))
|
||||
|
||||
param_plot_scatter_source = ColumnDataSource(dict(x=[], y=[], y_upper=[], y_lower=[]))
|
||||
param_plot.add_glyph(param_plot_scatter_source, Scatter(x="x", y="y"))
|
||||
param_plot.add_layout(
|
||||
Whisker(source=param_plot_scatter_source, base="x", upper="y_upper", lower="y_lower")
|
||||
)
|
||||
|
||||
param_plot.add_tools(PanTool(), WheelZoomTool(), ResetTool())
|
||||
param_plot.toolbar.logo = None
|
||||
|
||||
def fit_param_select_callback(_attr, _old, _new):
|
||||
_update_param_plot()
|
||||
|
||||
fit_param_select = Select(title="Fit parameter", options=[], width=145)
|
||||
fit_param_select.on_change("value", fit_param_select_callback)
|
||||
|
||||
# Plot tabs
|
||||
plots = Tabs(
|
||||
tabs=[
|
||||
Panel(child=plot, title="single scan"),
|
||||
Panel(child=ov_plot, title="overview"),
|
||||
Panel(child=ov_param_plot, title="overview map"),
|
||||
Panel(child=column(param_plot, row(fit_param_select)), title="parameter plot"),
|
||||
]
|
||||
)
|
||||
|
||||
@ -388,59 +523,87 @@ def create():
|
||||
# skip unnecessary update caused by selection drop
|
||||
return
|
||||
|
||||
_update_plot()
|
||||
_update_single_scan_plot()
|
||||
|
||||
def scan_table_source_callback(_attr, _old, new):
|
||||
# unfortunately, we don't know if the change comes from data update or user input
|
||||
# also `old` and `new` are the same for non-scalars
|
||||
for scan, export in zip(det_data, new["export"]):
|
||||
scan["export"] = export
|
||||
_update_overview()
|
||||
_update_param_plot()
|
||||
_update_preview()
|
||||
|
||||
scan_table_source = ColumnDataSource(dict(file=[], scan=[], param=[], fit=[], export=[]))
|
||||
scan_table_source.on_change("data", scan_table_source_callback)
|
||||
scan_table_source.selected.on_change("indices", scan_table_select_callback)
|
||||
|
||||
scan_table = DataTable(
|
||||
source=scan_table_source,
|
||||
columns=[
|
||||
TableColumn(field="file", title="file", width=150),
|
||||
TableColumn(field="scan", title="scan", width=50),
|
||||
TableColumn(field="file", title="file", editor=CellEditor(), width=150),
|
||||
TableColumn(field="scan", title="scan", editor=CellEditor(), width=50),
|
||||
TableColumn(field="param", title="param", editor=NumberEditor(), width=50),
|
||||
TableColumn(field="fit", title="Fit", width=50),
|
||||
TableColumn(field="fit", title="Fit", editor=CellEditor(), width=50),
|
||||
TableColumn(field="export", title="Export", editor=CheckboxEditor(), width=50),
|
||||
],
|
||||
width=410, # +60 because of the index column
|
||||
height=350,
|
||||
editable=True,
|
||||
autosize_mode="none",
|
||||
)
|
||||
|
||||
def scan_table_source_callback(_attr, _old, _new):
|
||||
if scan_table_source.selected.indices:
|
||||
_update_plot()
|
||||
merge_from_select = Select(title="scan:", width=145)
|
||||
|
||||
scan_table_source.selected.on_change("indices", scan_table_select_callback)
|
||||
scan_table_source.on_change("data", scan_table_source_callback)
|
||||
def merge_button_callback():
|
||||
scan_into = _get_selected_scan()
|
||||
scan_from = det_data[int(merge_from_select.value)]
|
||||
|
||||
if scan_into is scan_from:
|
||||
print("WARNING: Selected scans for merging are identical")
|
||||
return
|
||||
|
||||
pyzebra.merge_scans(scan_into, scan_from)
|
||||
_update_table()
|
||||
_update_single_scan_plot()
|
||||
_update_overview()
|
||||
|
||||
merge_button = Button(label="Merge into current", width=145)
|
||||
merge_button.on_click(merge_button_callback)
|
||||
|
||||
def restore_button_callback():
|
||||
pyzebra.restore_scan(_get_selected_scan())
|
||||
_update_table()
|
||||
_update_single_scan_plot()
|
||||
_update_overview()
|
||||
|
||||
restore_button = Button(label="Restore scan", width=145)
|
||||
restore_button.on_click(restore_button_callback)
|
||||
|
||||
def _get_selected_scan():
|
||||
return det_data[scan_table_source.selected.indices[0]]
|
||||
|
||||
def param_select_callback(_attr, _old, new):
|
||||
if new == "user defined":
|
||||
param = [None] * len(det_data)
|
||||
else:
|
||||
param = [scan[new] for scan in det_data]
|
||||
|
||||
scan_table_source.data["param"] = param
|
||||
def param_select_callback(_attr, _old, _new):
|
||||
_update_table()
|
||||
|
||||
param_select = Select(
|
||||
title="Parameter:",
|
||||
options=["user defined", "temp", "mf", "h", "k", "l"],
|
||||
value="user defined",
|
||||
default_size=145,
|
||||
width=145,
|
||||
)
|
||||
param_select.on_change("value", param_select_callback)
|
||||
|
||||
def fit_from_spinner_callback(_attr, _old, new):
|
||||
fit_from_span.location = new
|
||||
|
||||
fit_from_spinner = Spinner(title="Fit from:", default_size=145)
|
||||
fit_from_spinner = Spinner(title="Fit from:", width=145)
|
||||
fit_from_spinner.on_change("value", fit_from_spinner_callback)
|
||||
|
||||
def fit_to_spinner_callback(_attr, _old, new):
|
||||
fit_to_span.location = new
|
||||
|
||||
fit_to_spinner = Spinner(title="to:", default_size=145)
|
||||
fit_to_spinner = Spinner(title="to:", width=145)
|
||||
fit_to_spinner.on_change("value", fit_to_spinner_callback)
|
||||
|
||||
def fitparams_add_dropdown_callback(click):
|
||||
@ -459,7 +622,7 @@ def create():
|
||||
("Pseudo Voigt", "pvoigt"),
|
||||
# ("Pseudo Voigt1", "pseudovoigt1"),
|
||||
],
|
||||
default_size=145,
|
||||
width=145,
|
||||
)
|
||||
fitparams_add_dropdown.on_click(fitparams_add_dropdown_callback)
|
||||
|
||||
@ -479,7 +642,7 @@ def create():
|
||||
else:
|
||||
fitparams_table_source.data.update(dict(param=[], value=[], vary=[], min=[], max=[]))
|
||||
|
||||
fitparams_select = MultiSelect(options=[], height=120, default_size=145)
|
||||
fitparams_select = MultiSelect(options=[], height=120, width=145)
|
||||
fitparams_select.tags = [0]
|
||||
fitparams_select.on_change("value", fitparams_select_callback)
|
||||
|
||||
@ -494,7 +657,7 @@ def create():
|
||||
|
||||
fitparams_select.value = []
|
||||
|
||||
fitparams_remove_button = Button(label="Remove fit function", default_size=145)
|
||||
fitparams_remove_button = Button(label="Remove fit function", width=145)
|
||||
fitparams_remove_button.on_click(fitparams_remove_button_callback)
|
||||
|
||||
def fitparams_factory(function):
|
||||
@ -516,13 +679,21 @@ def create():
|
||||
param=params, value=[None] * n, vary=[True] * n, min=[None] * n, max=[None] * n,
|
||||
)
|
||||
|
||||
if function == "linear":
|
||||
fitparams["value"] = [0, 1]
|
||||
fitparams["vary"] = [False, True]
|
||||
fitparams["min"] = [None, 0]
|
||||
|
||||
elif function == "gaussian":
|
||||
fitparams["min"] = [0, None, None]
|
||||
|
||||
return fitparams
|
||||
|
||||
fitparams_table_source = ColumnDataSource(dict(param=[], value=[], vary=[], min=[], max=[]))
|
||||
fitparams_table = DataTable(
|
||||
source=fitparams_table_source,
|
||||
columns=[
|
||||
TableColumn(field="param", title="Parameter"),
|
||||
TableColumn(field="param", title="Parameter", editor=CellEditor()),
|
||||
TableColumn(field="value", title="Value", editor=NumberEditor()),
|
||||
TableColumn(field="vary", title="Vary", editor=CheckboxEditor()),
|
||||
TableColumn(field="min", title="Min", editor=NumberEditor()),
|
||||
@ -542,95 +713,109 @@ def create():
|
||||
|
||||
fit_output_textinput = TextAreaInput(title="Fit results:", width=750, height=200)
|
||||
|
||||
def fit_all_button_callback():
|
||||
for scan, export in zip(det_data, scan_table_source.data["export"]):
|
||||
if export:
|
||||
def proc_all_button_callback():
|
||||
for scan in det_data:
|
||||
if scan["export"]:
|
||||
pyzebra.fit_scan(
|
||||
scan, fit_params, fit_from=fit_from_spinner.value, fit_to=fit_to_spinner.value
|
||||
)
|
||||
pyzebra.get_area(
|
||||
scan,
|
||||
area_method=AREA_METHODS[area_method_radiobutton.active],
|
||||
lorentz=lorentz_checkbox.active,
|
||||
)
|
||||
|
||||
_update_plot()
|
||||
_update_single_scan_plot()
|
||||
_update_overview()
|
||||
_update_table()
|
||||
|
||||
fit_all_button = Button(label="Fit All", button_type="primary", default_size=145)
|
||||
fit_all_button.on_click(fit_all_button_callback)
|
||||
for scan in det_data:
|
||||
if "fit" in scan:
|
||||
options = list(scan["fit"].params.keys())
|
||||
fit_param_select.options = options
|
||||
fit_param_select.value = options[0]
|
||||
break
|
||||
|
||||
def fit_button_callback():
|
||||
proc_all_button = Button(label="Process All", button_type="primary", width=145)
|
||||
proc_all_button.on_click(proc_all_button_callback)
|
||||
|
||||
def proc_button_callback():
|
||||
scan = _get_selected_scan()
|
||||
pyzebra.fit_scan(
|
||||
scan, fit_params, fit_from=fit_from_spinner.value, fit_to=fit_to_spinner.value
|
||||
)
|
||||
pyzebra.get_area(
|
||||
scan,
|
||||
area_method=AREA_METHODS[area_method_radiobutton.active],
|
||||
lorentz=lorentz_checkbox.active,
|
||||
)
|
||||
|
||||
_update_plot()
|
||||
_update_single_scan_plot()
|
||||
_update_overview()
|
||||
_update_table()
|
||||
|
||||
fit_button = Button(label="Fit Current", default_size=145)
|
||||
fit_button.on_click(fit_button_callback)
|
||||
for scan in det_data:
|
||||
if "fit" in scan:
|
||||
options = list(scan["fit"].params.keys())
|
||||
fit_param_select.options = options
|
||||
fit_param_select.value = options[0]
|
||||
break
|
||||
|
||||
area_method_radiobutton = RadioButtonGroup(
|
||||
labels=["Fit area", "Int area"], active=0, default_size=145, disabled=True
|
||||
)
|
||||
proc_button = Button(label="Process Current", width=145)
|
||||
proc_button.on_click(proc_button_callback)
|
||||
|
||||
bin_size_spinner = Spinner(
|
||||
title="Bin size:", value=1, low=1, step=1, default_size=145, disabled=True
|
||||
)
|
||||
area_method_div = Div(text="Intensity:", margin=(5, 5, 0, 5))
|
||||
area_method_radiobutton = RadioGroup(labels=["Function", "Area"], active=0, width=145)
|
||||
|
||||
lorentz_toggle = Toggle(label="Lorentz Correction", default_size=145)
|
||||
lorentz_checkbox = CheckboxGroup(labels=["Lorentz Correction"], width=145, margin=(13, 5, 5, 5))
|
||||
|
||||
export_preview_textinput = TextAreaInput(title="Export preview:", width=450, height=400)
|
||||
export_preview_textinput = TextAreaInput(title="Export file preview:", width=450, height=400)
|
||||
|
||||
def preview_button_callback():
|
||||
def _update_preview():
|
||||
with tempfile.TemporaryDirectory() as temp_dir:
|
||||
temp_file = temp_dir + "/temp"
|
||||
export_data = []
|
||||
for s, export in zip(det_data, scan_table_source.data["export"]):
|
||||
if export:
|
||||
export_data.append(s)
|
||||
param_data = []
|
||||
for scan, param in zip(det_data, scan_table_source.data["param"]):
|
||||
if scan["export"] and param:
|
||||
export_data.append(scan)
|
||||
param_data.append(param)
|
||||
|
||||
pyzebra.export_1D(
|
||||
export_data,
|
||||
temp_file,
|
||||
area_method=AREA_METHODS[int(area_method_radiobutton.active)],
|
||||
lorentz=lorentz_toggle.active,
|
||||
)
|
||||
pyzebra.export_param_study(export_data, param_data, temp_file)
|
||||
|
||||
exported_content = ""
|
||||
file_content = []
|
||||
for ext in (".comm", ".incomm"):
|
||||
fname = temp_file + ext
|
||||
if os.path.isfile(fname):
|
||||
with open(fname) as f:
|
||||
content = f.read()
|
||||
exported_content += f"{ext} file:\n" + content
|
||||
else:
|
||||
content = ""
|
||||
file_content.append(content)
|
||||
|
||||
fname = temp_file
|
||||
if os.path.isfile(fname):
|
||||
with open(fname) as f:
|
||||
content = f.read()
|
||||
exported_content += content
|
||||
else:
|
||||
content = ""
|
||||
file_content.append(content)
|
||||
|
||||
js_data.data.update(content=file_content)
|
||||
export_preview_textinput.value = exported_content
|
||||
|
||||
preview_button = Button(label="Preview", default_size=220)
|
||||
preview_button.on_click(preview_button_callback)
|
||||
|
||||
save_button = Button(label="Download preview", button_type="success", default_size=220)
|
||||
save_button = Button(label="Download File", button_type="success", width=220)
|
||||
save_button.js_on_click(CustomJS(args={"js_data": js_data}, code=javaScript))
|
||||
|
||||
fitpeak_controls = row(
|
||||
column(fitparams_add_dropdown, fitparams_select, fitparams_remove_button),
|
||||
fitparams_table,
|
||||
Spacer(width=20),
|
||||
column(
|
||||
row(fit_from_spinner, fit_to_spinner),
|
||||
row(bin_size_spinner, column(Spacer(height=19), lorentz_toggle)),
|
||||
row(area_method_radiobutton),
|
||||
row(fit_button, fit_all_button),
|
||||
),
|
||||
column(fit_from_spinner, lorentz_checkbox, area_method_div, area_method_radiobutton),
|
||||
column(fit_to_spinner, proc_button, proc_all_button),
|
||||
)
|
||||
|
||||
scan_layout = column(scan_table, row(monitor_spinner, param_select))
|
||||
scan_layout = column(
|
||||
scan_table,
|
||||
row(monitor_spinner, scan_motor_select, param_select),
|
||||
row(column(Spacer(height=19), row(restore_button, merge_button)), merge_from_select),
|
||||
)
|
||||
|
||||
import_layout = column(
|
||||
proposal_textinput,
|
||||
file_select,
|
||||
row(file_open_button, file_append_button),
|
||||
upload_div,
|
||||
@ -639,7 +824,7 @@ def create():
|
||||
append_upload_button,
|
||||
)
|
||||
|
||||
export_layout = column(export_preview_textinput, row(preview_button, save_button))
|
||||
export_layout = column(export_preview_textinput, row(save_button))
|
||||
|
||||
tab_layout = column(
|
||||
row(import_layout, scan_layout, plots, Spacer(width=30), export_layout),
|
||||
|
@ -1,11 +1,9 @@
|
||||
import ast
|
||||
import math
|
||||
import os
|
||||
import subprocess
|
||||
import tempfile
|
||||
from collections import defaultdict
|
||||
|
||||
import numpy as np
|
||||
from bokeh.io import curdoc
|
||||
from bokeh.layouts import column, row
|
||||
from bokeh.models import (
|
||||
Button,
|
||||
@ -17,33 +15,35 @@ from bokeh.models import (
|
||||
TextAreaInput,
|
||||
TextInput,
|
||||
)
|
||||
from scipy.optimize import curve_fit
|
||||
|
||||
import pyzebra
|
||||
|
||||
|
||||
def create():
|
||||
path_prefix_textinput = TextInput(title="Path prefix:", value="")
|
||||
selection_list = TextAreaInput(title="ROIs:", rows=7)
|
||||
lattice_const_textinput = TextInput(
|
||||
title="Lattice constants:", value="8.3211,8.3211,8.3211,90.00,90.00,90.00"
|
||||
)
|
||||
max_res_spinner = Spinner(title="max-res", value=2, step=0.01)
|
||||
seed_pool_size_spinner = Spinner(title="seed-pool-size", value=5, step=0.01)
|
||||
seed_len_tol_spinner = Spinner(title="seed-len-tol", value=0.02, step=0.01)
|
||||
seed_angle_tol_spinner = Spinner(title="seed-angle-tol", value=1, step=0.01)
|
||||
eval_hkl_tol_spinner = Spinner(title="eval-hkl-tol", value=0.15, step=0.01)
|
||||
doc = curdoc()
|
||||
events_data = doc.events_data
|
||||
|
||||
npeaks_spinner = Spinner(title="Number of peaks from hdf_view panel:", disabled=True)
|
||||
lattice_const_textinput = TextInput(title="Lattice constants:")
|
||||
max_res_spinner = Spinner(title="max-res:", value=2, step=0.01, width=145)
|
||||
seed_pool_size_spinner = Spinner(title="seed-pool-size:", value=5, step=0.01, width=145)
|
||||
seed_len_tol_spinner = Spinner(title="seed-len-tol:", value=0.02, step=0.01, width=145)
|
||||
seed_angle_tol_spinner = Spinner(title="seed-angle-tol:", value=1, step=0.01, width=145)
|
||||
eval_hkl_tol_spinner = Spinner(title="eval-hkl-tol:", value=0.15, step=0.01, width=145)
|
||||
|
||||
diff_vec = []
|
||||
ub_matrices = []
|
||||
|
||||
def process_button_callback():
|
||||
# drop table selection to clear result fields
|
||||
results_table_source.selected.indices = []
|
||||
|
||||
nonlocal diff_vec
|
||||
with tempfile.TemporaryDirectory() as temp_dir:
|
||||
temp_peak_list_dir = os.path.join(temp_dir, "peak_list")
|
||||
os.mkdir(temp_peak_list_dir)
|
||||
temp_event_file = os.path.join(temp_peak_list_dir, "event-0.txt")
|
||||
temp_hkl_file = os.path.join(temp_dir, "hkl.h5")
|
||||
roi_dict = ast.literal_eval(selection_list.value)
|
||||
|
||||
comp_proc = subprocess.run(
|
||||
[
|
||||
@ -51,7 +51,7 @@ def create():
|
||||
"-n",
|
||||
"2",
|
||||
"python",
|
||||
"spind/gen_hkl_table.py",
|
||||
os.path.join(doc.spind_path, "gen_hkl_table.py"),
|
||||
lattice_const_textinput.value,
|
||||
"--max-res",
|
||||
str(max_res_spinner.value),
|
||||
@ -66,7 +66,37 @@ def create():
|
||||
print(" ".join(comp_proc.args))
|
||||
print(comp_proc.stdout)
|
||||
|
||||
diff_vec = prepare_event_file(temp_event_file, roi_dict, path_prefix_textinput.value)
|
||||
# prepare an event file
|
||||
diff_vec = []
|
||||
with open(temp_event_file, "w") as f:
|
||||
npeaks = len(next(iter(doc.events_data.values())))
|
||||
for ind in range(npeaks):
|
||||
wave = events_data["wave"][ind]
|
||||
ddist = events_data["ddist"][ind]
|
||||
x_pos = events_data["x_pos"][ind]
|
||||
y_pos = events_data["y_pos"][ind]
|
||||
intensity = events_data["intensity"][ind]
|
||||
snr_cnts = events_data["snr_cnts"][ind]
gamma = events_data["gamma"][ind]
omega = events_data["omega"][ind]
chi = events_data["chi"][ind]
phi = events_data["phi"][ind]
nu = events_data["nu"][ind]

ga, nu = pyzebra.det2pol(ddist, gamma, nu, x_pos, y_pos)
diff_vector = pyzebra.z1frmd(wave, ga, omega, chi, phi, nu)
d_spacing = float(pyzebra.dandth(wave, diff_vector)[0])
diff_vector = diff_vector.flatten() * 1e10
dv1, dv2, dv3 = diff_vector

diff_vec.append(diff_vector)
f.write(
f"{x_pos} {y_pos} {intensity} {snr_cnts} {dv1} {dv2} {dv3} {d_spacing}\n"
)

print(f"Content of {temp_event_file}:")
with open(temp_event_file) as f:
print(f.read())

comp_proc = subprocess.run(
[
@ -74,7 +104,7 @@ def create():
"-n",
"2",
"python",
"spind/SPIND.py",
os.path.join(doc.spind_path, "SPIND.py"),
temp_peak_list_dir,
temp_hkl_file,
"-o",
@ -96,9 +126,12 @@ def create():
print(" ".join(comp_proc.args))
print(comp_proc.stdout)

spind_out_file = os.path.join(temp_dir, "spind.txt")
spind_res = dict(
label=[], crystal_id=[], match_rate=[], matched_peaks=[], column_5=[], ub_matrix=[],
)
try:
with open(os.path.join(temp_dir, "spind.txt")) as f_out:
spind_res = defaultdict(list)
with open(spind_out_file) as f_out:
for line in f_out:
c1, c2, c3, c4, c5, *c_rest = line.split()
spind_res["label"].append(c1)
@ -109,32 +142,45 @@ def create():

# last digits are spind UB matrix
vals = list(map(float, c_rest))
ub_matrix_spind = np.array(vals).reshape(3, 3)
ub_matrix = np.linalg.inv(np.transpose(ub_matrix_spind)) * 1e10
spind_res["ub_matrix"].append(ub_matrix)
ub_matrix_spind = np.transpose(np.array(vals).reshape(3, 3))
ub_matrix = np.linalg.inv(ub_matrix_spind)
ub_matrices.append(ub_matrix)
spind_res["ub_matrix"].append(str(ub_matrix_spind * 1e-10))

results_table_source.data.update(spind_res)
print(f"Content of {spind_out_file}:")
with open(spind_out_file) as f:
print(f.read())

except FileNotFoundError:
print("No results from spind")

results_table_source.data.update(spind_res)

process_button = Button(label="Process", button_type="primary")
process_button.on_click(process_button_callback)

hkl_textareainput = TextAreaInput(title="hkl values:", rows=7)
if doc.spind_path is None:
process_button.disabled = True

ub_matrix_textareainput = TextAreaInput(title="UB matrix:", rows=7, width=400)
hkl_textareainput = TextAreaInput(title="hkl values:", rows=7, width=400)

def results_table_select_callback(_attr, old, new):
if new:
ind = new[0]
ub_matrix = results_table_source.data["ub_matrix"][ind]
ub_matrix = ub_matrices[ind]
res = ""
for vec in diff_vec:
res += f"{vec @ ub_matrix}\n"
res += f"{ub_matrix @ vec}\n"
ub_matrix_textareainput.value = str(ub_matrix * 1e10)
hkl_textareainput.value = res
else:
hkl_textareainput.value = None
ub_matrix_textareainput.value = ""
hkl_textareainput.value = ""

results_table_source = ColumnDataSource(dict())
results_table_source = ColumnDataSource(
dict(label=[], crystal_id=[], match_rate=[], matched_peaks=[], column_5=[], ub_matrix=[])
)
results_table = DataTable(
source=results_table_source,
columns=[
@ -143,10 +189,10 @@ def create():
TableColumn(field="match_rate", title="Match Rate", width=100),
TableColumn(field="matched_peaks", title="Matched Peaks", width=100),
TableColumn(field="column_5", title="", width=100),
TableColumn(field="ub_matrix", title="UB Matrix", width=250),
TableColumn(field="ub_matrix", title="UB Matrix", width=700),
],
height=300,
width=700,
width=1200,
autosize_mode="none",
index_position=None,
)
@ -155,99 +201,23 @@ def create():

tab_layout = row(
column(
path_prefix_textinput,
selection_list,
npeaks_spinner,
lattice_const_textinput,
max_res_spinner,
seed_pool_size_spinner,
seed_len_tol_spinner,
seed_angle_tol_spinner,
eval_hkl_tol_spinner,
row(max_res_spinner, seed_pool_size_spinner),
row(seed_len_tol_spinner, seed_angle_tol_spinner),
row(eval_hkl_tol_spinner),
process_button,
),
column(results_table, row(hkl_textareainput)),
column(results_table, row(ub_matrix_textareainput, hkl_textareainput)),
)

async def update_npeaks_spinner():
npeaks = len(next(iter(doc.events_data.values())))
npeaks_spinner.value = npeaks
# TODO: check cell parameter for consistency?
if npeaks:
lattice_const_textinput.value = ",".join(map(str, doc.events_data["cell"][0]))

doc.add_periodic_callback(update_npeaks_spinner, 1000)

return Panel(child=tab_layout, title="spind")

def gauss(x, *p):
"""Defines Gaussian function
Args:
A - amplitude, mu - position of the center, sigma - width
Returns:
Gaussian function
"""
A, mu, sigma = p
return A * np.exp(-((x - mu) ** 2) / (2.0 * sigma ** 2))

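For reference, the intensity estimates below use the analytic area of this fitted Gaussian, assuming coeff = [A, mu, sigma] as returned by curve_fit:

G(x) = A \exp\left(-\frac{(x - \mu)^2}{2\sigma^2}\right), \qquad \int_{-\infty}^{\infty} G(x)\,dx = A\,\sigma\sqrt{2\pi} = A\,\sigma\,\sqrt{2}\,\sqrt{\pi}
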
def prepare_event_file(export_filename, roi_dict, path_prefix=""):
diff_vec = []
p0 = [1.0, 0.0, 1.0]
maxfev = 100000
with open(export_filename, "w") as f:
for file, rois in roi_dict.items():
dat = pyzebra.read_detector_data(path_prefix + file + ".hdf")

wave = dat["wave"]
ddist = dat["ddist"]

gamma = dat["gamma"][0]
omega = dat["omega"][0]
nu = dat["nu"][0]
chi = dat["chi"][0]
phi = dat["phi"][0]

scan_motor = dat["scan_motor"]
var_angle = dat[scan_motor]

for roi in rois:
x0, xN, y0, yN, fr0, frN = roi
data_roi = dat["data"][fr0:frN, y0:yN, x0:xN]

cnts = np.sum(data_roi, axis=(1, 2))
coeff, _ = curve_fit(gauss, range(len(cnts)), cnts, p0=p0, maxfev=maxfev)

m = cnts.mean()
sd = cnts.std()
snr_cnts = np.where(sd == 0, 0, m / sd)

frC = fr0 + coeff[1]
var_F = var_angle[math.floor(frC)]
var_C = var_angle[math.ceil(frC)]
frStep = frC - math.floor(frC)
var_step = var_C - var_F
var_p = var_F + var_step * frStep

if scan_motor == "gamma":
gamma = var_p
elif scan_motor == "omega":
omega = var_p
elif scan_motor == "nu":
nu = var_p
elif scan_motor == "chi":
chi = var_p
elif scan_motor == "phi":
phi = var_p

intensity = coeff[1] * abs(coeff[2] * var_step) * math.sqrt(2) * math.sqrt(np.pi)

projX = np.sum(data_roi, axis=(0, 1))
coeff, _ = curve_fit(gauss, range(len(projX)), projX, p0=p0, maxfev=maxfev)
x_pos = x0 + coeff[1]

projY = np.sum(data_roi, axis=(0, 2))
coeff, _ = curve_fit(gauss, range(len(projY)), projY, p0=p0, maxfev=maxfev)
y_pos = y0 + coeff[1]

ga, nu = pyzebra.det2pol(ddist, gamma, nu, x_pos, y_pos)
diff_vector = pyzebra.z1frmd(wave, ga, omega, chi, phi, nu)
d_spacing = float(pyzebra.dandth(wave, diff_vector)[0])
dv1, dv2, dv3 = diff_vector.flatten() * 1e10

diff_vec.append(diff_vector.flatten())

f.write(f"{x_pos} {y_pos} {intensity} {snr_cnts} {dv1} {dv2} {dv3} {d_spacing}\n")

return diff_vec

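A minimal usage sketch for prepare_event_file, assuming fitted .hdf scans sit next to the given path prefix; the file stem and ROI numbers are hypothetical:

# Hypothetical example: one ROI (x0, xN, y0, yN, fr0, frN) per .hdf file stem.
roi_dict = {"zebra_00401": [(100, 140, 80, 120, 0, 30)]}
diff_vectors = prepare_event_file("events.txt", roi_dict, path_prefix="/data/")
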
@ -76,7 +76,7 @@ CCL_SECOND_LINE = (
("scan_motor", str),
)

AREA_METHODS = ("fit_area", "int_area")
EXPORT_TARGETS = {"fullprof": (".comm", ".incomm"), "jana": (".col", ".incol")}


def load_1D(filepath):
@ -144,6 +144,7 @@ def parse_1D(fileobj, data_type):
continue

s = {}
s["export"] = True

# first line
for param, (param_name, param_type) in zip(line.split(), ccl_first_line):
@ -159,6 +160,7 @@ def parse_1D(fileobj, data_type):

# "om" -> "omega"
s["scan_motor"] = "omega"
s["scan_motors"] = ["omega", ]
# overwrite metadata, because it only refers to the scan center
half_dist = (s["n_points"] - 1) / 2 * s["angle_step"]
s["omega"] = np.linspace(s["omega"] - half_dist, s["omega"] + half_dist, s["n_points"])
@ -167,7 +169,8 @@ def parse_1D(fileobj, data_type):
counts = []
while len(counts) < s["n_points"]:
counts.extend(map(float, next(fileobj).split()))
s["Counts"] = np.array(counts)
s["counts"] = np.array(counts)
s["counts_err"] = np.sqrt(s["counts"])

if s["h"].is_integer() and s["k"].is_integer() and s["l"].is_integer():
s["h"], s["k"], s["l"] = map(int, (s["h"], s["k"], s["l"]))
@ -180,25 +183,19 @@ def parse_1D(fileobj, data_type):
metadata["gamma"] = metadata["twotheta"]

s = defaultdict(list)
s["export"] = True

match = re.search("Scanning Variables: (.*), Steps: (.*)", next(fileobj))
if match.group(1) == "h, k, l":
steps = match.group(2).split()
for step, ind in zip(steps, "hkl"):
if float(step) != 0:
scan_motor = ind
break
else:
scan_motor = match.group(1)

s["scan_motor"] = scan_motor
motors = [motor.lower() for motor in match.group(1).split(", ")]
steps = [float(step) for step in match.group(2).split()]

match = re.search("(.*) Points, Mode: (.*), Preset (.*)", next(fileobj))
if match.group(2) != "Monitor":
raise Exception("Unknown mode in dat file.")
s["n_points"] = int(match.group(1))
s["monitor"] = float(match.group(3))

col_names = next(fileobj).split()
col_names = list(map(str.lower, next(fileobj).split()))

for line in fileobj:
if "END-OF-DATA" in line:
@ -211,21 +208,33 @@ def parse_1D(fileobj, data_type):
for name in col_names:
s[name] = np.array(s[name])

s["counts_err"] = np.sqrt(s["counts"])

s["scan_motors"] = []
for motor, step in zip(motors, steps):
if step == 0:
# it's not a scan motor, so keep only the median value
s[motor] = np.median(s[motor])
else:
s["scan_motors"].append(motor)

# "om" -> "omega"
if s["scan_motor"] == "om":
s["scan_motor"] = "omega"
if "om" in s["scan_motors"]:
s["scan_motors"][s["scan_motors"].index("om")] = "omega"
s["omega"] = s["om"]
del s["om"]

# "tt" -> "temp"
elif s["scan_motor"] == "tt":
s["scan_motor"] = "temp"
if "tt" in s["scan_motors"]:
s["scan_motors"][s["scan_motors"].index("tt")] = "temp"
s["temp"] = s["tt"]
del s["tt"]

# "mf" stays "mf"
# "phi" stays "phi"

s["scan_motor"] = s["scan_motors"][0]

if "h" not in s:
s["h"] = s["k"] = s["l"] = float("nan")

@ -243,14 +252,19 @@ def parse_1D(fileobj, data_type):
return scan

def export_1D(data, path, area_method=AREA_METHODS[0], lorentz=False, hkl_precision=2):
"""Exports data in the .comm/.incomm format
def export_1D(data, path, export_target, hkl_precision=2):
"""Exports data in the .comm/.incomm format for fullprof or .col/.incol format for jana.

Scans with integer/real hkl values are saved in .comm/.incomm files correspondingly. If no scans
are present for a particular output format, that file won't be created.
Scans with integer/real hkl values are saved in .comm/.incomm or .col/.incol files
correspondingly. If no scans are present for a particular output format, that file won't be
created.
"""
if export_target not in EXPORT_TARGETS:
raise ValueError(f"Unknown export target: {export_target}.")

zebra_mode = data[0]["zebra_mode"]
file_content = {".comm": [], ".incomm": []}
exts = EXPORT_TARGETS[export_target]
file_content = {ext: [] for ext in exts}

for scan in data:
if "fit" not in scan:
@ -261,36 +275,11 @@ def export_1D(data, path, area_method=AREA_METHODS[0], lorentz=False, hkl_precis
h, k, l = scan["h"], scan["k"], scan["l"]
hkl_are_integers = isinstance(h, int) # if True, other indices are of type 'int' too
if hkl_are_integers:
hkl_str = f"{h:6}{k:6}{l:6}"
hkl_str = f"{h:4}{k:4}{l:4}"
else:
hkl_str = f"{h:8.{hkl_precision}f}{k:8.{hkl_precision}f}{l:8.{hkl_precision}f}"

for name, param in scan["fit"].params.items():
if "amplitude" in name:
area_n = param.value
area_s = param.stderr
break
else:
area_n = 0
area_s = 0

if area_n is None or area_s is None:
print(f"Couldn't export scan: {scan['idx']}")
continue

# apply lorentz correction to area
if lorentz:
if zebra_mode == "bi":
twotheta = np.deg2rad(scan["twotheta"])
corr_factor = np.sin(twotheta)
else: # zebra_mode == "nb":
gamma = np.deg2rad(scan["gamma"])
nu = np.deg2rad(scan["nu"])
corr_factor = np.sin(gamma) * np.cos(nu)

area_n = np.abs(area_n * corr_factor)
area_s = np.abs(area_s * corr_factor)

area_n, area_s = scan["area"]
area_str = f"{area_n:10.2f}{area_s:10.2f}"

ang_str = ""
@ -299,12 +288,104 @@ def export_1D(data, path, area_method=AREA_METHODS[0], lorentz=False, hkl_precis
angle_center = (np.min(scan[angle]) + np.max(scan[angle])) / 2
else:
angle_center = scan[angle]

if angle == "twotheta" and export_target == "jana":
angle_center /= 2

ang_str = ang_str + f"{angle_center:8g}"

ref = file_content[".comm"] if hkl_are_integers else file_content[".incomm"]
if export_target == "jana":
ang_str = ang_str + f"{scan['temp']:8}" + f"{scan['monitor']:8}"

ref = file_content[exts[0]] if hkl_are_integers else file_content[exts[1]]
ref.append(idx_str + hkl_str + area_str + ang_str + "\n")

for ext, content in file_content.items():
if content:
with open(path + ext, "w") as out_file:
out_file.writelines(content)

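A minimal usage sketch for the new export_1D signature; the dataset and output path are hypothetical and assumed to hold scans with fits and areas already computed:

# Writes path + ".comm"/".incomm" for "fullprof", or ".col"/".incol" for "jana".
export_1D(dataset, "/tmp/out", export_target="fullprof", hkl_precision=2)
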
def export_ccl_compare(data1, data2, path, export_target, hkl_precision=2):
"""Exports compare data in the .comm/.incomm format for fullprof or .col/.incol format for jana.

Scans with integer/real hkl values are saved in .comm/.incomm or .col/.incol files
correspondingly. If no scans are present for a particular output format, that file won't be
created.
"""
if export_target not in EXPORT_TARGETS:
raise ValueError(f"Unknown export target: {export_target}.")

zebra_mode = data1[0]["zebra_mode"]
exts = EXPORT_TARGETS[export_target]
file_content = {ext: [] for ext in exts}

for scan1, scan2 in zip(data1, data2):
if "fit" not in scan1:
continue

idx_str = f"{scan1['idx']:6}"

h, k, l = scan1["h"], scan1["k"], scan1["l"]
hkl_are_integers = isinstance(h, int) # if True, other indices are of type 'int' too
if hkl_are_integers:
hkl_str = f"{h:4}{k:4}{l:4}"
else:
hkl_str = f"{h:8.{hkl_precision}f}{k:8.{hkl_precision}f}{l:8.{hkl_precision}f}"

area_n1, area_s1 = scan1["area"]
area_n2, area_s2 = scan2["area"]
area_n = area_n1 - area_n2
area_s = np.sqrt(area_s1 ** 2 + area_s2 ** 2)
area_str = f"{area_n:10.2f}{area_s:10.2f}"

ang_str = ""
for angle, _ in CCL_ANGLES[zebra_mode]:
if angle == scan1["scan_motor"]:
angle_center = (np.min(scan1[angle]) + np.max(scan1[angle])) / 2
else:
angle_center = scan1[angle]

if angle == "twotheta" and export_target == "jana":
angle_center /= 2

ang_str = ang_str + f"{angle_center:8g}"

if export_target == "jana":
ang_str = ang_str + f"{scan1['temp']:8}" + f"{scan1['monitor']:8}"

ref = file_content[exts[0]] if hkl_are_integers else file_content[exts[1]]
ref.append(idx_str + hkl_str + area_str + ang_str + "\n")

for ext, content in file_content.items():
if content:
with open(path + ext, "w") as out_file:
out_file.writelines(content)

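For reference, the compare export writes the difference of the two fitted areas with the uncertainty propagated in quadrature:

\Delta A = A_1 - A_2, \qquad \sigma_{\Delta A} = \sqrt{\sigma_1^2 + \sigma_2^2}
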
def export_param_study(data, param_data, path):
file_content = []
for scan, param in zip(data, param_data):
if "fit" not in scan:
continue

if not file_content:
title_str = f"{'param':12}"
for fit_param_name in scan["fit"].params:
title_str = title_str + f"{fit_param_name:20}" + f"{'std_' + fit_param_name:20}"
title_str = title_str + "file"
file_content.append(title_str + "\n")

param_str = f"{param:<12.2f}"

fit_str = ""
for fit_param in scan["fit"].params.values():
fit_str = fit_str + f"{fit_param.value:<20.2f}" + f"{fit_param.stderr:<20.2f}"

_, fname_str = os.path.split(scan["original_filename"])

file_content.append(param_str + fit_str + fname_str + "\n")

if file_content:
with open(path, "w") as out_file:
out_file.writelines(file_content)

@ -1,8 +1,8 @@
import itertools
import os

import numpy as np
from lmfit.models import GaussianModel, LinearModel, PseudoVoigtModel, VoigtModel
from lmfit.models import Gaussian2dModel, GaussianModel, LinearModel, PseudoVoigtModel, VoigtModel
from scipy.integrate import simpson, trapezoid

from .ccl_io import CCL_ANGLES

@ -22,18 +22,24 @@ MAX_RANGE_GAP = {
"omega": 0.5,
}

AREA_METHODS = ("fit_area", "int_area")


def normalize_dataset(dataset, monitor=100_000):
for scan in dataset:
monitor_ratio = monitor / scan["monitor"]
scan["Counts"] *= monitor_ratio
scan["counts"] *= monitor_ratio
scan["counts_err"] *= monitor_ratio
scan["monitor"] = monitor


def merge_duplicates(dataset):
for scan_i, scan_j in itertools.combinations(dataset, 2):
if _parameters_match(scan_i, scan_j):
merge_scans(scan_i, scan_j)
merged = np.zeros(len(dataset), dtype=np.bool)
for ind_into, scan_into in enumerate(dataset):
for ind_from, scan_from in enumerate(dataset[ind_into + 1 :], start=ind_into + 1):
if _parameters_match(scan_into, scan_from) and not merged[ind_from]:
merge_scans(scan_into, scan_from)
merged[ind_from] = True


def _parameters_match(scan1, scan2):
@ -61,30 +67,92 @@ def _parameters_match(scan1, scan2):
return True


def merge_datasets(dataset1, dataset2):
for scan_j in dataset2:
for scan_i in dataset1:
if _parameters_match(scan_i, scan_j):
merge_scans(scan_i, scan_j)
break
def merge_datasets(dataset_into, dataset_from):
scan_motors_into = dataset_into[0]["scan_motors"]
scan_motors_from = dataset_from[0]["scan_motors"]
if scan_motors_into != scan_motors_from:
print(f"Scan motors mismatch between datasets: {scan_motors_into} vs {scan_motors_from}")
return

dataset1.append(scan_j)
merged = np.zeros(len(dataset_from), dtype=np.bool)
for scan_into in dataset_into:
for ind, scan_from in enumerate(dataset_from):
if _parameters_match(scan_into, scan_from) and not merged[ind]:
merge_scans(scan_into, scan_from)
merged[ind] = True

for scan_from in dataset_from:
dataset_into.append(scan_from)


def merge_scans(scan1, scan2):
omega = np.concatenate((scan1["omega"], scan2["omega"]))
counts = np.concatenate((scan1["Counts"], scan2["Counts"]))
def merge_scans(scan_into, scan_from):
if "init_scan" not in scan_into:
scan_into["init_scan"] = scan_into.copy()

index = np.argsort(omega)
if "merged_scans" not in scan_into:
scan_into["merged_scans"] = []

scan1["omega"] = omega[index]
scan1["Counts"] = counts[index]
if scan_from in scan_into["merged_scans"]:
return

scan2["active"] = False
scan_into["merged_scans"].append(scan_from)

fname1 = os.path.basename(scan1["original_filename"])
fname2 = os.path.basename(scan2["original_filename"])
print(f'Merging scans: {scan1["idx"]} ({fname1}) <-- {scan2["idx"]} ({fname2})')
scan_motor = scan_into["scan_motor"]  # the same as scan_from["scan_motor"]

pos_all = np.array([])
val_all = np.array([])
err_all = np.array([])
for scan in [scan_into["init_scan"], *scan_into["merged_scans"]]:
pos_all = np.append(pos_all, scan[scan_motor])
val_all = np.append(val_all, scan["counts"])
err_all = np.append(err_all, scan["counts_err"] ** 2)

sort_index = np.argsort(pos_all)
pos_all = pos_all[sort_index]
val_all = val_all[sort_index]
err_all = err_all[sort_index]

pos_tmp = pos_all[:1]
val_tmp = val_all[:1]
err_tmp = err_all[:1]
num_tmp = np.array([1])
for pos, val, err in zip(pos_all[1:], val_all[1:], err_all[1:]):
if pos - pos_tmp[-1] < 0.0005:
# the repeated motor position
val_tmp[-1] += val
err_tmp[-1] += err
num_tmp[-1] += 1
else:
# a new motor position
pos_tmp = np.append(pos_tmp, pos)
val_tmp = np.append(val_tmp, val)
err_tmp = np.append(err_tmp, err)
num_tmp = np.append(num_tmp, 1)

scan_into[scan_motor] = pos_tmp
scan_into["counts"] = val_tmp / num_tmp
scan_into["counts_err"] = np.sqrt(err_tmp)

scan_from["export"] = False

fname1 = os.path.basename(scan_into["original_filename"])
fname2 = os.path.basename(scan_from["original_filename"])
print(f'Merging scans: {scan_into["idx"]} ({fname1}) <-- {scan_from["idx"]} ({fname2})')

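A standalone sketch of the rebinning rule merge_scans uses: sorted points whose motor positions agree within 0.0005 are summed and later averaged, while squared errors accumulate and are only square-rooted at the end. The tolerance default mirrors the code above; the helper name and its use as a free function are illustrative only.

import numpy as np

def _rebin_positions(pos, counts, counts_err, tol=0.0005):
    # pos, counts, counts_err: 1D numpy arrays sorted by pos; counts_err holds one-sigma errors
    pos_out, val_out, err2_out, num = [], [], [], []
    for p, v, e2 in zip(pos, counts, counts_err ** 2):
        if pos_out and p - pos_out[-1] < tol:
            # repeated motor position: accumulate counts and squared errors
            val_out[-1] += v
            err2_out[-1] += e2
            num[-1] += 1
        else:
            # a new motor position
            pos_out.append(p)
            val_out.append(v)
            err2_out.append(e2)
            num.append(1)
    num = np.array(num)
    return np.array(pos_out), np.array(val_out) / num, np.sqrt(np.array(err2_out))
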
def restore_scan(scan):
if "merged_scans" in scan:
for merged_scan in scan["merged_scans"]:
merged_scan["export"] = True

if "init_scan" in scan:
tmp = scan["init_scan"]
scan.clear()
scan.update(tmp)
# force scan export to True, otherwise in the sequence of incorrectly merged scans
# a <- b <- c the scan b will be restored with scan["export"] = False if restoring executed
# in the same order, i.e. restore a -> restore b
scan["export"] = True


def fit_scan(scan, model_dict, fit_from=None, fit_to=None):
@ -93,12 +161,18 @@ def fit_scan(scan, model_dict, fit_from=None, fit_to=None):
if fit_to is None:
fit_to = np.inf

y_fit = scan["Counts"]
y_fit = scan["counts"]
y_err = scan["counts_err"]
x_fit = scan[scan["scan_motor"]]

# apply fitting range
fit_ind = (fit_from <= x_fit) & (x_fit <= fit_to)
if not np.any(fit_ind):
print(f"No data in fit range for scan {scan['idx']}")
return

y_fit = y_fit[fit_ind]
y_err = y_err[fit_ind]
x_fit = x_fit[fit_ind]

model = None
@ -128,6 +202,17 @@ def fit_scan(scan, model_dict, fit_from=None, fit_to=None):
else:
param_hints[hint_name] = tmp

if "center" in param_name:
if np.isneginf(param_hints["min"]):
param_hints["min"] = np.min(x_fit)

if np.isposinf(param_hints["max"]):
param_hints["max"] = np.max(x_fit)

if "sigma" in param_name:
if np.isposinf(param_hints["max"]):
param_hints["max"] = np.max(x_fit) - np.min(x_fit)

_model.set_param_hint(param_name, **param_hints)

if model is None:
@ -135,5 +220,71 @@ def fit_scan(scan, model_dict, fit_from=None, fit_to=None):
else:
model += _model

weights = [1 / np.sqrt(val) if val != 0 else 1 for val in y_fit]
weights = [1 / y_err if y_err != 0 else 1 for y_err in y_err]
scan["fit"] = model.fit(y_fit, x=x_fit, weights=weights)

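For context, lmfit multiplies the residuals by these weights, so taking w_i = 1/sigma_i corresponds to minimizing the usual chi-square:

\chi^2 = \sum_i \bigl[w_i\,(y_i - f(x_i))\bigr]^2 = \sum_i \frac{(y_i - f(x_i))^2}{\sigma_i^2}
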
def get_area(scan, area_method, lorentz):
if "fit" not in scan:
return

if area_method not in AREA_METHODS:
raise ValueError(f"Unknown area method: {area_method}.")

if area_method == "fit_area":
area_v = 0
area_s = 0
for name, param in scan["fit"].params.items():
if "amplitude" in name:
area_v += np.nan if param.value is None else param.value
area_s += np.nan if param.stderr is None else param.stderr

else:  # area_method == "int_area"
y_val = scan["counts"]
x_val = scan[scan["scan_motor"]]
y_bkg = scan["fit"].eval_components(x=x_val)["f0_"]
area_v = simpson(y_val, x=x_val) - trapezoid(y_bkg, x=x_val)
area_s = np.sqrt(area_v)

if lorentz:
# lorentz correction to area
if scan["zebra_mode"] == "bi":
twotheta = np.deg2rad(scan["twotheta"])
corr_factor = np.sin(twotheta)
else:  # zebra_mode == "nb":
gamma = np.deg2rad(scan["gamma"])
nu = np.deg2rad(scan["nu"])
corr_factor = np.sin(gamma) * np.cos(nu)

area_v = np.abs(area_v * corr_factor)
area_s = np.abs(area_s * corr_factor)

scan["area"] = (area_v, area_s)

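For reference, the Lorentz factors applied above are, with 2\theta the scattering angle in bisecting (bi) geometry and \gamma, \nu the detector angles in normal-beam (nb) geometry:

L_{\mathrm{bi}} = \sin 2\theta, \qquad L_{\mathrm{nb}} = \sin\gamma\,\cos\nu, \qquad A_{\mathrm{corr}} = |A \cdot L|
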
def fit_event(scan, fr_from, fr_to, y_from, y_to, x_from, x_to):
data_roi = scan["data"][fr_from:fr_to, y_from:y_to, x_from:x_to]

model = GaussianModel()
fr = np.arange(fr_from, fr_to)
counts_per_fr = np.sum(data_roi, axis=(1, 2))
params = model.guess(counts_per_fr, fr)
result = model.fit(counts_per_fr, x=fr, params=params)
frC = result.params["center"].value
intensity = result.params["height"].value

counts_std = counts_per_fr.std()
counts_mean = counts_per_fr.mean()
snr = 0 if counts_std == 0 else counts_mean / counts_std

model = Gaussian2dModel()
xs, ys = np.meshgrid(np.arange(x_from, x_to), np.arange(y_from, y_to))
xs = xs.flatten()
ys = ys.flatten()
counts = np.sum(data_roi, axis=0).flatten()
params = model.guess(counts, xs, ys)
result = model.fit(counts, x=xs, y=ys, params=params)
xC = result.params["centerx"].value
yC = result.params["centery"].value

scan["fit"] = {"frame": frC, "x_pos": xC, "y_pos": yC, "intensity": intensity, "snr": snr}

@ -1,6 +1,11 @@
import h5py
import numpy as np


META_MATRIX = ("UB")
META_CELL = ("cell")
META_STR = ("name")

def read_h5meta(filepath):
"""Open and parse content of a h5meta file.

@ -23,18 +28,37 @@ def parse_h5meta(file):
line = line.strip()
if line.startswith("#begin "):
section = line[len("#begin ") :]
content[section] = []
if section in ("detector parameters", "crystal"):
content[section] = {}
else:
content[section] = []

elif line.startswith("#end"):
section = None

elif section:
content[section].append(line)
if section in ("detector parameters", "crystal"):
if "=" in line:
variable, value = line.split("=", 1)
variable = variable.strip()
value = value.strip()

if variable in META_STR:
pass
elif variable in META_CELL:
value = np.array(value.split(",")[:6], dtype=np.float)
elif variable in META_MATRIX:
value = np.array(value.split(",")[:9], dtype=np.float).reshape(3, 3)
else:  # default is a single float number
value = float(value)
content[section][variable] = value
else:
content[section].append(line)

return content

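An illustrative fragment of the kind of h5meta/cami text the parser above handles, assuming parse_h5meta iterates over the lines of its argument; the sample names and numbers are made up:

# Hypothetical input: yields nested dicts for the "crystal" and "detector parameters" sections.
example = """\
#begin crystal
name = demo_sample
cell = 5.4, 5.4, 5.4, 90, 90, 90
UB = 0.1, 0, 0, 0, 0.1, 0, 0, 0, 0.1
#end crystal
#begin detector parameters
dist1 = 500
#end detector parameters
"""
meta = parse_h5meta(example.splitlines())
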
def read_detector_data(filepath):
def read_detector_data(filepath, cami_meta=None):
"""Read detector data and angles from an h5 file.

Args:
@ -51,12 +75,18 @@ def read_detector_data(filepath):
data = data.reshape(n, rows, cols)

det_data = {"data": data}
det_data["original_filename"] = filepath

if "/entry1/zebra_mode" in h5f:
det_data["zebra_mode"] = h5f["/entry1/zebra_mode"][0].decode()
else:
det_data["zebra_mode"] = "nb"

# overwrite zebra_mode from cami
if cami_meta is not None:
if "zebra_mode" in cami_meta:
det_data["zebra_mode"] = cami_meta["zebra_mode"][0]

# om, sometimes ph
if det_data["zebra_mode"] == "nb":
det_data["omega"] = h5f["/entry1/area_detector2/rotation_angle"][:]
@ -70,6 +100,8 @@ def read_detector_data(filepath):
det_data["chi"] = h5f["/entry1/sample/chi"][:]  # ch
det_data["phi"] = h5f["/entry1/sample/phi"][:]  # ph
det_data["ub"] = h5f["/entry1/sample/UB"][:].reshape(3, 3)
det_data["name"] = h5f["/entry1/sample/name"][0].decode()
det_data["cell"] = h5f["/entry1/sample/cell"][:]

for var in ("omega", "gamma", "nu", "chi", "phi"):
if abs(det_data[var][0] - det_data[var][-1]) > 0.1:
@ -85,4 +117,22 @@ def read_detector_data(filepath):
if "/entry1/sample/temperature" in h5f:
det_data["temp"] = h5f["/entry1/sample/temperature"][:]

# overwrite metadata from .cami
if cami_meta is not None:
if "crystal" in cami_meta:
cami_meta_crystal = cami_meta["crystal"]
if "name" in cami_meta_crystal:
det_data["name"] = cami_meta_crystal["name"]
if "UB" in cami_meta_crystal:
det_data["ub"] = cami_meta_crystal["UB"]
if "cell" in cami_meta_crystal:
det_data["cell"] = cami_meta_crystal["cell"]
if "lambda" in cami_meta_crystal:
det_data["wave"] = cami_meta_crystal["lambda"]

if "detector parameters" in cami_meta:
cami_meta_detparam = cami_meta["detector parameters"]
if "dist1" in cami_meta_detparam:
det_data["ddist"] = cami_meta_detparam["dist1"]

return det_data

20
pyzebra/utils.py
Normal file
@ -0,0 +1,20 @@
import os

ZEBRA_PROPOSALS_PATHS = [
f"/afs/psi.ch/project/sinqdata/{year}/zebra/" for year in (2016, 2017, 2018, 2020, 2021)
]

def find_proposal_path(proposal):
proposal = proposal.strip()
if proposal:
for zebra_proposals_path in ZEBRA_PROPOSALS_PATHS:
proposal_path = os.path.join(zebra_proposals_path, proposal)
if os.path.isdir(proposal_path):
# found it
break
else:
raise ValueError(f"Can not find data for proposal '{proposal}'.")
else:
proposal_path = ""

return proposal_path

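A minimal usage sketch for find_proposal_path; the proposal number is hypothetical:

# Returns e.g. "/afs/psi.ch/project/sinqdata/2021/zebra/20210123" if that directory exists,
# "" for an empty input, and raises ValueError when no year directory contains the proposal.
proposal_path = find_proposal_path("20210123")
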
102
pyzebra/xtal.py
@ -1,15 +1,5 @@
import math

import numpy as np
from numba import njit
from scipy.optimize import curve_fit

import pyzebra

try:
from matplotlib import pyplot as plt
except ImportError:
print("matplotlib is not available")

pi_r = 180 / np.pi

@ -382,6 +372,17 @@ def ang2hkl(wave, ddist, gammad, om, ch, ph, nud, ub, x, y):
return hkl


def ang_proc(wave, ddist, gammad, om, ch, ph, nud, x, y):
"""Utility function to calculate ch, ph, ga, om
"""
ga, nu = det2pol(ddist, gammad, nud, x, y)
z1 = z1frmd(wave, ga, om, ch, ph, nu)
ch2, ph2 = eqchph(z1)
ch, ph, ga, om = fixdnu(wave, z1, ch2, ph2, nu)

return ch, ph, ga, om


def gauss(x, *p):
"""Defines Gaussian function

@ -393,84 +394,3 @@ def gauss(x, *p):
"""
A, mu, sigma = p
return A * np.exp(-((x - mu) ** 2) / (2.0 * sigma ** 2))


def box_int(file, box):
"""Calculates center of the peak in the NB-geometry angles and Intensity of the peak

Args:
file name, box size [x0:xN, y0:yN, fr0:frN]

Returns:
gamma, omPeak, nu polar angles, Int and data for 3 fit plots
"""

dat = pyzebra.read_detector_data(file)

sttC = dat["gamma"][0]
om = dat["omega"]
nuC = dat["nu"][0]
ddist = dat["ddist"]

# defining indices
x0, xN, y0, yN, fr0, frN = box

# omega fit
om = dat["omega"][fr0:frN]
cnts = np.sum(dat["data"][fr0:frN, y0:yN, x0:xN], axis=(1, 2))

p0 = [1.0, 0.0, 1.0]
coeff, var_matrix = curve_fit(gauss, range(len(cnts)), cnts, p0=p0)

frC = fr0 + coeff[1]
omF = dat["omega"][math.floor(frC)]
omC = dat["omega"][math.ceil(frC)]
frStep = frC - math.floor(frC)
omStep = omC - omF
omP = omF + omStep * frStep
Int = coeff[1] * abs(coeff[2] * omStep) * math.sqrt(2) * math.sqrt(np.pi)
# omega plot
x_fit = np.linspace(0, len(cnts), 100)
y_fit = gauss(x_fit, *coeff)
plt.figure()
plt.subplot(131)
plt.plot(range(len(cnts)), cnts)
plt.plot(x_fit, y_fit)
plt.ylabel("Intensity in the box")
plt.xlabel("Frame N of the box")
label = "om"
# gamma fit
sliceXY = dat["data"][fr0:frN, y0:yN, x0:xN]
sliceXZ = np.sum(sliceXY, axis=1)
sliceYZ = np.sum(sliceXY, axis=2)

projX = np.sum(sliceXZ, axis=0)
p0 = [1.0, 0.0, 1.0]
coeff, var_matrix = curve_fit(gauss, range(len(projX)), projX, p0=p0)
x = x0 + coeff[1]
# gamma plot
x_fit = np.linspace(0, len(projX), 100)
y_fit = gauss(x_fit, *coeff)
plt.subplot(132)
plt.plot(range(len(projX)), projX)
plt.plot(x_fit, y_fit)
plt.ylabel("Intensity in the box")
plt.xlabel("X-pixel of the box")

# nu fit
projY = np.sum(sliceYZ, axis=0)
p0 = [1.0, 0.0, 1.0]
coeff, var_matrix = curve_fit(gauss, range(len(projY)), projY, p0=p0)
y = y0 + coeff[1]
# nu plot
x_fit = np.linspace(0, len(projY), 100)
y_fit = gauss(x_fit, *coeff)
plt.subplot(133)
plt.plot(range(len(projY)), projY)
plt.plot(x_fit, y_fit)
plt.ylabel("Intensity in the box")
plt.xlabel("Y-pixel of the box")

ga, nu = pyzebra.det2pol(ddist, sttC, nuC, x, y)

return ga[0], omP, nu[0], Int

@ -1,4 +1,4 @@
source /home/pyzebra/miniconda3/etc/profile.d/conda.sh

conda activate prod
pyzebra --port=80 --allow-websocket-origin=pyzebra.psi.ch:80
pyzebra --port=80 --allow-websocket-origin=pyzebra.psi.ch:80 --spind-path=/home/pyzebra/spind

@ -1,4 +1,4 @@
source /home/pyzebra/miniconda3/etc/profile.d/conda.sh

conda activate test
python ~/pyzebra/pyzebra/app/cli.py --allow-websocket-origin=pyzebra.psi.ch:5006
python ~/pyzebra/pyzebra/app/cli.py --allow-websocket-origin=pyzebra.psi.ch:5006 --spind-path=/home/pyzebra/spind
