Compare commits

..

No commits in common. "main" and "0.6.4" have entirely different histories.
main ... 0.6.4

34 changed files with 2303 additions and 4156 deletions

View File

@ -1,53 +0,0 @@
name: pyzebra CI/CD pipeline
on:
push:
branches:
- main
tags:
- '*'
env:
CONDA: /opt/miniforge3
jobs:
prepare:
runs-on: pyzebra
steps:
- run: $CONDA/bin/conda config --add channels conda-forge
- run: $CONDA/bin/conda config --set solver libmamba
test-env:
runs-on: pyzebra
needs: prepare
if: github.ref == 'refs/heads/main'
env:
BUILD_DIR: ${{ runner.temp }}/conda_build
steps:
- name: Checkout repository
uses: actions/checkout@v4
- run: $CONDA/bin/conda build --no-anaconda-upload --output-folder $BUILD_DIR ./conda-recipe
- run: $CONDA/bin/conda remove --name test --all --keep-env -y
- run: $CONDA/bin/conda install --name test --channel $BUILD_DIR python=3.8 pyzebra -y
- run: sudo systemctl restart pyzebra-test.service
prod-env:
runs-on: pyzebra
needs: prepare
if: startsWith(github.ref, 'refs/tags/')
env:
BUILD_DIR: ${{ runner.temp }}/conda_build
steps:
- name: Checkout repository
uses: actions/checkout@v4
- run: $CONDA/bin/conda build --token ${{ secrets.ANACONDA_TOKEN }} --output-folder $BUILD_DIR ./conda-recipe
- run: $CONDA/bin/conda remove --name prod --all --keep-env -y
- run: $CONDA/bin/conda install --name prod --channel $BUILD_DIR python=3.8 pyzebra -y
- run: sudo systemctl restart pyzebra-prod.service
cleanup:
runs-on: pyzebra
needs: [test-env, prod-env]
if: always()
steps:
- run: $CONDA/bin/conda build purge-all

25
.github/workflows/deployment.yaml vendored Normal file
View File

@ -0,0 +1,25 @@
name: Deployment
on:
push:
tags:
- '*'
jobs:
publish-conda-package:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v2
- name: Prepare
run: |
$CONDA/bin/conda install --quiet --yes conda-build anaconda-client
$CONDA/bin/conda config --append channels conda-forge
$CONDA/bin/conda config --set anaconda_upload yes
- name: Build and upload
env:
ANACONDA_TOKEN: ${{ secrets.ANACONDA_TOKEN }}
run: |
$CONDA/bin/conda build --token $ANACONDA_TOKEN conda-recipe

View File

@ -15,10 +15,10 @@ build:
requirements: requirements:
build: build:
- python >=3.8 - python >=3.7
- setuptools - setuptools
run: run:
- python >=3.8 - python >=3.7
- numpy - numpy
- scipy - scipy
- h5py - h5py
@ -28,7 +28,7 @@ requirements:
about: about:
home: https://gitlab.psi.ch/zebra/pyzebra home: https://github.com/paulscherrerinstitute/pyzebra
summary: {{ data['description'] }} summary: {{ data['description'] }}
license: GNU GPLv3 license: GNU GPLv3
license_file: LICENSE license_file: LICENSE

View File

@ -7,19 +7,18 @@ import subprocess
def main(): def main():
default_branch = "main"
branch = subprocess.check_output("git rev-parse --abbrev-ref HEAD", shell=True).decode().strip() branch = subprocess.check_output("git rev-parse --abbrev-ref HEAD", shell=True).decode().strip()
if branch != default_branch: if branch != "master":
print(f"Aborting, not on '{default_branch}' branch.") print("Aborting, not on 'master' branch.")
return return
version_filepath = os.path.join(os.path.basename(os.path.dirname(__file__)), "__init__.py") filepath = "pyzebra/__init__.py"
parser = argparse.ArgumentParser() parser = argparse.ArgumentParser()
parser.add_argument("level", type=str, choices=["patch", "minor", "major"]) parser.add_argument("level", type=str, choices=["patch", "minor", "major"])
args = parser.parse_args() args = parser.parse_args()
with open(version_filepath) as f: with open(filepath) as f:
file_content = f.read() file_content = f.read()
version = re.search(r'__version__ = "(.*?)"', file_content).group(1) version = re.search(r'__version__ = "(.*?)"', file_content).group(1)
@ -37,12 +36,11 @@ def main():
new_version = f"{major}.{minor}.{patch}" new_version = f"{major}.{minor}.{patch}"
with open(version_filepath, "w") as f: with open(filepath, "w") as f:
f.write(re.sub(r'__version__ = "(.*?)"', f'__version__ = "{new_version}"', file_content)) f.write(re.sub(r'__version__ = "(.*?)"', f'__version__ = "{new_version}"', file_content))
os.system(f"git commit {version_filepath} -m 'Updating for version {new_version}'") os.system(f"git commit {filepath} -m 'Updating for version {new_version}'")
os.system(f"git tag -a {new_version} -m 'Release {new_version}'") os.system(f"git tag -a {new_version} -m 'Release {new_version}'")
os.system("git push --follow-tags")
if __name__ == "__main__": if __name__ == "__main__":

View File

@ -2,8 +2,7 @@ from pyzebra.anatric import *
from pyzebra.ccl_io import * from pyzebra.ccl_io import *
from pyzebra.ccl_process import * from pyzebra.ccl_process import *
from pyzebra.h5 import * from pyzebra.h5 import *
from pyzebra.sxtal_refgen import *
from pyzebra.utils import * from pyzebra.utils import *
from pyzebra.xtal import * from pyzebra.xtal import *
__version__ = "0.7.11" __version__ = "0.6.4"

View File

@ -1,10 +1,12 @@
import logging
import subprocess import subprocess
import xml.etree.ElementTree as ET import xml.etree.ElementTree as ET
logger = logging.getLogger(__name__)
DATA_FACTORY_IMPLEMENTATION = ["trics", "morph", "d10"] DATA_FACTORY_IMPLEMENTATION = [
"trics",
"morph",
"d10",
]
REFLECTION_PRINTER_FORMATS = [ REFLECTION_PRINTER_FORMATS = [
"rafin", "rafin",
@ -19,11 +21,11 @@ REFLECTION_PRINTER_FORMATS = [
"oksana", "oksana",
] ]
ANATRIC_PATH = "/afs/psi.ch/project/sinq/rhel8/bin/anatric" ANATRIC_PATH = "/afs/psi.ch/project/sinq/rhel7/bin/anatric"
ALGORITHMS = ["adaptivemaxcog", "adaptivedynamic"] ALGORITHMS = ["adaptivemaxcog", "adaptivedynamic"]
def anatric(config_file, anatric_path=ANATRIC_PATH, cwd=None, log=logger): def anatric(config_file, anatric_path=ANATRIC_PATH, cwd=None):
comp_proc = subprocess.run( comp_proc = subprocess.run(
[anatric_path, config_file], [anatric_path, config_file],
stdout=subprocess.PIPE, stdout=subprocess.PIPE,
@ -32,8 +34,8 @@ def anatric(config_file, anatric_path=ANATRIC_PATH, cwd=None, log=logger):
check=True, check=True,
text=True, text=True,
) )
log.info(" ".join(comp_proc.args)) print(" ".join(comp_proc.args))
log.info(comp_proc.stdout) print(comp_proc.stdout)
class AnatricConfig: class AnatricConfig:

View File

@ -1,4 +0,0 @@
from pyzebra.app.download_files import DownloadFiles
from pyzebra.app.fit_controls import FitControls
from pyzebra.app.input_controls import InputControls
from pyzebra.app.plot_hkl import PlotHKL

75
pyzebra/app/app.py Normal file
View File

@ -0,0 +1,75 @@
import logging
import sys
from io import StringIO
import pyzebra
from bokeh.io import curdoc
from bokeh.layouts import column, row
from bokeh.models import Button, Panel, Tabs, TextAreaInput, TextInput
import panel_ccl_integrate
import panel_ccl_compare
import panel_hdf_anatric
import panel_hdf_param_study
import panel_hdf_viewer
import panel_param_study
import panel_spind
doc = curdoc()
sys.stdout = StringIO()
stdout_textareainput = TextAreaInput(title="print output:", height=150)
bokeh_stream = StringIO()
bokeh_handler = logging.StreamHandler(bokeh_stream)
bokeh_handler.setFormatter(logging.Formatter(logging.BASIC_FORMAT))
bokeh_logger = logging.getLogger("bokeh")
bokeh_logger.addHandler(bokeh_handler)
bokeh_log_textareainput = TextAreaInput(title="server output:", height=150)
def proposal_textinput_callback(_attr, _old, _new):
apply_button.disabled = False
proposal_textinput = TextInput(title="Proposal number:", name="")
proposal_textinput.on_change("value_input", proposal_textinput_callback)
doc.proposal_textinput = proposal_textinput
def apply_button_callback():
try:
proposal_path = pyzebra.find_proposal_path(proposal_textinput.value)
except ValueError as e:
print(e)
return
proposal_textinput.name = proposal_path
apply_button.disabled = True
apply_button = Button(label="Apply", button_type="primary")
apply_button.on_click(apply_button_callback)
# Final layout
doc.add_root(
column(
Tabs(
tabs=[
Panel(child=column(proposal_textinput, apply_button), title="user config"),
panel_hdf_viewer.create(),
panel_hdf_anatric.create(),
panel_ccl_integrate.create(),
panel_ccl_compare.create(),
panel_param_study.create(),
panel_hdf_param_study.create(),
panel_spind.create(),
]
),
row(stdout_textareainput, bokeh_log_textareainput, sizing_mode="scale_both"),
)
)
def update_stdout():
stdout_textareainput.value = sys.stdout.getvalue()
bokeh_log_textareainput.value = bokeh_stream.getvalue()
doc.add_periodic_callback(update_stdout, 1000)

View File

@ -1,11 +1,72 @@
import argparse
import logging
import os import os
import subprocess
import sys from bokeh.application.application import Application
from bokeh.application.handlers import ScriptHandler
from bokeh.server.server import Server
from pyzebra.anatric import ANATRIC_PATH
from pyzebra.app.handler import PyzebraHandler
logging.basicConfig(format="%(asctime)s %(message)s", level=logging.INFO)
logger = logging.getLogger(__name__)
def main(): def main():
app_path = os.path.dirname(os.path.abspath(__file__)) """The pyzebra command line interface.
subprocess.run(["bokeh", "serve", app_path, *sys.argv[1:]], check=True)
This is a wrapper around a bokeh server that provides an interface to launch the application,
bundled with the pyzebra package.
"""
app_path = os.path.join(os.path.dirname(os.path.abspath(__file__)), "app.py")
parser = argparse.ArgumentParser(
prog="pyzebra", formatter_class=argparse.ArgumentDefaultsHelpFormatter
)
parser.add_argument(
"--port", type=int, default=5006, help="port to listen on for HTTP requests"
)
parser.add_argument(
"--allow-websocket-origin",
metavar="HOST[:PORT]",
type=str,
action="append",
default=None,
help="hostname that can connect to the server websocket",
)
parser.add_argument(
"--anatric-path", type=str, default=ANATRIC_PATH, help="path to anatric executable",
)
parser.add_argument(
"--spind-path", type=str, default=None, help="path to spind scripts folder",
)
parser.add_argument(
"--args",
nargs=argparse.REMAINDER,
default=[],
help="command line arguments for the pyzebra application",
)
args = parser.parse_args()
logger.info(app_path)
pyzebra_handler = PyzebraHandler(args.anatric_path, args.spind_path)
handler = ScriptHandler(filename=app_path, argv=args.args)
server = Server(
{"/": Application(pyzebra_handler, handler)},
port=args.port,
allow_websocket_origin=args.allow_websocket_origin,
)
server.start()
server.io_loop.start()
if __name__ == "__main__": if __name__ == "__main__":

View File

@ -1,45 +0,0 @@
from bokeh.models import Button, ColumnDataSource, CustomJS
js_code = """
let j = 0;
for (let i = 0; i < source.data['name'].length; i++) {
if (source.data['content'][i] === "") continue;
setTimeout(function() {
const blob = new Blob([source.data['content'][i]], {type: 'text/plain'})
const link = document.createElement('a');
document.body.appendChild(link);
const url = window.URL.createObjectURL(blob);
link.href = url;
link.download = source.data['name'][i] + source.data['ext'][i];
link.click();
window.URL.revokeObjectURL(url);
document.body.removeChild(link);
}, 100 * j)
j++;
}
"""
class DownloadFiles:
def __init__(self, n_files):
self.n_files = n_files
source = ColumnDataSource(
data=dict(content=[""] * n_files, name=[""] * n_files, ext=[""] * n_files)
)
self._source = source
label = "Download File" if n_files == 1 else "Download Files"
button = Button(label=label, button_type="success", width=200)
button.js_on_click(CustomJS(args={"source": source}, code=js_code))
self.button = button
def set_contents(self, contents):
self._source.data.update(content=contents)
def set_names(self, names):
self._source.data.update(name=names)
def set_extensions(self, extensions):
self._source.data.update(ext=extensions)

View File

@ -1,175 +0,0 @@
import types
from bokeh.io import curdoc
from bokeh.models import (
Button,
CellEditor,
CheckboxEditor,
CheckboxGroup,
ColumnDataSource,
DataTable,
Dropdown,
MultiSelect,
NumberEditor,
RadioGroup,
Spinner,
TableColumn,
TextAreaInput,
)
import pyzebra
def _params_factory(function):
if function == "linear":
param_names = ["slope", "intercept"]
elif function == "gaussian":
param_names = ["amplitude", "center", "sigma"]
elif function == "voigt":
param_names = ["amplitude", "center", "sigma", "gamma"]
elif function == "pvoigt":
param_names = ["amplitude", "center", "sigma", "fraction"]
elif function == "pseudovoigt1":
param_names = ["amplitude", "center", "g_sigma", "l_sigma", "fraction"]
else:
raise ValueError("Unknown fit function")
n = len(param_names)
params = dict(
param=param_names, value=[None] * n, vary=[True] * n, min=[None] * n, max=[None] * n
)
if function == "linear":
params["value"] = [0, 1]
params["vary"] = [False, True]
params["min"] = [None, 0]
elif function == "gaussian":
params["min"] = [0, None, None]
return params
class FitControls:
def __init__(self):
self.log = curdoc().logger
self.params = {}
def add_function_button_callback(click):
# bokeh requires (str, str) for MultiSelect options
new_tag = f"{click.item}-{function_select.tags[0]}"
function_select.options.append((new_tag, click.item))
self.params[new_tag] = _params_factory(click.item)
function_select.tags[0] += 1
add_function_button = Dropdown(
label="Add fit function",
menu=[
("Linear", "linear"),
("Gaussian", "gaussian"),
("Voigt", "voigt"),
("Pseudo Voigt", "pvoigt"),
# ("Pseudo Voigt1", "pseudovoigt1"),
],
width=145,
)
add_function_button.on_click(add_function_button_callback)
self.add_function_button = add_function_button
def function_list_callback(_attr, old, new):
# Avoid selection of multiple indicies (via Shift+Click or Ctrl+Click)
if len(new) > 1:
# drop selection to the previous one
function_select.value = old
return
if len(old) > 1:
# skip unnecessary update caused by selection drop
return
if new:
params_table_source.data.update(self.params[new[0]])
else:
params_table_source.data.update(dict(param=[], value=[], vary=[], min=[], max=[]))
function_select = MultiSelect(options=[], height=120, width=145)
function_select.tags = [0]
function_select.on_change("value", function_list_callback)
self.function_select = function_select
def remove_function_button_callback():
if function_select.value:
sel_tag = function_select.value[0]
del self.params[sel_tag]
for elem in function_select.options:
if elem[0] == sel_tag:
function_select.options.remove(elem)
break
function_select.value = []
remove_function_button = Button(label="Remove fit function", width=145)
remove_function_button.on_click(remove_function_button_callback)
self.remove_function_button = remove_function_button
params_table_source = ColumnDataSource(dict(param=[], value=[], vary=[], min=[], max=[]))
self.params_table = DataTable(
source=params_table_source,
columns=[
TableColumn(field="param", title="Parameter", editor=CellEditor()),
TableColumn(field="value", title="Value", editor=NumberEditor()),
TableColumn(field="vary", title="Vary", editor=CheckboxEditor()),
TableColumn(field="min", title="Min", editor=NumberEditor()),
TableColumn(field="max", title="Max", editor=NumberEditor()),
],
height=200,
width=350,
index_position=None,
editable=True,
auto_edit=True,
)
# start with `background` and `gauss` fit functions added
add_function_button_callback(types.SimpleNamespace(item="linear"))
add_function_button_callback(types.SimpleNamespace(item="gaussian"))
function_select.value = ["gaussian-1"] # put selection on gauss
self.from_spinner = Spinner(title="Fit from:", width=145)
self.to_spinner = Spinner(title="to:", width=145)
self.area_method_radiogroup = RadioGroup(labels=["Function", "Area"], active=0, width=145)
self.lorentz_checkbox = CheckboxGroup(
labels=["Lorentz Correction"], width=145, margin=(13, 5, 5, 5)
)
self.result_textarea = TextAreaInput(title="Fit results:", width=750, height=200)
def _process_scan(self, scan):
pyzebra.fit_scan(
scan,
self.params,
fit_from=self.from_spinner.value,
fit_to=self.to_spinner.value,
log=self.log,
)
pyzebra.get_area(
scan,
area_method=pyzebra.AREA_METHODS[self.area_method_radiogroup.active],
lorentz=self.lorentz_checkbox.active,
)
def fit_scan(self, scan):
self._process_scan(scan)
def fit_dataset(self, dataset):
for scan in dataset:
if scan["export"]:
self._process_scan(scan)
def update_result_textarea(self, scan):
fit = scan.get("fit")
if fit is None:
self.result_textarea.value = ""
else:
self.result_textarea.value = fit.fit_report()

32
pyzebra/app/handler.py Normal file
View File

@ -0,0 +1,32 @@
from bokeh.application.handlers import Handler
class PyzebraHandler(Handler):
"""Provides a mechanism for generic bokeh applications to build up new streamvis documents.
"""
def __init__(self, anatric_path, spind_path):
"""Initialize a pyzebra handler for bokeh applications.
Args:
args (Namespace): Command line parsed arguments.
"""
super().__init__() # no-op
self.anatric_path = anatric_path
self.spind_path = spind_path
def modify_document(self, doc):
"""Modify an application document with pyzebra specific features.
Args:
doc (Document) : A bokeh Document to update in-place
Returns:
Document
"""
doc.title = "pyzebra"
doc.anatric_path = self.anatric_path
doc.spind_path = self.spind_path
return doc

View File

@ -1,159 +0,0 @@
import base64
import io
import os
from bokeh.io import curdoc
from bokeh.models import Button, FileInput, MultiSelect, Spinner
import pyzebra
class InputControls:
def __init__(self, dataset, dlfiles, on_file_open=lambda: None, on_monitor_change=lambda: None):
doc = curdoc()
log = doc.logger
def filelist_select_update_for_proposal():
proposal_path = proposal_textinput.name
if proposal_path:
file_list = []
for file in os.listdir(proposal_path):
if file.endswith((".ccl", ".dat")):
file_list.append((os.path.join(proposal_path, file), file))
filelist_select.options = file_list
open_button.disabled = False
append_button.disabled = False
else:
filelist_select.options = []
open_button.disabled = True
append_button.disabled = True
doc.add_periodic_callback(filelist_select_update_for_proposal, 5000)
def proposal_textinput_callback(_attr, _old, _new):
filelist_select_update_for_proposal()
proposal_textinput = doc.proposal_textinput
proposal_textinput.on_change("name", proposal_textinput_callback)
filelist_select = MultiSelect(title="Available .ccl/.dat files:", width=210, height=250)
self.filelist_select = filelist_select
def open_button_callback():
new_data = []
for f_path in self.filelist_select.value:
with open(f_path) as file:
f_name = os.path.basename(f_path)
base, ext = os.path.splitext(f_name)
try:
file_data = pyzebra.parse_1D(file, ext, log=log)
except Exception as e:
log.exception(e)
continue
pyzebra.normalize_dataset(file_data, monitor_spinner.value)
if not new_data: # first file
new_data = file_data
pyzebra.merge_duplicates(new_data, log=log)
dlfiles.set_names([base] * dlfiles.n_files)
else:
pyzebra.merge_datasets(new_data, file_data, log=log)
if new_data:
dataset.clear()
dataset.extend(new_data)
on_file_open()
append_upload_button.disabled = False
open_button = Button(label="Open New", width=100, disabled=True)
open_button.on_click(open_button_callback)
self.open_button = open_button
def append_button_callback():
file_data = []
for f_path in self.filelist_select.value:
with open(f_path) as file:
f_name = os.path.basename(f_path)
_, ext = os.path.splitext(f_name)
try:
file_data = pyzebra.parse_1D(file, ext, log=log)
except Exception as e:
log.exception(e)
continue
pyzebra.normalize_dataset(file_data, monitor_spinner.value)
pyzebra.merge_datasets(dataset, file_data, log=log)
if file_data:
on_file_open()
append_button = Button(label="Append", width=100, disabled=True)
append_button.on_click(append_button_callback)
self.append_button = append_button
def upload_button_callback(_attr, _old, _new):
new_data = []
for f_str, f_name in zip(upload_button.value, upload_button.filename):
with io.StringIO(base64.b64decode(f_str).decode()) as file:
base, ext = os.path.splitext(f_name)
try:
file_data = pyzebra.parse_1D(file, ext, log=log)
except Exception as e:
log.exception(e)
continue
pyzebra.normalize_dataset(file_data, monitor_spinner.value)
if not new_data: # first file
new_data = file_data
pyzebra.merge_duplicates(new_data, log=log)
dlfiles.set_names([base] * dlfiles.n_files)
else:
pyzebra.merge_datasets(new_data, file_data, log=log)
if new_data:
dataset.clear()
dataset.extend(new_data)
on_file_open()
append_upload_button.disabled = False
upload_button = FileInput(accept=".ccl,.dat", multiple=True, width=200)
# for on_change("value", ...) or on_change("filename", ...),
# see https://github.com/bokeh/bokeh/issues/11461
upload_button.on_change("filename", upload_button_callback)
self.upload_button = upload_button
def append_upload_button_callback(_attr, _old, _new):
file_data = []
for f_str, f_name in zip(append_upload_button.value, append_upload_button.filename):
with io.StringIO(base64.b64decode(f_str).decode()) as file:
_, ext = os.path.splitext(f_name)
try:
file_data = pyzebra.parse_1D(file, ext, log=log)
except Exception as e:
log.exception(e)
continue
pyzebra.normalize_dataset(file_data, monitor_spinner.value)
pyzebra.merge_datasets(dataset, file_data, log=log)
if file_data:
on_file_open()
append_upload_button = FileInput(
accept=".ccl,.dat", multiple=True, width=200, disabled=True
)
# for on_change("value", ...) or on_change("filename", ...),
# see https://github.com/bokeh/bokeh/issues/11461
append_upload_button.on_change("filename", append_upload_button_callback)
self.append_upload_button = append_upload_button
def monitor_spinner_callback(_attr, _old, new):
if dataset:
pyzebra.normalize_dataset(dataset, new)
on_monitor_change()
monitor_spinner = Spinner(title="Monitor:", mode="int", value=100_000, low=1, width=145)
monitor_spinner.on_change("value", monitor_spinner_callback)
self.monitor_spinner = monitor_spinner

View File

@ -1,112 +0,0 @@
import argparse
import logging
from io import StringIO
from bokeh.io import curdoc
from bokeh.layouts import column, row
from bokeh.models import Button, Panel, Tabs, TextAreaInput, TextInput
import pyzebra
from pyzebra.app import (
panel_ccl_compare,
panel_ccl_integrate,
panel_ccl_prepare,
panel_hdf_anatric,
panel_hdf_param_study,
panel_hdf_viewer,
panel_param_study,
panel_plot_data,
panel_spind,
)
doc = curdoc()
doc.title = "pyzebra"
parser = argparse.ArgumentParser()
parser.add_argument(
"--anatric-path", type=str, default=pyzebra.ANATRIC_PATH, help="path to anatric executable"
)
parser.add_argument(
"--sxtal-refgen-path",
type=str,
default=pyzebra.SXTAL_REFGEN_PATH,
help="path to Sxtal_Refgen executable",
)
parser.add_argument("--spind-path", type=str, default=None, help="path to spind scripts folder")
args = parser.parse_args()
doc.anatric_path = args.anatric_path
doc.spind_path = args.spind_path
doc.sxtal_refgen_path = args.sxtal_refgen_path
stream = StringIO()
handler = logging.StreamHandler(stream)
handler.setFormatter(
logging.Formatter(fmt="%(asctime)s %(levelname)s: %(message)s", datefmt="%Y-%m-%d %H:%M:%S")
)
logger = logging.getLogger(str(id(doc)))
logger.setLevel(logging.INFO)
logger.addHandler(handler)
doc.logger = logger
log_textareainput = TextAreaInput(title="Logging output:")
def proposal_textinput_callback(_attr, _old, _new):
apply_button.disabled = False
proposal_textinput = TextInput(title="Proposal number:", name="")
proposal_textinput.on_change("value_input", proposal_textinput_callback)
doc.proposal_textinput = proposal_textinput
def apply_button_callback():
proposal = proposal_textinput.value.strip()
if proposal:
try:
proposal_path = pyzebra.find_proposal_path(proposal)
except ValueError as e:
logger.exception(e)
return
apply_button.disabled = True
else:
proposal_path = ""
proposal_textinput.name = proposal_path
apply_button = Button(label="Apply", button_type="primary")
apply_button.on_click(apply_button_callback)
# Final layout
doc.add_root(
column(
Tabs(
tabs=[
Panel(child=column(proposal_textinput, apply_button), title="user config"),
panel_hdf_viewer.create(),
panel_hdf_anatric.create(),
panel_ccl_prepare.create(),
panel_plot_data.create(),
panel_ccl_integrate.create(),
panel_ccl_compare.create(),
panel_param_study.create(),
panel_hdf_param_study.create(),
panel_spind.create(),
]
),
row(log_textareainput, sizing_mode="scale_both"),
)
)
def update_stdout():
log_textareainput.value = stream.getvalue()
doc.add_periodic_callback(update_stdout, 1000)

View File

@ -2,41 +2,80 @@ import base64
import io import io
import os import os
import tempfile import tempfile
import types
import numpy as np import numpy as np
from bokeh.io import curdoc from bokeh.io import curdoc
from bokeh.layouts import column, row from bokeh.layouts import column, row
from bokeh.models import ( from bokeh.models import (
BasicTicker,
Button, Button,
CellEditor, CellEditor,
CheckboxEditor, CheckboxEditor,
CheckboxGroup,
ColumnDataSource, ColumnDataSource,
CustomJS,
DataRange1d,
DataTable, DataTable,
Div, Div,
Dropdown,
FileInput, FileInput,
Grid,
Legend,
Line,
LinearAxis,
MultiLine,
MultiSelect, MultiSelect,
NumberEditor,
Panel, Panel,
PanTool,
Plot,
RadioGroup, RadioGroup,
ResetTool,
Scatter,
Select, Select,
Spacer, Spacer,
Span, Span,
Spinner, Spinner,
TableColumn, TableColumn,
TextAreaInput, TextAreaInput,
WheelZoomTool,
Whisker, Whisker,
) )
from bokeh.plotting import figure
import pyzebra import pyzebra
from pyzebra import EXPORT_TARGETS, app from pyzebra.ccl_io import EXPORT_TARGETS
from pyzebra.ccl_process import AREA_METHODS
javaScript = """
let j = 0;
for (let i = 0; i < js_data.data['fname'].length; i++) {
if (js_data.data['content'][i] === "") continue;
setTimeout(function() {
const blob = new Blob([js_data.data['content'][i]], {type: 'text/plain'})
const link = document.createElement('a');
document.body.appendChild(link);
const url = window.URL.createObjectURL(blob);
link.href = url;
link.download = js_data.data['fname'][i] + js_data.data['ext'][i];
link.click();
window.URL.revokeObjectURL(url);
document.body.removeChild(link);
}, 100 * j)
j++;
}
"""
def create(): def create():
doc = curdoc() doc = curdoc()
log = doc.logger det_data1 = []
dataset1 = [] det_data2 = []
dataset2 = [] fit_params = {}
app_dlfiles = app.DownloadFiles(n_files=2) js_data = ColumnDataSource(data=dict(content=["", ""], fname=["", ""], ext=["", ""]))
def file_select_update_for_proposal(): def file_select_update_for_proposal():
proposal_path = proposal_textinput.name proposal_path = proposal_textinput.name
@ -60,17 +99,17 @@ def create():
proposal_textinput.on_change("name", proposal_textinput_callback) proposal_textinput.on_change("name", proposal_textinput_callback)
def _init_datatable(): def _init_datatable():
# dataset2 should have the same metadata as dataset1 # det_data2 should have the same metadata to det_data1
scan_list = [s["idx"] for s in dataset1] scan_list = [s["idx"] for s in det_data1]
hkl = [f'{s["h"]} {s["k"]} {s["l"]}' for s in dataset1] hkl = [f'{s["h"]} {s["k"]} {s["l"]}' for s in det_data1]
export = [s["export"] for s in dataset1] export = [s["export"] for s in det_data1]
twotheta = [np.median(s["twotheta"]) if "twotheta" in s else None for s in dataset1] twotheta = [np.median(s["twotheta"]) if "twotheta" in s else None for s in det_data1]
gamma = [np.median(s["gamma"]) if "gamma" in s else None for s in dataset1] gamma = [np.median(s["gamma"]) if "gamma" in s else None for s in det_data1]
omega = [np.median(s["omega"]) if "omega" in s else None for s in dataset1] omega = [np.median(s["omega"]) if "omega" in s else None for s in det_data1]
chi = [np.median(s["chi"]) if "chi" in s else None for s in dataset1] chi = [np.median(s["chi"]) if "chi" in s else None for s in det_data1]
phi = [np.median(s["phi"]) if "phi" in s else None for s in dataset1] phi = [np.median(s["phi"]) if "phi" in s else None for s in det_data1]
nu = [np.median(s["nu"]) if "nu" in s else None for s in dataset1] nu = [np.median(s["nu"]) if "nu" in s else None for s in det_data1]
scan_table_source.data.update( scan_table_source.data.update(
scan=scan_list, scan=scan_list,
@ -95,7 +134,7 @@ def create():
def file_open_button_callback(): def file_open_button_callback():
if len(file_select.value) != 2: if len(file_select.value) != 2:
log.warning("Select exactly 2 .ccl files.") print("WARNING: Select exactly 2 .ccl files.")
return return
new_data1 = [] new_data1 = []
@ -105,16 +144,16 @@ def create():
f_name = os.path.basename(f_path) f_name = os.path.basename(f_path)
base, ext = os.path.splitext(f_name) base, ext = os.path.splitext(f_name)
try: try:
file_data = pyzebra.parse_1D(file, ext, log=log) file_data = pyzebra.parse_1D(file, ext)
except Exception as e: except:
log.exception(e) print(f"Error loading {f_name}")
return return
pyzebra.normalize_dataset(file_data, monitor_spinner.value) pyzebra.normalize_dataset(file_data, monitor_spinner.value)
pyzebra.merge_duplicates(file_data, log=log) pyzebra.merge_duplicates(file_data)
if ind == 0: if ind == 0:
app_dlfiles.set_names([base, base]) js_data.data.update(fname=[base, base])
new_data1 = file_data new_data1 = file_data
else: # ind = 1 else: # ind = 1
new_data2 = file_data new_data2 = file_data
@ -124,9 +163,9 @@ def create():
new_data1 = new_data1[:min_len] new_data1 = new_data1[:min_len]
new_data2 = new_data2[:min_len] new_data2 = new_data2[:min_len]
nonlocal dataset1, dataset2 nonlocal det_data1, det_data2
dataset1 = new_data1 det_data1 = new_data1
dataset2 = new_data2 det_data2 = new_data2
_init_datatable() _init_datatable()
file_open_button = Button(label="Open New", width=100, disabled=True) file_open_button = Button(label="Open New", width=100, disabled=True)
@ -134,7 +173,7 @@ def create():
def upload_button_callback(_attr, _old, _new): def upload_button_callback(_attr, _old, _new):
if len(upload_button.filename) != 2: if len(upload_button.filename) != 2:
log.warning("Upload exactly 2 .ccl files.") print("WARNING: Upload exactly 2 .ccl files.")
return return
new_data1 = [] new_data1 = []
@ -143,16 +182,16 @@ def create():
with io.StringIO(base64.b64decode(f_str).decode()) as file: with io.StringIO(base64.b64decode(f_str).decode()) as file:
base, ext = os.path.splitext(f_name) base, ext = os.path.splitext(f_name)
try: try:
file_data = pyzebra.parse_1D(file, ext, log=log) file_data = pyzebra.parse_1D(file, ext)
except Exception as e: except:
log.exception(e) print(f"Error loading {f_name}")
return return
pyzebra.normalize_dataset(file_data, monitor_spinner.value) pyzebra.normalize_dataset(file_data, monitor_spinner.value)
pyzebra.merge_duplicates(file_data, log=log) pyzebra.merge_duplicates(file_data)
if ind == 0: if ind == 0:
app_dlfiles.set_names([base, base]) js_data.data.update(fname=[base, base])
new_data1 = file_data new_data1 = file_data
else: # ind = 1 else: # ind = 1
new_data2 = file_data new_data2 = file_data
@ -162,9 +201,9 @@ def create():
new_data1 = new_data1[:min_len] new_data1 = new_data1[:min_len]
new_data2 = new_data2[:min_len] new_data2 = new_data2[:min_len]
nonlocal dataset1, dataset2 nonlocal det_data1, det_data2
dataset1 = new_data1 det_data1 = new_data1
dataset2 = new_data2 det_data2 = new_data2
_init_datatable() _init_datatable()
upload_div = Div(text="or upload 2 .ccl files:", margin=(5, 5, 0, 5)) upload_div = Div(text="or upload 2 .ccl files:", margin=(5, 5, 0, 5))
@ -174,31 +213,31 @@ def create():
upload_button.on_change("filename", upload_button_callback) upload_button.on_change("filename", upload_button_callback)
def monitor_spinner_callback(_attr, old, new): def monitor_spinner_callback(_attr, old, new):
if dataset1 and dataset2: if det_data1 and det_data2:
pyzebra.normalize_dataset(dataset1, new) pyzebra.normalize_dataset(det_data1, new)
pyzebra.normalize_dataset(dataset2, new) pyzebra.normalize_dataset(det_data2, new)
_update_plot() _update_plot()
monitor_spinner = Spinner(title="Monitor:", mode="int", value=100_000, low=1, width=145) monitor_spinner = Spinner(title="Monitor:", mode="int", value=100_000, low=1, width=145)
monitor_spinner.on_change("value", monitor_spinner_callback) monitor_spinner.on_change("value", monitor_spinner_callback)
def _update_table(): def _update_table():
fit_ok = [(1 if "fit" in scan else 0) for scan in dataset1] fit_ok = [(1 if "fit" in scan else 0) for scan in det_data1]
export = [scan["export"] for scan in dataset1] export = [scan["export"] for scan in det_data1]
scan_table_source.data.update(fit=fit_ok, export=export) scan_table_source.data.update(fit=fit_ok, export=export)
def _update_plot(): def _update_plot():
scatter_sources = [scatter1_source, scatter2_source] plot_scatter_source = [plot_scatter1_source, plot_scatter2_source]
fit_sources = [fit1_source, fit2_source] plot_fit_source = [plot_fit1_source, plot_fit2_source]
bkg_sources = [bkg1_source, bkg2_source] plot_bkg_source = [plot_bkg1_source, plot_bkg2_source]
peak_sources = [peak1_source, peak2_source] plot_peak_source = [plot_peak1_source, plot_peak2_source]
fit_output = "" fit_output = ""
for ind, scan in enumerate(_get_selected_scan()): for ind, scan in enumerate(_get_selected_scan()):
scatter_source = scatter_sources[ind] scatter_source = plot_scatter_source[ind]
fit_source = fit_sources[ind] fit_source = plot_fit_source[ind]
bkg_source = bkg_sources[ind] bkg_source = plot_bkg_source[ind]
peak_source = peak_sources[ind] peak_source = plot_peak_source[ind]
scan_motor = scan["scan_motor"] scan_motor = scan["scan_motor"]
y = scan["counts"] y = scan["counts"]
@ -218,7 +257,7 @@ def create():
xs_peak = [] xs_peak = []
ys_peak = [] ys_peak = []
comps = fit.eval_components(x=x_fit) comps = fit.eval_components(x=x_fit)
for i, model in enumerate(app_fitctrl.params): for i, model in enumerate(fit_params):
if "linear" in model: if "linear" in model:
x_bkg = x_fit x_bkg = x_fit
y_bkg = comps[f"f{i}_"] y_bkg = comps[f"f{i}_"]
@ -238,59 +277,62 @@ def create():
bkg_source.data.update(x=[], y=[]) bkg_source.data.update(x=[], y=[])
peak_source.data.update(xs=[], ys=[]) peak_source.data.update(xs=[], ys=[])
app_fitctrl.result_textarea.value = fit_output fit_output_textinput.value = fit_output
# Main plot # Main plot
plot = figure( plot = Plot(
x_axis_label="Scan motor", x_range=DataRange1d(),
y_axis_label="Counts", y_range=DataRange1d(only_visible=True),
height=470, plot_height=470,
width=700, plot_width=700,
tools="pan,wheel_zoom,reset",
) )
scatter1_source = ColumnDataSource(dict(x=[0], y=[0], y_upper=[0], y_lower=[0])) plot.add_layout(LinearAxis(axis_label="Counts"), place="left")
plot.circle( plot.add_layout(LinearAxis(axis_label="Scan motor"), place="below")
source=scatter1_source,
line_color="steelblue", plot.add_layout(Grid(dimension=0, ticker=BasicTicker()))
fill_color="steelblue", plot.add_layout(Grid(dimension=1, ticker=BasicTicker()))
legend_label="data 1",
plot_scatter1_source = ColumnDataSource(dict(x=[0], y=[0], y_upper=[0], y_lower=[0]))
plot_scatter1 = plot.add_glyph(
plot_scatter1_source, Scatter(x="x", y="y", line_color="steelblue", fill_color="steelblue")
) )
plot.add_layout(Whisker(source=scatter1_source, base="x", upper="y_upper", lower="y_lower")) plot.add_layout(
Whisker(source=plot_scatter1_source, base="x", upper="y_upper", lower="y_lower")
scatter2_source = ColumnDataSource(dict(x=[0], y=[0], y_upper=[0], y_lower=[0]))
plot.circle(
source=scatter2_source,
line_color="firebrick",
fill_color="firebrick",
legend_label="data 2",
)
plot.add_layout(Whisker(source=scatter2_source, base="x", upper="y_upper", lower="y_lower"))
fit1_source = ColumnDataSource(dict(x=[0], y=[0]))
plot.line(source=fit1_source, legend_label="best fit 1")
fit2_source = ColumnDataSource(dict(x=[0], y=[0]))
plot.line(source=fit2_source, line_color="firebrick", legend_label="best fit 2")
bkg1_source = ColumnDataSource(dict(x=[0], y=[0]))
plot.line(
source=bkg1_source, line_color="steelblue", line_dash="dashed", legend_label="linear 1"
) )
bkg2_source = ColumnDataSource(dict(x=[0], y=[0])) plot_scatter2_source = ColumnDataSource(dict(x=[0], y=[0], y_upper=[0], y_lower=[0]))
plot.line( plot_scatter2 = plot.add_glyph(
source=bkg2_source, line_color="firebrick", line_dash="dashed", legend_label="linear 2" plot_scatter2_source, Scatter(x="x", y="y", line_color="firebrick", fill_color="firebrick")
)
plot.add_layout(
Whisker(source=plot_scatter2_source, base="x", upper="y_upper", lower="y_lower")
) )
peak1_source = ColumnDataSource(dict(xs=[[0]], ys=[[0]])) plot_fit1_source = ColumnDataSource(dict(x=[0], y=[0]))
plot.multi_line( plot_fit1 = plot.add_glyph(plot_fit1_source, Line(x="x", y="y"))
source=peak1_source, line_color="steelblue", line_dash="dashed", legend_label="peak 1"
plot_fit2_source = ColumnDataSource(dict(x=[0], y=[0]))
plot_fit2 = plot.add_glyph(plot_fit2_source, Line(x="x", y="y"))
plot_bkg1_source = ColumnDataSource(dict(x=[0], y=[0]))
plot_bkg1 = plot.add_glyph(
plot_bkg1_source, Line(x="x", y="y", line_color="steelblue", line_dash="dashed")
) )
peak2_source = ColumnDataSource(dict(xs=[[0]], ys=[[0]])) plot_bkg2_source = ColumnDataSource(dict(x=[0], y=[0]))
plot.multi_line( plot_bkg2 = plot.add_glyph(
source=peak2_source, line_color="firebrick", line_dash="dashed", legend_label="peak 2" plot_bkg2_source, Line(x="x", y="y", line_color="firebrick", line_dash="dashed")
)
plot_peak1_source = ColumnDataSource(dict(xs=[[0]], ys=[[0]]))
plot_peak1 = plot.add_glyph(
plot_peak1_source, MultiLine(xs="xs", ys="ys", line_color="steelblue", line_dash="dashed")
)
plot_peak2_source = ColumnDataSource(dict(xs=[[0]], ys=[[0]]))
plot_peak2 = plot.add_glyph(
plot_peak2_source, MultiLine(xs="xs", ys="ys", line_color="firebrick", line_dash="dashed")
) )
fit_from_span = Span(location=None, dimension="height", line_dash="dashed") fit_from_span = Span(location=None, dimension="height", line_dash="dashed")
@ -299,9 +341,25 @@ def create():
fit_to_span = Span(location=None, dimension="height", line_dash="dashed") fit_to_span = Span(location=None, dimension="height", line_dash="dashed")
plot.add_layout(fit_to_span) plot.add_layout(fit_to_span)
plot.y_range.only_visible = True plot.add_layout(
Legend(
items=[
("data 1", [plot_scatter1]),
("data 2", [plot_scatter2]),
("best fit 1", [plot_fit1]),
("best fit 2", [plot_fit2]),
("peak 1", [plot_peak1]),
("peak 2", [plot_peak2]),
("linear 1", [plot_bkg1]),
("linear 2", [plot_bkg2]),
],
location="top_left",
click_policy="hide",
)
)
plot.add_tools(PanTool(), WheelZoomTool(), ResetTool())
plot.toolbar.logo = None plot.toolbar.logo = None
plot.legend.click_policy = "hide"
# Scan select # Scan select
def scan_table_select_callback(_attr, old, new): def scan_table_select_callback(_attr, old, new):
@ -324,7 +382,7 @@ def create():
def scan_table_source_callback(_attr, _old, new): def scan_table_source_callback(_attr, _old, new):
# unfortunately, we don't know if the change comes from data update or user input # unfortunately, we don't know if the change comes from data update or user input
# also `old` and `new` are the same for non-scalars # also `old` and `new` are the same for non-scalars
for scan1, scan2, export in zip(dataset1, dataset2, new["export"]): for scan1, scan2, export in zip(det_data1, det_data2, new["export"]):
scan1["export"] = export scan1["export"] = export
scan2["export"] = export scan2["export"] = export
_update_preview() _update_preview()
@ -368,21 +426,21 @@ def create():
def _get_selected_scan(): def _get_selected_scan():
ind = scan_table_source.selected.indices[0] ind = scan_table_source.selected.indices[0]
return dataset1[ind], dataset2[ind] return det_data1[ind], det_data2[ind]
merge_from_select = Select(title="scan:", width=145) merge_from_select = Select(title="scan:", width=145)
def merge_button_callback(): def merge_button_callback():
scan_into1, scan_into2 = _get_selected_scan() scan_into1, scan_into2 = _get_selected_scan()
scan_from1 = dataset1[int(merge_from_select.value)] scan_from1 = det_data1[int(merge_from_select.value)]
scan_from2 = dataset2[int(merge_from_select.value)] scan_from2 = det_data2[int(merge_from_select.value)]
if scan_into1 is scan_from1: if scan_into1 is scan_from1:
log.warning("Selected scans for merging are identical") print("WARNING: Selected scans for merging are identical")
return return
pyzebra.merge_scans(scan_into1, scan_from1, log=log) pyzebra.merge_scans(scan_into1, scan_from1)
pyzebra.merge_scans(scan_into2, scan_from2, log=log) pyzebra.merge_scans(scan_into2, scan_from2)
_update_table() _update_table()
_update_plot() _update_plot()
@ -399,21 +457,136 @@ def create():
restore_button = Button(label="Restore scan", width=145) restore_button = Button(label="Restore scan", width=145)
restore_button.on_click(restore_button_callback) restore_button.on_click(restore_button_callback)
app_fitctrl = app.FitControls()
def fit_from_spinner_callback(_attr, _old, new): def fit_from_spinner_callback(_attr, _old, new):
fit_from_span.location = new fit_from_span.location = new
app_fitctrl.from_spinner.on_change("value", fit_from_spinner_callback) fit_from_spinner = Spinner(title="Fit from:", width=145)
fit_from_spinner.on_change("value", fit_from_spinner_callback)
def fit_to_spinner_callback(_attr, _old, new): def fit_to_spinner_callback(_attr, _old, new):
fit_to_span.location = new fit_to_span.location = new
app_fitctrl.to_spinner.on_change("value", fit_to_spinner_callback) fit_to_spinner = Spinner(title="to:", width=145)
fit_to_spinner.on_change("value", fit_to_spinner_callback)
def fitparams_add_dropdown_callback(click):
# bokeh requires (str, str) for MultiSelect options
new_tag = f"{click.item}-{fitparams_select.tags[0]}"
fitparams_select.options.append((new_tag, click.item))
fit_params[new_tag] = fitparams_factory(click.item)
fitparams_select.tags[0] += 1
fitparams_add_dropdown = Dropdown(
label="Add fit function",
menu=[
("Linear", "linear"),
("Gaussian", "gaussian"),
("Voigt", "voigt"),
("Pseudo Voigt", "pvoigt"),
# ("Pseudo Voigt1", "pseudovoigt1"),
],
width=145,
)
fitparams_add_dropdown.on_click(fitparams_add_dropdown_callback)
def fitparams_select_callback(_attr, old, new):
# Avoid selection of multiple indicies (via Shift+Click or Ctrl+Click)
if len(new) > 1:
# drop selection to the previous one
fitparams_select.value = old
return
if len(old) > 1:
# skip unnecessary update caused by selection drop
return
if new:
fitparams_table_source.data.update(fit_params[new[0]])
else:
fitparams_table_source.data.update(dict(param=[], value=[], vary=[], min=[], max=[]))
fitparams_select = MultiSelect(options=[], height=120, width=145)
fitparams_select.tags = [0]
fitparams_select.on_change("value", fitparams_select_callback)
def fitparams_remove_button_callback():
if fitparams_select.value:
sel_tag = fitparams_select.value[0]
del fit_params[sel_tag]
for elem in fitparams_select.options:
if elem[0] == sel_tag:
fitparams_select.options.remove(elem)
break
fitparams_select.value = []
fitparams_remove_button = Button(label="Remove fit function", width=145)
fitparams_remove_button.on_click(fitparams_remove_button_callback)
def fitparams_factory(function):
if function == "linear":
params = ["slope", "intercept"]
elif function == "gaussian":
params = ["amplitude", "center", "sigma"]
elif function == "voigt":
params = ["amplitude", "center", "sigma", "gamma"]
elif function == "pvoigt":
params = ["amplitude", "center", "sigma", "fraction"]
elif function == "pseudovoigt1":
params = ["amplitude", "center", "g_sigma", "l_sigma", "fraction"]
else:
raise ValueError("Unknown fit function")
n = len(params)
fitparams = dict(
param=params, value=[None] * n, vary=[True] * n, min=[None] * n, max=[None] * n,
)
if function == "linear":
fitparams["value"] = [0, 1]
fitparams["vary"] = [False, True]
fitparams["min"] = [None, 0]
elif function == "gaussian":
fitparams["min"] = [0, None, None]
return fitparams
fitparams_table_source = ColumnDataSource(dict(param=[], value=[], vary=[], min=[], max=[]))
fitparams_table = DataTable(
source=fitparams_table_source,
columns=[
TableColumn(field="param", title="Parameter", editor=CellEditor()),
TableColumn(field="value", title="Value", editor=NumberEditor()),
TableColumn(field="vary", title="Vary", editor=CheckboxEditor()),
TableColumn(field="min", title="Min", editor=NumberEditor()),
TableColumn(field="max", title="Max", editor=NumberEditor()),
],
height=200,
width=350,
index_position=None,
editable=True,
auto_edit=True,
)
# start with `background` and `gauss` fit functions added
fitparams_add_dropdown_callback(types.SimpleNamespace(item="linear"))
fitparams_add_dropdown_callback(types.SimpleNamespace(item="gaussian"))
fitparams_select.value = ["gaussian-1"] # add selection to gauss
fit_output_textinput = TextAreaInput(title="Fit results:", width=750, height=200)
def proc_all_button_callback(): def proc_all_button_callback():
app_fitctrl.fit_dataset(dataset1) for scan in [*det_data1, *det_data2]:
app_fitctrl.fit_dataset(dataset2) if scan["export"]:
pyzebra.fit_scan(
scan, fit_params, fit_from=fit_from_spinner.value, fit_to=fit_to_spinner.value
)
pyzebra.get_area(
scan,
area_method=AREA_METHODS[area_method_radiobutton.active],
lorentz=lorentz_checkbox.active,
)
_update_plot() _update_plot()
_update_table() _update_table()
@ -422,9 +595,15 @@ def create():
proc_all_button.on_click(proc_all_button_callback) proc_all_button.on_click(proc_all_button_callback)
def proc_button_callback(): def proc_button_callback():
scan1, scan2 = _get_selected_scan() for scan in _get_selected_scan():
app_fitctrl.fit_scan(scan1) pyzebra.fit_scan(
app_fitctrl.fit_scan(scan2) scan, fit_params, fit_from=fit_from_spinner.value, fit_to=fit_to_spinner.value
)
pyzebra.get_area(
scan,
area_method=AREA_METHODS[area_method_radiobutton.active],
lorentz=lorentz_checkbox.active,
)
_update_plot() _update_plot()
_update_table() _update_table()
@ -432,11 +611,16 @@ def create():
proc_button = Button(label="Process Current", width=145) proc_button = Button(label="Process Current", width=145)
proc_button.on_click(proc_button_callback) proc_button.on_click(proc_button_callback)
area_method_div = Div(text="Intensity:", margin=(5, 5, 0, 5))
area_method_radiobutton = RadioGroup(labels=["Function", "Area"], active=0, width=145)
intensity_diff_div = Div(text="Intensity difference:", margin=(5, 5, 0, 5)) intensity_diff_div = Div(text="Intensity difference:", margin=(5, 5, 0, 5))
intensity_diff_radiobutton = RadioGroup( intensity_diff_radiobutton = RadioGroup(
labels=["file1 - file2", "file2 - file1"], active=0, width=145 labels=["file1 - file2", "file2 - file1"], active=0, width=145
) )
lorentz_checkbox = CheckboxGroup(labels=["Lorentz Correction"], width=145, margin=(13, 5, 5, 5))
export_preview_textinput = TextAreaInput(title="Export file(s) preview:", width=500, height=400) export_preview_textinput = TextAreaInput(title="Export file(s) preview:", width=500, height=400)
def _update_preview(): def _update_preview():
@ -444,7 +628,7 @@ def create():
temp_file = temp_dir + "/temp" temp_file = temp_dir + "/temp"
export_data1 = [] export_data1 = []
export_data2 = [] export_data2 = []
for scan1, scan2 in zip(dataset1, dataset2): for scan1, scan2 in zip(det_data1, det_data2):
if scan1["export"]: if scan1["export"]:
export_data1.append(scan1) export_data1.append(scan1)
export_data2.append(scan2) export_data2.append(scan2)
@ -472,18 +656,18 @@ def create():
content = "" content = ""
file_content.append(content) file_content.append(content)
app_dlfiles.set_contents(file_content) js_data.data.update(content=file_content)
export_preview_textinput.value = exported_content export_preview_textinput.value = exported_content
def export_target_select_callback(_attr, _old, new): def export_target_select_callback(_attr, _old, new):
app_dlfiles.set_extensions(EXPORT_TARGETS[new]) js_data.data.update(ext=EXPORT_TARGETS[new])
_update_preview() _update_preview()
export_target_select = Select( export_target_select = Select(
title="Export target:", options=list(EXPORT_TARGETS.keys()), value="fullprof", width=80 title="Export target:", options=list(EXPORT_TARGETS.keys()), value="fullprof", width=80
) )
export_target_select.on_change("value", export_target_select_callback) export_target_select.on_change("value", export_target_select_callback)
app_dlfiles.set_extensions(EXPORT_TARGETS[export_target_select.value]) js_data.data.update(ext=EXPORT_TARGETS[export_target_select.value])
def hkl_precision_select_callback(_attr, _old, _new): def hkl_precision_select_callback(_attr, _old, _new):
_update_preview() _update_preview()
@ -493,24 +677,22 @@ def create():
) )
hkl_precision_select.on_change("value", hkl_precision_select_callback) hkl_precision_select.on_change("value", hkl_precision_select_callback)
area_method_div = Div(text="Intensity:", margin=(5, 5, 0, 5)) save_button = Button(label="Download File(s)", button_type="success", width=200)
save_button.js_on_click(CustomJS(args={"js_data": js_data}, code=javaScript))
fitpeak_controls = row( fitpeak_controls = row(
column( column(fitparams_add_dropdown, fitparams_select, fitparams_remove_button),
app_fitctrl.add_function_button, fitparams_table,
app_fitctrl.function_select,
app_fitctrl.remove_function_button,
),
app_fitctrl.params_table,
Spacer(width=20), Spacer(width=20),
column( column(
app_fitctrl.from_spinner, fit_from_spinner,
app_fitctrl.lorentz_checkbox, lorentz_checkbox,
area_method_div, area_method_div,
app_fitctrl.area_method_radiogroup, area_method_radiobutton,
intensity_diff_div, intensity_diff_div,
intensity_diff_radiobutton, intensity_diff_radiobutton,
), ),
column(app_fitctrl.to_spinner, proc_button, proc_all_button), column(fit_to_spinner, proc_button, proc_all_button),
) )
scan_layout = column( scan_layout = column(
@ -524,15 +706,13 @@ def create():
export_layout = column( export_layout = column(
export_preview_textinput, export_preview_textinput,
row( row(
export_target_select, export_target_select, hkl_precision_select, column(Spacer(height=19), row(save_button))
hkl_precision_select,
column(Spacer(height=19), row(app_dlfiles.button)),
), ),
) )
tab_layout = column( tab_layout = column(
row(import_layout, scan_layout, plot, Spacer(width=30), export_layout), row(import_layout, scan_layout, plot, Spacer(width=30), export_layout),
row(fitpeak_controls, app_fitctrl.result_textarea), row(fitpeak_controls, fit_output_textinput),
) )
return Panel(child=tab_layout, title="ccl compare") return Panel(child=tab_layout, title="ccl compare")

View File

@ -1,47 +1,115 @@
import base64
import io
import os import os
import tempfile import tempfile
import types
import numpy as np import numpy as np
from bokeh.io import curdoc from bokeh.io import curdoc
from bokeh.layouts import column, row from bokeh.layouts import column, row
from bokeh.models import ( from bokeh.models import (
BasicTicker,
Button, Button,
CellEditor, CellEditor,
CheckboxEditor, CheckboxEditor,
CheckboxGroup,
ColumnDataSource, ColumnDataSource,
CustomJS,
DataRange1d,
DataTable, DataTable,
Div, Div,
Dropdown,
FileInput,
Grid,
Legend,
Line,
LinearAxis,
MultiLine,
MultiSelect,
NumberEditor,
Panel, Panel,
PanTool,
Plot,
RadioGroup,
ResetTool,
Scatter,
Select, Select,
Spacer, Spacer,
Span, Span,
Spinner,
TableColumn, TableColumn,
TextAreaInput, TextAreaInput,
WheelZoomTool,
Whisker, Whisker,
) )
from bokeh.plotting import figure
import pyzebra import pyzebra
from pyzebra import EXPORT_TARGETS, app from pyzebra.ccl_io import EXPORT_TARGETS
from pyzebra.ccl_process import AREA_METHODS
javaScript = """
let j = 0;
for (let i = 0; i < js_data.data['fname'].length; i++) {
if (js_data.data['content'][i] === "") continue;
setTimeout(function() {
const blob = new Blob([js_data.data['content'][i]], {type: 'text/plain'})
const link = document.createElement('a');
document.body.appendChild(link);
const url = window.URL.createObjectURL(blob);
link.href = url;
link.download = js_data.data['fname'][i] + js_data.data['ext'][i];
link.click();
window.URL.revokeObjectURL(url);
document.body.removeChild(link);
}, 100 * j)
j++;
}
"""
def create(): def create():
doc = curdoc() doc = curdoc()
log = doc.logger det_data = []
dataset = [] fit_params = {}
app_dlfiles = app.DownloadFiles(n_files=2) js_data = ColumnDataSource(data=dict(content=["", ""], fname=["", ""], ext=["", ""]))
def file_select_update_for_proposal():
proposal_path = proposal_textinput.name
if proposal_path:
file_list = []
for file in os.listdir(proposal_path):
if file.endswith((".ccl", ".dat")):
file_list.append((os.path.join(proposal_path, file), file))
file_select.options = file_list
file_open_button.disabled = False
file_append_button.disabled = False
else:
file_select.options = []
file_open_button.disabled = True
file_append_button.disabled = True
doc.add_periodic_callback(file_select_update_for_proposal, 5000)
def proposal_textinput_callback(_attr, _old, _new):
file_select_update_for_proposal()
proposal_textinput = doc.proposal_textinput
proposal_textinput.on_change("name", proposal_textinput_callback)
def _init_datatable(): def _init_datatable():
scan_list = [s["idx"] for s in dataset] scan_list = [s["idx"] for s in det_data]
hkl = [f'{s["h"]} {s["k"]} {s["l"]}' for s in dataset] hkl = [f'{s["h"]} {s["k"]} {s["l"]}' for s in det_data]
export = [s["export"] for s in dataset] export = [s["export"] for s in det_data]
twotheta = [np.median(s["twotheta"]) if "twotheta" in s else None for s in dataset] twotheta = [np.median(s["twotheta"]) if "twotheta" in s else None for s in det_data]
gamma = [np.median(s["gamma"]) if "gamma" in s else None for s in dataset] gamma = [np.median(s["gamma"]) if "gamma" in s else None for s in det_data]
omega = [np.median(s["omega"]) if "omega" in s else None for s in dataset] omega = [np.median(s["omega"]) if "omega" in s else None for s in det_data]
chi = [np.median(s["chi"]) if "chi" in s else None for s in dataset] chi = [np.median(s["chi"]) if "chi" in s else None for s in det_data]
phi = [np.median(s["phi"]) if "phi" in s else None for s in dataset] phi = [np.median(s["phi"]) if "phi" in s else None for s in det_data]
nu = [np.median(s["nu"]) if "nu" in s else None for s in dataset] nu = [np.median(s["nu"]) if "nu" in s else None for s in det_data]
scan_table_source.data.update( scan_table_source.data.update(
scan=scan_list, scan=scan_list,
@ -62,9 +130,125 @@ def create():
merge_from_select.options = merge_options merge_from_select.options = merge_options
merge_from_select.value = merge_options[0][0] merge_from_select.value = merge_options[0][0]
file_select = MultiSelect(title="Available .ccl/.dat files:", width=210, height=250)
def file_open_button_callback():
nonlocal det_data
new_data = []
for f_path in file_select.value:
with open(f_path) as file:
f_name = os.path.basename(f_path)
base, ext = os.path.splitext(f_name)
try:
file_data = pyzebra.parse_1D(file, ext)
except:
print(f"Error loading {f_name}")
continue
pyzebra.normalize_dataset(file_data, monitor_spinner.value)
if not new_data: # first file
new_data = file_data
pyzebra.merge_duplicates(new_data)
js_data.data.update(fname=[base, base])
else:
pyzebra.merge_datasets(new_data, file_data)
if new_data:
det_data = new_data
_init_datatable()
append_upload_button.disabled = False
file_open_button = Button(label="Open New", width=100, disabled=True)
file_open_button.on_click(file_open_button_callback)
def file_append_button_callback():
file_data = []
for f_path in file_select.value:
with open(f_path) as file:
f_name = os.path.basename(f_path)
_, ext = os.path.splitext(f_name)
try:
file_data = pyzebra.parse_1D(file, ext)
except:
print(f"Error loading {f_name}")
continue
pyzebra.normalize_dataset(file_data, monitor_spinner.value)
pyzebra.merge_datasets(det_data, file_data)
if file_data:
_init_datatable()
file_append_button = Button(label="Append", width=100, disabled=True)
file_append_button.on_click(file_append_button_callback)
def upload_button_callback(_attr, _old, _new):
nonlocal det_data
new_data = []
for f_str, f_name in zip(upload_button.value, upload_button.filename):
with io.StringIO(base64.b64decode(f_str).decode()) as file:
base, ext = os.path.splitext(f_name)
try:
file_data = pyzebra.parse_1D(file, ext)
except:
print(f"Error loading {f_name}")
continue
pyzebra.normalize_dataset(file_data, monitor_spinner.value)
if not new_data: # first file
new_data = file_data
pyzebra.merge_duplicates(new_data)
js_data.data.update(fname=[base, base])
else:
pyzebra.merge_datasets(new_data, file_data)
if new_data:
det_data = new_data
_init_datatable()
append_upload_button.disabled = False
upload_div = Div(text="or upload new .ccl/.dat files:", margin=(5, 5, 0, 5))
upload_button = FileInput(accept=".ccl,.dat", multiple=True, width=200)
# for on_change("value", ...) or on_change("filename", ...),
# see https://github.com/bokeh/bokeh/issues/11461
upload_button.on_change("filename", upload_button_callback)
def append_upload_button_callback(_attr, _old, _new):
file_data = []
for f_str, f_name in zip(append_upload_button.value, append_upload_button.filename):
with io.StringIO(base64.b64decode(f_str).decode()) as file:
_, ext = os.path.splitext(f_name)
try:
file_data = pyzebra.parse_1D(file, ext)
except:
print(f"Error loading {f_name}")
continue
pyzebra.normalize_dataset(file_data, monitor_spinner.value)
pyzebra.merge_datasets(det_data, file_data)
if file_data:
_init_datatable()
append_upload_div = Div(text="append extra files:", margin=(5, 5, 0, 5))
append_upload_button = FileInput(accept=".ccl,.dat", multiple=True, width=200, disabled=True)
# for on_change("value", ...) or on_change("filename", ...),
# see https://github.com/bokeh/bokeh/issues/11461
append_upload_button.on_change("filename", append_upload_button_callback)
def monitor_spinner_callback(_attr, old, new):
if det_data:
pyzebra.normalize_dataset(det_data, new)
_update_plot()
monitor_spinner = Spinner(title="Monitor:", mode="int", value=100_000, low=1, width=145)
monitor_spinner.on_change("value", monitor_spinner_callback)
def _update_table(): def _update_table():
fit_ok = [(1 if "fit" in scan else 0) for scan in dataset] fit_ok = [(1 if "fit" in scan else 0) for scan in det_data]
export = [scan["export"] for scan in dataset] export = [scan["export"] for scan in det_data]
scan_table_source.data.update(fit=fit_ok, export=export) scan_table_source.data.update(fit=fit_ok, export=export)
def _update_plot(): def _update_plot():
@ -76,19 +260,19 @@ def create():
x = scan[scan_motor] x = scan[scan_motor]
plot.axis[0].axis_label = scan_motor plot.axis[0].axis_label = scan_motor
scatter_source.data.update(x=x, y=y, y_upper=y + y_err, y_lower=y - y_err) plot_scatter_source.data.update(x=x, y=y, y_upper=y + y_err, y_lower=y - y_err)
fit = scan.get("fit") fit = scan.get("fit")
if fit is not None: if fit is not None:
x_fit = np.linspace(x[0], x[-1], 100) x_fit = np.linspace(x[0], x[-1], 100)
fit_source.data.update(x=x_fit, y=fit.eval(x=x_fit)) plot_fit_source.data.update(x=x_fit, y=fit.eval(x=x_fit))
x_bkg = [] x_bkg = []
y_bkg = [] y_bkg = []
xs_peak = [] xs_peak = []
ys_peak = [] ys_peak = []
comps = fit.eval_components(x=x_fit) comps = fit.eval_components(x=x_fit)
for i, model in enumerate(app_fitctrl.params): for i, model in enumerate(fit_params):
if "linear" in model: if "linear" in model:
x_bkg = x_fit x_bkg = x_fit
y_bkg = comps[f"f{i}_"] y_bkg = comps[f"f{i}_"]
@ -97,43 +281,49 @@ def create():
xs_peak.append(x_fit) xs_peak.append(x_fit)
ys_peak.append(comps[f"f{i}_"]) ys_peak.append(comps[f"f{i}_"])
bkg_source.data.update(x=x_bkg, y=y_bkg) plot_bkg_source.data.update(x=x_bkg, y=y_bkg)
peak_source.data.update(xs=xs_peak, ys=ys_peak) plot_peak_source.data.update(xs=xs_peak, ys=ys_peak)
fit_output_textinput.value = fit.fit_report()
else: else:
fit_source.data.update(x=[], y=[]) plot_fit_source.data.update(x=[], y=[])
bkg_source.data.update(x=[], y=[]) plot_bkg_source.data.update(x=[], y=[])
peak_source.data.update(xs=[], ys=[]) plot_peak_source.data.update(xs=[], ys=[])
fit_output_textinput.value = ""
app_fitctrl.update_result_textarea(scan)
app_inputctrl = app.InputControls(
dataset, app_dlfiles, on_file_open=_init_datatable, on_monitor_change=_update_plot
)
# Main plot # Main plot
plot = figure( plot = Plot(
x_axis_label="Scan motor", x_range=DataRange1d(),
y_axis_label="Counts", y_range=DataRange1d(only_visible=True),
height=470, plot_height=470,
width=700, plot_width=700,
tools="pan,wheel_zoom,reset",
) )
scatter_source = ColumnDataSource(dict(x=[0], y=[0], y_upper=[0], y_lower=[0])) plot.add_layout(LinearAxis(axis_label="Counts"), place="left")
plot.circle( plot.add_layout(LinearAxis(axis_label="Scan motor"), place="below")
source=scatter_source, line_color="steelblue", fill_color="steelblue", legend_label="data"
plot.add_layout(Grid(dimension=0, ticker=BasicTicker()))
plot.add_layout(Grid(dimension=1, ticker=BasicTicker()))
plot_scatter_source = ColumnDataSource(dict(x=[0], y=[0], y_upper=[0], y_lower=[0]))
plot_scatter = plot.add_glyph(
plot_scatter_source, Scatter(x="x", y="y", line_color="steelblue", fill_color="steelblue")
) )
plot.add_layout(Whisker(source=scatter_source, base="x", upper="y_upper", lower="y_lower")) plot.add_layout(Whisker(source=plot_scatter_source, base="x", upper="y_upper", lower="y_lower"))
fit_source = ColumnDataSource(dict(x=[0], y=[0])) plot_fit_source = ColumnDataSource(dict(x=[0], y=[0]))
plot.line(source=fit_source, legend_label="best fit") plot_fit = plot.add_glyph(plot_fit_source, Line(x="x", y="y"))
bkg_source = ColumnDataSource(dict(x=[0], y=[0])) plot_bkg_source = ColumnDataSource(dict(x=[0], y=[0]))
plot.line(source=bkg_source, line_color="green", line_dash="dashed", legend_label="linear") plot_bkg = plot.add_glyph(
plot_bkg_source, Line(x="x", y="y", line_color="green", line_dash="dashed")
)
peak_source = ColumnDataSource(dict(xs=[[0]], ys=[[0]])) plot_peak_source = ColumnDataSource(dict(xs=[[0]], ys=[[0]]))
plot.multi_line(source=peak_source, line_color="red", line_dash="dashed", legend_label="peak") plot_peak = plot.add_glyph(
plot_peak_source, MultiLine(xs="xs", ys="ys", line_color="red", line_dash="dashed")
)
fit_from_span = Span(location=None, dimension="height", line_dash="dashed") fit_from_span = Span(location=None, dimension="height", line_dash="dashed")
plot.add_layout(fit_from_span) plot.add_layout(fit_from_span)
@ -141,9 +331,21 @@ def create():
fit_to_span = Span(location=None, dimension="height", line_dash="dashed") fit_to_span = Span(location=None, dimension="height", line_dash="dashed")
plot.add_layout(fit_to_span) plot.add_layout(fit_to_span)
plot.y_range.only_visible = True plot.add_layout(
Legend(
items=[
("data", [plot_scatter]),
("best fit", [plot_fit]),
("peak", [plot_peak]),
("linear", [plot_bkg]),
],
location="top_left",
click_policy="hide",
)
)
plot.add_tools(PanTool(), WheelZoomTool(), ResetTool())
plot.toolbar.logo = None plot.toolbar.logo = None
plot.legend.click_policy = "hide"
# Scan select # Scan select
def scan_table_select_callback(_attr, old, new): def scan_table_select_callback(_attr, old, new):
@ -166,7 +368,7 @@ def create():
def scan_table_source_callback(_attr, _old, new): def scan_table_source_callback(_attr, _old, new):
# unfortunately, we don't know if the change comes from data update or user input # unfortunately, we don't know if the change comes from data update or user input
# also `old` and `new` are the same for non-scalars # also `old` and `new` are the same for non-scalars
for scan, export in zip(dataset, new["export"]): for scan, export in zip(det_data, new["export"]):
scan["export"] = export scan["export"] = export
_update_preview() _update_preview()
@ -208,19 +410,19 @@ def create():
) )
def _get_selected_scan(): def _get_selected_scan():
return dataset[scan_table_source.selected.indices[0]] return det_data[scan_table_source.selected.indices[0]]
merge_from_select = Select(title="scan:", width=145) merge_from_select = Select(title="scan:", width=145)
def merge_button_callback(): def merge_button_callback():
scan_into = _get_selected_scan() scan_into = _get_selected_scan()
scan_from = dataset[int(merge_from_select.value)] scan_from = det_data[int(merge_from_select.value)]
if scan_into is scan_from: if scan_into is scan_from:
log.warning("Selected scans for merging are identical") print("WARNING: Selected scans for merging are identical")
return return
pyzebra.merge_scans(scan_into, scan_from, log=log) pyzebra.merge_scans(scan_into, scan_from)
_update_table() _update_table()
_update_plot() _update_plot()
@ -235,20 +437,136 @@ def create():
restore_button = Button(label="Restore scan", width=145) restore_button = Button(label="Restore scan", width=145)
restore_button.on_click(restore_button_callback) restore_button.on_click(restore_button_callback)
app_fitctrl = app.FitControls()
def fit_from_spinner_callback(_attr, _old, new): def fit_from_spinner_callback(_attr, _old, new):
fit_from_span.location = new fit_from_span.location = new
app_fitctrl.from_spinner.on_change("value", fit_from_spinner_callback) fit_from_spinner = Spinner(title="Fit from:", width=145)
fit_from_spinner.on_change("value", fit_from_spinner_callback)
def fit_to_spinner_callback(_attr, _old, new): def fit_to_spinner_callback(_attr, _old, new):
fit_to_span.location = new fit_to_span.location = new
app_fitctrl.to_spinner.on_change("value", fit_to_spinner_callback) fit_to_spinner = Spinner(title="to:", width=145)
fit_to_spinner.on_change("value", fit_to_spinner_callback)
def fitparams_add_dropdown_callback(click):
# bokeh requires (str, str) for MultiSelect options
new_tag = f"{click.item}-{fitparams_select.tags[0]}"
fitparams_select.options.append((new_tag, click.item))
fit_params[new_tag] = fitparams_factory(click.item)
fitparams_select.tags[0] += 1
fitparams_add_dropdown = Dropdown(
label="Add fit function",
menu=[
("Linear", "linear"),
("Gaussian", "gaussian"),
("Voigt", "voigt"),
("Pseudo Voigt", "pvoigt"),
# ("Pseudo Voigt1", "pseudovoigt1"),
],
width=145,
)
fitparams_add_dropdown.on_click(fitparams_add_dropdown_callback)
def fitparams_select_callback(_attr, old, new):
# Avoid selection of multiple indicies (via Shift+Click or Ctrl+Click)
if len(new) > 1:
# drop selection to the previous one
fitparams_select.value = old
return
if len(old) > 1:
# skip unnecessary update caused by selection drop
return
if new:
fitparams_table_source.data.update(fit_params[new[0]])
else:
fitparams_table_source.data.update(dict(param=[], value=[], vary=[], min=[], max=[]))
fitparams_select = MultiSelect(options=[], height=120, width=145)
fitparams_select.tags = [0]
fitparams_select.on_change("value", fitparams_select_callback)
def fitparams_remove_button_callback():
if fitparams_select.value:
sel_tag = fitparams_select.value[0]
del fit_params[sel_tag]
for elem in fitparams_select.options:
if elem[0] == sel_tag:
fitparams_select.options.remove(elem)
break
fitparams_select.value = []
fitparams_remove_button = Button(label="Remove fit function", width=145)
fitparams_remove_button.on_click(fitparams_remove_button_callback)
def fitparams_factory(function):
if function == "linear":
params = ["slope", "intercept"]
elif function == "gaussian":
params = ["amplitude", "center", "sigma"]
elif function == "voigt":
params = ["amplitude", "center", "sigma", "gamma"]
elif function == "pvoigt":
params = ["amplitude", "center", "sigma", "fraction"]
elif function == "pseudovoigt1":
params = ["amplitude", "center", "g_sigma", "l_sigma", "fraction"]
else:
raise ValueError("Unknown fit function")
n = len(params)
fitparams = dict(
param=params, value=[None] * n, vary=[True] * n, min=[None] * n, max=[None] * n,
)
if function == "linear":
fitparams["value"] = [0, 1]
fitparams["vary"] = [False, True]
fitparams["min"] = [None, 0]
elif function == "gaussian":
fitparams["min"] = [0, None, None]
return fitparams
fitparams_table_source = ColumnDataSource(dict(param=[], value=[], vary=[], min=[], max=[]))
fitparams_table = DataTable(
source=fitparams_table_source,
columns=[
TableColumn(field="param", title="Parameter", editor=CellEditor()),
TableColumn(field="value", title="Value", editor=NumberEditor()),
TableColumn(field="vary", title="Vary", editor=CheckboxEditor()),
TableColumn(field="min", title="Min", editor=NumberEditor()),
TableColumn(field="max", title="Max", editor=NumberEditor()),
],
height=200,
width=350,
index_position=None,
editable=True,
auto_edit=True,
)
# start with `background` and `gauss` fit functions added
fitparams_add_dropdown_callback(types.SimpleNamespace(item="linear"))
fitparams_add_dropdown_callback(types.SimpleNamespace(item="gaussian"))
fitparams_select.value = ["gaussian-1"] # add selection to gauss
fit_output_textinput = TextAreaInput(title="Fit results:", width=750, height=200)
def proc_all_button_callback(): def proc_all_button_callback():
app_fitctrl.fit_dataset(dataset) for scan in det_data:
if scan["export"]:
pyzebra.fit_scan(
scan, fit_params, fit_from=fit_from_spinner.value, fit_to=fit_to_spinner.value
)
pyzebra.get_area(
scan,
area_method=AREA_METHODS[area_method_radiobutton.active],
lorentz=lorentz_checkbox.active,
)
_update_plot() _update_plot()
_update_table() _update_table()
@ -257,7 +575,15 @@ def create():
proc_all_button.on_click(proc_all_button_callback) proc_all_button.on_click(proc_all_button_callback)
def proc_button_callback(): def proc_button_callback():
app_fitctrl.fit_scan(_get_selected_scan()) scan = _get_selected_scan()
pyzebra.fit_scan(
scan, fit_params, fit_from=fit_from_spinner.value, fit_to=fit_to_spinner.value
)
pyzebra.get_area(
scan,
area_method=AREA_METHODS[area_method_radiobutton.active],
lorentz=lorentz_checkbox.active,
)
_update_plot() _update_plot()
_update_table() _update_table()
@ -265,13 +591,18 @@ def create():
proc_button = Button(label="Process Current", width=145) proc_button = Button(label="Process Current", width=145)
proc_button.on_click(proc_button_callback) proc_button.on_click(proc_button_callback)
area_method_div = Div(text="Intensity:", margin=(5, 5, 0, 5))
area_method_radiobutton = RadioGroup(labels=["Function", "Area"], active=0, width=145)
lorentz_checkbox = CheckboxGroup(labels=["Lorentz Correction"], width=145, margin=(13, 5, 5, 5))
export_preview_textinput = TextAreaInput(title="Export file(s) preview:", width=500, height=400) export_preview_textinput = TextAreaInput(title="Export file(s) preview:", width=500, height=400)
def _update_preview(): def _update_preview():
with tempfile.TemporaryDirectory() as temp_dir: with tempfile.TemporaryDirectory() as temp_dir:
temp_file = temp_dir + "/temp" temp_file = temp_dir + "/temp"
export_data = [] export_data = []
for scan in dataset: for scan in det_data:
if scan["export"]: if scan["export"]:
export_data.append(scan) export_data.append(scan)
@ -294,18 +625,18 @@ def create():
content = "" content = ""
file_content.append(content) file_content.append(content)
app_dlfiles.set_contents(file_content) js_data.data.update(content=file_content)
export_preview_textinput.value = exported_content export_preview_textinput.value = exported_content
def export_target_select_callback(_attr, _old, new): def export_target_select_callback(_attr, _old, new):
app_dlfiles.set_extensions(EXPORT_TARGETS[new]) js_data.data.update(ext=EXPORT_TARGETS[new])
_update_preview() _update_preview()
export_target_select = Select( export_target_select = Select(
title="Export target:", options=list(EXPORT_TARGETS.keys()), value="fullprof", width=80 title="Export target:", options=list(EXPORT_TARGETS.keys()), value="fullprof", width=80
) )
export_target_select.on_change("value", export_target_select_callback) export_target_select.on_change("value", export_target_select_callback)
app_dlfiles.set_extensions(EXPORT_TARGETS[export_target_select.value]) js_data.data.update(ext=EXPORT_TARGETS[export_target_select.value])
def hkl_precision_select_callback(_attr, _old, _new): def hkl_precision_select_callback(_attr, _old, _new):
_update_preview() _update_preview()
@ -315,53 +646,42 @@ def create():
) )
hkl_precision_select.on_change("value", hkl_precision_select_callback) hkl_precision_select.on_change("value", hkl_precision_select_callback)
area_method_div = Div(text="Intensity:", margin=(5, 5, 0, 5)) save_button = Button(label="Download File(s)", button_type="success", width=200)
save_button.js_on_click(CustomJS(args={"js_data": js_data}, code=javaScript))
fitpeak_controls = row( fitpeak_controls = row(
column( column(fitparams_add_dropdown, fitparams_select, fitparams_remove_button),
app_fitctrl.add_function_button, fitparams_table,
app_fitctrl.function_select,
app_fitctrl.remove_function_button,
),
app_fitctrl.params_table,
Spacer(width=20), Spacer(width=20),
column( column(fit_from_spinner, lorentz_checkbox, area_method_div, area_method_radiobutton),
app_fitctrl.from_spinner, column(fit_to_spinner, proc_button, proc_all_button),
app_fitctrl.lorentz_checkbox,
area_method_div,
app_fitctrl.area_method_radiogroup,
),
column(app_fitctrl.to_spinner, proc_button, proc_all_button),
) )
scan_layout = column( scan_layout = column(
scan_table, scan_table,
row(app_inputctrl.monitor_spinner, column(Spacer(height=19), restore_button)), row(monitor_spinner, column(Spacer(height=19), restore_button)),
row(column(Spacer(height=19), merge_button), merge_from_select), row(column(Spacer(height=19), merge_button), merge_from_select),
) )
upload_div = Div(text="or upload new .ccl/.dat files:", margin=(5, 5, 0, 5))
append_upload_div = Div(text="append extra files:", margin=(5, 5, 0, 5))
import_layout = column( import_layout = column(
app_inputctrl.filelist_select, file_select,
row(app_inputctrl.open_button, app_inputctrl.append_button), row(file_open_button, file_append_button),
upload_div, upload_div,
app_inputctrl.upload_button, upload_button,
append_upload_div, append_upload_div,
app_inputctrl.append_upload_button, append_upload_button,
) )
export_layout = column( export_layout = column(
export_preview_textinput, export_preview_textinput,
row( row(
export_target_select, export_target_select, hkl_precision_select, column(Spacer(height=19), row(save_button))
hkl_precision_select,
column(Spacer(height=19), row(app_dlfiles.button)),
), ),
) )
tab_layout = column( tab_layout = column(
row(import_layout, scan_layout, plot, Spacer(width=30), export_layout), row(import_layout, scan_layout, plot, Spacer(width=30), export_layout),
row(fitpeak_controls, app_fitctrl.result_textarea), row(fitpeak_controls, fit_output_textinput),
) )
return Panel(child=tab_layout, title="ccl integrate") return Panel(child=tab_layout, title="ccl integrate")

View File

@ -1,724 +0,0 @@
import base64
import io
import os
import subprocess
import tempfile
import numpy as np
from bokeh.io import curdoc
from bokeh.layouts import column, row
from bokeh.models import (
Arrow,
Button,
CheckboxGroup,
ColumnDataSource,
Div,
FileInput,
HoverTool,
Legend,
LegendItem,
MultiSelect,
NormalHead,
NumericInput,
Panel,
RadioGroup,
Select,
Spacer,
Spinner,
TextAreaInput,
TextInput,
)
from bokeh.palettes import Dark2
from bokeh.plotting import figure
import pyzebra
from pyzebra import app
# Default angular chunk size (degrees) used when a sorting angle is selected;
# keys cover both bisecting (2theta/chi/phi/omega) and normal-beam (gamma/nu/omega) angles.
ANG_CHUNK_DEFAULTS = {"2theta": 30, "gamma": 30, "omega": 30, "chi": 35, "phi": 35, "nu": 10}
# Sorting-angle options offered per geometry: bisecting (BI) and normal beam (NB).
SORT_OPT_BI = ["2theta", "chi", "phi", "omega"]
SORT_OPT_NB = ["gamma", "nu", "omega"]
def create():
    """Build the "ccl prepare" Bokeh tab.

    Assembles the widgets for loading GEOM/CFL/CIF instrument and crystal
    files, generating sorted hkl/mhkl reflection lists via SXTAL_REFGEN, and
    plotting reciprocal-space cuts of the resulting (or uploaded) lists.

    Returns:
        bokeh.models.Panel: the fully laid-out tab titled "ccl prepare".
    """
    doc = curdoc()
    # NOTE(review): `doc.logger` is attached elsewhere in the app — verify it exists
    # before this tab is created.
    log = doc.logger

    # Shared mutable state, updated by the file-open callbacks and the
    # geometry radiogroup below.
    ang_lims = {}   # angular limits parsed from a .geom file
    cif_data = {}   # parameters parsed from a .cif file
    params = {}     # parameters parsed from a .cfl file
    res_files = {}  # generated result files: {filename: file content as str}
    _update_slice = None  # closure created by _prepare_plotting; None until first plot

    app_dlfiles = app.DownloadFiles(n_files=1)

    # --- Angular limits inputs -------------------------------------------------
    anglim_div = Div(text="Angular min/max limits:", margin=(5, 5, 0, 5))
    sttgamma_ti = TextInput(title="stt/gamma", width=100)
    omega_ti = TextInput(title="omega", width=100)
    chinu_ti = TextInput(title="chi/nu", width=100)
    phi_ti = TextInput(title="phi", width=100)

    def _update_ang_lims(ang_lims):
        """Push parsed angular limits into the text inputs (min/max pairs)."""
        sttgamma_ti.value = " ".join(ang_lims["gamma"][:2])
        omega_ti.value = " ".join(ang_lims["omega"][:2])
        if ang_lims["geom"] == "nb":
            chinu_ti.value = " ".join(ang_lims["nu"][:2])
            phi_ti.value = ""  # phi is not used in normal-beam geometry
        else:  # ang_lims["geom"] == "bi"
            chinu_ti.value = " ".join(ang_lims["chi"][:2])
            phi_ti.value = " ".join(ang_lims["phi"][:2])

    def _update_params(params):
        """Push parsed CFL/CIF parameters into the corresponding widgets.

        Only keys present in `params` are applied; the rest stay untouched.
        """
        if "WAVE" in params:
            wavelen_input.value = params["WAVE"]
        if "SPGR" in params:
            cryst_space_group.value = params["SPGR"]
        if "CELL" in params:
            cryst_cell.value = params["CELL"]
        if "UBMAT" in params:
            ub_matrix.value = " ".join(params["UBMAT"])
        if "HLIM" in params:
            ranges_hkl.value = params["HLIM"]
        if "SRANG" in params:
            ranges_srang.value = params["SRANG"]
        # NOTE(review): "lattiCE" / "kvect" casing mirrors the CFL keys used by
        # read_cfl_file/export_cfl_file — keep in sync with those functions.
        if "lattiCE" in params:
            magstruct_lattice.value = params["lattiCE"]
        if "kvect" in params:
            magstruct_kvec.value = params["kvect"]

    # --- File-open widgets (base64 payloads from bokeh FileInput) --------------
    def open_geom_callback(_attr, _old, new):
        nonlocal ang_lims
        with io.StringIO(base64.b64decode(new).decode()) as fileobj:
            ang_lims = pyzebra.read_geom_file(fileobj)
        _update_ang_lims(ang_lims)

    open_geom_div = Div(text="Open GEOM:")
    open_geom = FileInput(accept=".geom", width=200)
    open_geom.on_change("value", open_geom_callback)

    def open_cfl_callback(_attr, _old, new):
        nonlocal params
        with io.StringIO(base64.b64decode(new).decode()) as fileobj:
            params = pyzebra.read_cfl_file(fileobj)
        _update_params(params)

    open_cfl_div = Div(text="Open CFL:")
    open_cfl = FileInput(accept=".cfl", width=200)
    open_cfl.on_change("value", open_cfl_callback)

    def open_cif_callback(_attr, _old, new):
        nonlocal cif_data
        with io.StringIO(base64.b64decode(new).decode()) as fileobj:
            cif_data = pyzebra.read_cif_file(fileobj)
        _update_params(cif_data)

    open_cif_div = Div(text="Open CIF:")
    open_cif = FileInput(accept=".cif", width=200)
    open_cif.on_change("value", open_cif_callback)

    # --- Wavelength ------------------------------------------------------------
    wavelen_div = Div(text="Wavelength:", margin=(5, 5, 0, 5))
    wavelen_input = TextInput(title="value", width=70)

    def wavelen_select_callback(_attr, _old, new):
        # Selecting a preset fills the free-text input; clearing resets it.
        if new:
            wavelen_input.value = new
        else:
            wavelen_input.value = ""

    wavelen_select = Select(
        title="preset", options=["", "0.788", "1.178", "1.383", "2.305"], width=70
    )
    wavelen_select.on_change("value", wavelen_select_callback)

    # --- Crystal structure -----------------------------------------------------
    cryst_div = Div(text="Crystal structure:", margin=(5, 5, 0, 5))
    cryst_space_group = TextInput(title="space group", width=100)
    cryst_cell = TextInput(title="cell", width=250)

    def ub_matrix_calc_callback():
        """Compute a UB matrix from the space group and cell via pyzebra."""
        params = dict()  # local dict; deliberately shadows the closure `params`
        params["SPGR"] = cryst_space_group.value
        params["CELL"] = cryst_cell.value

        try:
            ub = pyzebra.calc_ub_matrix(params, log=log)
        except Exception as e:
            log.exception(e)
            return

        ub_matrix.value = " ".join(ub)

    ub_matrix_calc = Button(label="UB matrix:", button_type="primary", width=100)
    ub_matrix_calc.on_click(ub_matrix_calc_callback)
    # Title is a zero-width space so the input aligns with titled neighbors.
    ub_matrix = TextInput(title="\u200B", width=600)

    # --- Ranges and magnetic structure ----------------------------------------
    ranges_div = Div(text="Ranges:", margin=(5, 5, 0, 5))
    ranges_hkl = TextInput(title="HKL", value="-25 25 -25 25 -25 25", width=250)
    ranges_srang = TextInput(title="sin(​θ​)/λ", value="0.0 0.7", width=100)

    magstruct_div = Div(text="Magnetic structure:", margin=(5, 5, 0, 5))
    magstruct_lattice = TextInput(title="lattice", width=100)
    magstruct_kvec = TextAreaInput(title="k vector", width=150)

    # --- Sorting priority selectors; picking an angle loads its default Δ chunk
    def sorting0_callback(_attr, _old, new):
        sorting_0_dt.value = ANG_CHUNK_DEFAULTS[new]

    def sorting1_callback(_attr, _old, new):
        sorting_1_dt.value = ANG_CHUNK_DEFAULTS[new]

    def sorting2_callback(_attr, _old, new):
        sorting_2_dt.value = ANG_CHUNK_DEFAULTS[new]

    sorting_0 = Select(title="1st", width=100)
    sorting_0.on_change("value", sorting0_callback)
    sorting_0_dt = NumericInput(title="Δ", width=70)
    sorting_1 = Select(title="2nd", width=100)
    sorting_1.on_change("value", sorting1_callback)
    sorting_1_dt = NumericInput(title="Δ", width=70)
    sorting_2 = Select(title="3rd", width=100)
    sorting_2.on_change("value", sorting2_callback)
    sorting_2_dt = NumericInput(title="Δ", width=70)

    def geom_radiogroup_callback(_attr, _old, new):
        """Load defaults for the chosen geometry (0 = bisecting, 1 = normal beam)."""
        nonlocal ang_lims, params
        if new == 0:
            geom_file = pyzebra.get_zebraBI_default_geom_file()
            sort_opt = SORT_OPT_BI
        else:
            geom_file = pyzebra.get_zebraNB_default_geom_file()
            sort_opt = SORT_OPT_NB
        cfl_file = pyzebra.get_zebra_default_cfl_file()

        ang_lims = pyzebra.read_geom_file(geom_file)
        _update_ang_lims(ang_lims)
        params = pyzebra.read_cfl_file(cfl_file)
        _update_params(params)

        sorting_0.options = sorting_1.options = sorting_2.options = sort_opt
        sorting_0.value = sort_opt[0]
        sorting_1.value = sort_opt[1]
        sorting_2.value = sort_opt[2]

    geom_radiogroup_div = Div(text="Geometry:", margin=(5, 5, 0, 5))
    geom_radiogroup = RadioGroup(labels=["bisecting", "normal beam"], width=150)
    geom_radiogroup.on_change("active", geom_radiogroup_callback)
    geom_radiogroup.active = 0  # triggers the callback → loads BI defaults

    def go_button_callback():
        """Export geom/cfl files, run SXTAL_REFGEN per k-vector, collect results.

        Results (hkl/mhkl, raw and sorted) are stored in `res_files` and listed
        in the `created_lists` widget.
        """
        # Read min/max pairs back from the text inputs into ang_lims.
        ang_lims["gamma"][0], ang_lims["gamma"][1] = sttgamma_ti.value.strip().split()
        ang_lims["omega"][0], ang_lims["omega"][1] = omega_ti.value.strip().split()
        if ang_lims["geom"] == "nb":
            ang_lims["nu"][0], ang_lims["nu"][1] = chinu_ti.value.strip().split()
        else:  # ang_lims["geom"] == "bi"
            ang_lims["chi"][0], ang_lims["chi"][1] = chinu_ti.value.strip().split()
            ang_lims["phi"][0], ang_lims["phi"][1] = phi_ti.value.strip().split()

        # CIF values take precedence over previously loaded CFL values.
        if cif_data:
            params.update(cif_data)

        params["WAVE"] = wavelen_input.value
        params["SPGR"] = cryst_space_group.value
        params["CELL"] = cryst_cell.value
        params["UBMAT"] = ub_matrix.value.split()
        params["HLIM"] = ranges_hkl.value
        params["SRANG"] = ranges_srang.value
        params["lattiCE"] = magstruct_lattice.value
        kvects = magstruct_kvec.value.split("\n")

        with tempfile.TemporaryDirectory() as temp_dir:
            geom_path = os.path.join(temp_dir, "zebra.geom")
            if open_geom.value:
                # Use the uploaded .geom file as a template for the export.
                geom_template = io.StringIO(base64.b64decode(open_geom.value).decode())
            else:
                geom_template = None
            pyzebra.export_geom_file(geom_path, ang_lims, geom_template)

            log.info(f"Content of {geom_path}:")
            with open(geom_path) as f:
                log.info(f.read())

            priority = [sorting_0.value, sorting_1.value, sorting_2.value]
            chunks = [sorting_0_dt.value, sorting_1_dt.value, sorting_2_dt.value]
            if geom_radiogroup.active == 0:
                sort_hkl_file = pyzebra.sort_hkl_file_bi
                # Fill in any BI angles not explicitly chosen by the user.
                priority.extend(set(SORT_OPT_BI) - set(priority))
            else:
                sort_hkl_file = pyzebra.sort_hkl_file_nb

            # run sxtal_refgen for each kvect provided
            for i, kvect in enumerate(kvects, start=1):
                params["kvect"] = kvect
                if open_cfl.filename:
                    base_fname = f"{os.path.splitext(open_cfl.filename)[0]}_{i}"
                else:
                    base_fname = f"zebra_{i}"

                cfl_path = os.path.join(temp_dir, base_fname + ".cfl")
                if open_cfl.value:
                    cfl_template = io.StringIO(base64.b64decode(open_cfl.value).decode())
                else:
                    cfl_template = None
                pyzebra.export_cfl_file(cfl_path, params, cfl_template)

                log.info(f"Content of {cfl_path}:")
                with open(cfl_path) as f:
                    log.info(f.read())

                comp_proc = subprocess.run(
                    [pyzebra.SXTAL_REFGEN_PATH, cfl_path],
                    cwd=temp_dir,
                    check=True,
                    stdout=subprocess.PIPE,
                    stderr=subprocess.STDOUT,
                    text=True,
                )
                log.info(" ".join(comp_proc.args))
                log.info(comp_proc.stdout)

                if i == 1:  # all hkl files are identical, so keep only one
                    hkl_fname = base_fname + ".hkl"
                    hkl_fpath = os.path.join(temp_dir, hkl_fname)
                    with open(hkl_fpath) as f:
                        res_files[hkl_fname] = f.read()

                    hkl_fname_sorted = base_fname + "_sorted.hkl"
                    hkl_fpath_sorted = os.path.join(temp_dir, hkl_fname_sorted)
                    sort_hkl_file(hkl_fpath, hkl_fpath_sorted, priority, chunks)
                    with open(hkl_fpath_sorted) as f:
                        res_files[hkl_fname_sorted] = f.read()

                mhkl_fname = base_fname + ".mhkl"
                mhkl_fpath = os.path.join(temp_dir, mhkl_fname)
                with open(mhkl_fpath) as f:
                    res_files[mhkl_fname] = f.read()

                mhkl_fname_sorted = base_fname + "_sorted.mhkl"
                # NOTE(review): path is built from hkl_fname_sorted, not
                # mhkl_fname_sorted — looks like a copy/paste slip; for i > 1 this
                # also references a name only defined when i == 1. Verify intent.
                mhkl_fpath_sorted = os.path.join(temp_dir, hkl_fname_sorted)
                sort_hkl_file(mhkl_fpath, mhkl_fpath_sorted, priority, chunks)
                with open(mhkl_fpath_sorted) as f:
                    res_files[mhkl_fname_sorted] = f.read()

        created_lists.options = list(res_files)

    go_button = Button(label="GO", button_type="primary", width=50)
    go_button.on_click(go_button_callback)

    def created_lists_callback(_attr, _old, new):
        # Show the selected generated file and stage it for download.
        sel_file = new[0]
        file_text = res_files[sel_file]
        preview_lists.value = file_text
        app_dlfiles.set_contents([file_text])
        app_dlfiles.set_names([sel_file])

    created_lists = MultiSelect(title="Created lists:", width=200, height=150)
    created_lists.on_change("value", created_lists_callback)
    preview_lists = TextAreaInput(title="Preview selected list:", width=600, height=150)

    def plot_list_callback():
        nonlocal _update_slice
        # NOTE(review): MultiSelect.value is a list of selected names, while
        # plot_file_callback passes a file extension to parse_hkl — confirm
        # parse_hkl accepts this form.
        fname = created_lists.value
        with io.StringIO(preview_lists.value) as fileobj:
            fdata = pyzebra.parse_hkl(fileobj, fname)

        _update_slice = _prepare_plotting(fname, [fdata])
        _update_slice()

    plot_list = Button(label="Plot selected list", button_type="primary", width=200)
    plot_list.on_click(plot_list_callback)

    # Plot
    upload_data_div = Div(text="Open hkl/mhkl data:")
    upload_data = FileInput(accept=".hkl,.mhkl", multiple=True, width=200)

    # Extent of the reciprocal-space grid drawn behind the data points.
    min_grid_x = -10
    max_grid_x = 10
    min_grid_y = -10
    max_grid_y = 10
    cmap = Dark2[8]  # one color per propagation vector
    syms = ["circle", "inverted_triangle", "square", "diamond", "star", "triangle"]

    def _prepare_plotting(filenames, filedata):
        """Set up the reciprocal-plane plot for the given parsed hkl data.

        Computes the in-plane coordinate system from the cell parameters and
        the user-chosen normal / in-plane X directions, draws arrows and grid
        lines, and collects per-reflection coordinates matching one of the
        user-supplied k vectors.

        Returns:
            callable: `_update_slice`, which re-filters and re-draws the
            scatter points for the current cut value/tolerance.
        """
        orth_dir = list(map(float, hkl_normal.value.split()))
        x_dir = list(map(float, hkl_in_plane_x.value.split()))

        k = np.array(k_vectors.value.split()).astype(float).reshape(-1, 3)
        tol_k = tol_k_ni.value

        # Cell angles in radians (cell = a b c alpha beta gamma).
        lattice = list(map(float, cryst_cell.value.strip().split()))
        alpha = lattice[3] * np.pi / 180.0
        beta = lattice[4] * np.pi / 180.0
        gamma = lattice[5] * np.pi / 180.0

        # reciprocal angle parameters
        beta_star = np.arccos(
            (np.cos(alpha) * np.cos(gamma) - np.cos(beta)) / (np.sin(alpha) * np.sin(gamma))
        )
        gamma_star = np.arccos(
            (np.cos(alpha) * np.cos(beta) - np.cos(gamma)) / (np.sin(alpha) * np.sin(beta))
        )

        # conversion matrix (hkl -> cartesian reciprocal coordinates)
        M = np.array(
            [
                [1, np.cos(gamma_star), np.cos(beta_star)],
                [0, np.sin(gamma_star), -np.sin(beta_star) * np.cos(alpha)],
                [0, 0, np.sin(beta_star) * np.sin(alpha)],
            ]
        )

        # Get last lattice vector
        y_dir = np.cross(x_dir, orth_dir)  # Second axes of plotting plane

        # Rescale such that smallest element of y-dir vector is 1
        y_dir2 = y_dir[y_dir != 0]
        min_val = np.min(np.abs(y_dir2))
        y_dir = y_dir / min_val

        # Possibly flip direction of ydir:
        if y_dir[np.argmax(abs(y_dir))] < 0:
            y_dir = -y_dir

        # Display the resulting y_dir
        hkl_in_plane_y.value = " ".join([f"{val:.1f}" for val in y_dir])

        # Save length of lattice vectors
        x_length = np.linalg.norm(x_dir)
        y_length = np.linalg.norm(y_dir)

        # Save str for labels
        xlabel_str = " ".join(map(str, x_dir))
        ylabel_str = " ".join(map(str, y_dir))

        # Normalize lattice vectors
        y_dir = y_dir / np.linalg.norm(y_dir)
        x_dir = x_dir / np.linalg.norm(x_dir)
        orth_dir = orth_dir / np.linalg.norm(orth_dir)

        # Calculate cartesian equivalents of lattice vectors
        x_c = np.matmul(M, x_dir)
        y_c = np.matmul(M, y_dir)
        o_c = np.matmul(M, orth_dir)

        # Calculate vertical direction in plotting plane
        y_vert = np.cross(x_c, o_c)  # vertical direction in plotting plane
        if y_vert[np.argmax(abs(y_vert))] < 0:
            y_vert = -y_vert
        y_vert = y_vert / np.linalg.norm(y_vert)

        # Normalize all directions
        y_c = y_c / np.linalg.norm(y_c)
        x_c = x_c / np.linalg.norm(x_c)
        o_c = o_c / np.linalg.norm(o_c)

        # Read all data
        hkl_coord = []
        intensity_vec = []
        k_flag_vec = []
        file_flag_vec = []

        for j, fdata in enumerate(filedata):
            for ind in range(len(fdata["counts"])):
                # Recognize k_flag_vec
                hkl = np.array([fdata["h"][ind], fdata["k"][ind], fdata["l"][ind]])
                reduced_hkl_m = np.minimum(1 - hkl % 1, hkl % 1)
                for k_ind, _k in enumerate(k):
                    if all(np.abs(reduced_hkl_m - _k) < tol_k):
                        k_flag_vec.append(k_ind)
                        break
                else:
                    # not required — reflection matches no k vector; skip it
                    continue

                # Save data
                hkl_coord.append(hkl)
                intensity_vec.append(fdata["counts"][ind])
                file_flag_vec.append(j)

        x_spacing = np.dot(M @ x_dir, x_c) * x_length
        y_spacing = np.dot(M @ y_dir, y_vert) * y_length
        y_spacingx = np.dot(M @ y_dir, x_c) * y_length

        # Plot coordinate system
        arrow1.x_end = x_spacing
        arrow1.y_end = 0
        arrow2.x_end = y_spacingx
        arrow2.y_end = y_spacing

        # Add labels
        kvect_source.data.update(
            x=[x_spacing / 4, -0.1],
            y=[x_spacing / 4 - 0.5, y_spacing / 2],
            text=[xlabel_str, ylabel_str],
        )

        # Plot grid lines (major: step 1; minor: step 0.5)
        xs, ys = [], []
        xs_minor, ys_minor = [], []
        for yy in np.arange(min_grid_y, max_grid_y, 1):
            # Calculate end and start point
            hkl1 = min_grid_x * x_dir + yy * y_dir
            hkl2 = max_grid_x * x_dir + yy * y_dir
            hkl1 = M @ hkl1
            hkl2 = M @ hkl2

            # Project points onto axes
            x1 = np.dot(x_c, hkl1) * x_length
            y1 = np.dot(y_vert, hkl1) * y_length
            x2 = np.dot(x_c, hkl2) * x_length
            y2 = np.dot(y_vert, hkl2) * y_length

            xs.append([x1, x2])
            ys.append([y1, y2])

        for xx in np.arange(min_grid_x, max_grid_x, 1):
            # Calculate end and start point
            hkl1 = xx * x_dir + min_grid_y * y_dir
            hkl2 = xx * x_dir + max_grid_y * y_dir
            hkl1 = M @ hkl1
            hkl2 = M @ hkl2

            # Project points onto axes
            x1 = np.dot(x_c, hkl1) * x_length
            y1 = np.dot(y_vert, hkl1) * y_length
            x2 = np.dot(x_c, hkl2) * x_length
            y2 = np.dot(y_vert, hkl2) * y_length

            xs.append([x1, x2])
            ys.append([y1, y2])

        for yy in np.arange(min_grid_y, max_grid_y, 0.5):
            # Calculate end and start point
            hkl1 = min_grid_x * x_dir + yy * y_dir
            hkl2 = max_grid_x * x_dir + yy * y_dir
            hkl1 = M @ hkl1
            hkl2 = M @ hkl2

            # Project points onto axes
            x1 = np.dot(x_c, hkl1) * x_length
            y1 = np.dot(y_vert, hkl1) * y_length
            x2 = np.dot(x_c, hkl2) * x_length
            y2 = np.dot(y_vert, hkl2) * y_length

            xs_minor.append([x1, x2])
            ys_minor.append([y1, y2])

        for xx in np.arange(min_grid_x, max_grid_x, 0.5):
            # Calculate end and start point
            hkl1 = xx * x_dir + min_grid_y * y_dir
            hkl2 = xx * x_dir + max_grid_y * y_dir
            hkl1 = M @ hkl1
            hkl2 = M @ hkl2

            # Project points onto axes
            x1 = np.dot(x_c, hkl1) * x_length
            y1 = np.dot(y_vert, hkl1) * y_length
            x2 = np.dot(x_c, hkl2) * x_length
            y2 = np.dot(y_vert, hkl2) * y_length

            xs_minor.append([x1, x2])
            ys_minor.append([y1, y2])

        grid_source.data.update(xs=xs, ys=ys)
        minor_grid_source.data.update(xs=xs_minor, ys=ys_minor)

        def _update_slice():
            """Re-draw scatter points for the current cut position/tolerance."""
            cut_tol = hkl_delta.value
            cut_or = hkl_cut.value

            # different symbols based on file number
            file_flag = 0 in disting_opt_cb.active
            # scale marker size according to intensity
            intensity_flag = 1 in disting_opt_cb.active
            # use color to mark different propagation vectors
            prop_legend_flag = 2 in disting_opt_cb.active

            scan_x, scan_y = [], []
            scan_m, scan_s, scan_c, scan_l, scan_hkl = [], [], [], [], []
            for j in range(len(hkl_coord)):
                # Get middle hkl from list
                hklm = M @ hkl_coord[j]

                # Decide if point is in the cut
                proj = np.dot(hklm, o_c)
                if abs(proj - cut_or) >= cut_tol:
                    continue

                # Project onto axes
                hklmx = np.dot(hklm, x_c)
                hklmy = np.dot(hklm, y_vert)

                if intensity_flag and max(intensity_vec) != 0:
                    markersize = max(6, int(intensity_vec[j] / max(intensity_vec) * 30))
                else:
                    markersize = 6

                if file_flag:
                    plot_symbol = syms[file_flag_vec[j]]
                else:
                    plot_symbol = "circle"

                if prop_legend_flag:
                    col_value = cmap[k_flag_vec[j]]
                else:
                    col_value = "black"

                # Plot middle point of scan
                scan_x.append(hklmx)
                scan_y.append(hklmy)
                scan_m.append(plot_symbol)
                scan_s.append(markersize)
                # Color and legend label
                scan_c.append(col_value)
                scan_l.append(filenames[file_flag_vec[j]])
                scan_hkl.append(hkl_coord[j])

            scatter_source.data.update(
                x=scan_x, y=scan_y, m=scan_m, s=scan_s, c=scan_c, l=scan_l, hkl=scan_hkl
            )

            # Legend items for different file entries (symbol)
            legend_items = []
            if file_flag:
                labels, inds = np.unique(scatter_source.data["l"], return_index=True)
                for label, ind in zip(labels, inds):
                    legend_items.append(LegendItem(label=label, renderers=[scatter], index=ind))

            # Legend items for propagation vector (color)
            if prop_legend_flag:
                labels, inds = np.unique(scatter_source.data["c"], return_index=True)
                for label, ind in zip(labels, inds):
                    label = f"k={k[cmap.index(label)]}"
                    legend_items.append(LegendItem(label=label, renderers=[scatter], index=ind))

            plot.legend.items = legend_items

        return _update_slice

    def plot_file_callback():
        """Parse the uploaded hkl/mhkl file(s) and (re)build the plot."""
        nonlocal _update_slice
        fnames = []
        fdata = []
        for j, fname in enumerate(upload_data.filename):
            with io.StringIO(base64.b64decode(upload_data.value[j]).decode()) as file:
                _, ext = os.path.splitext(fname)
                try:
                    file_data = pyzebra.parse_hkl(file, ext)
                except Exception as e:
                    log.exception(e)
                    return

            fnames.append(fname)
            fdata.append(file_data)

        _update_slice = _prepare_plotting(fnames, fdata)
        _update_slice()

    plot_file = Button(label="Plot selected file(s)", button_type="primary", width=200)
    plot_file.on_click(plot_file_callback)

    # --- Figure and glyph setup ------------------------------------------------
    plot = figure(height=550, width=550 + 32, tools="pan,wheel_zoom,reset")
    plot.toolbar.logo = None

    # Axes/grids are hidden; the custom hkl grid is drawn via multi_line below.
    plot.xaxis.visible = False
    plot.xgrid.visible = False
    plot.yaxis.visible = False
    plot.ygrid.visible = False

    arrow1 = Arrow(x_start=0, y_start=0, x_end=0, y_end=0, end=NormalHead(size=10))
    plot.add_layout(arrow1)
    arrow2 = Arrow(x_start=0, y_start=0, x_end=0, y_end=0, end=NormalHead(size=10))
    plot.add_layout(arrow2)

    kvect_source = ColumnDataSource(dict(x=[], y=[], text=[]))
    plot.text(source=kvect_source)

    grid_source = ColumnDataSource(dict(xs=[], ys=[]))
    plot.multi_line(source=grid_source, line_color="gray")

    minor_grid_source = ColumnDataSource(dict(xs=[], ys=[]))
    plot.multi_line(source=minor_grid_source, line_color="gray", line_dash="dotted")

    scatter_source = ColumnDataSource(dict(x=[], y=[], m=[], s=[], c=[], l=[], hkl=[]))
    scatter = plot.scatter(
        source=scatter_source, marker="m", size="s", fill_color="c", line_color="c"
    )
    # Auto-range follows the data points only, not the grid lines.
    plot.x_range.renderers = [scatter]
    plot.y_range.renderers = [scatter]

    plot.add_layout(Legend(items=[], location="top_left", click_policy="hide"))

    plot.add_tools(HoverTool(renderers=[scatter], tooltips=[("hkl", "@hkl")]))

    # --- Slice controls --------------------------------------------------------
    hkl_div = Div(text="HKL:", margin=(5, 5, 0, 5))
    hkl_normal = TextInput(title="normal", value="0 0 1", width=70)

    def hkl_cut_callback(_attr, _old, _new):
        # Only re-slice once a plot has been prepared.
        if _update_slice is not None:
            _update_slice()

    hkl_cut = Spinner(title="cut", value=0, step=0.1, width=70)
    hkl_cut.on_change("value_throttled", hkl_cut_callback)

    hkl_delta = NumericInput(title="delta", value=0.1, mode="float", width=70)
    hkl_in_plane_x = TextInput(title="in-plane X", value="1 0 0", width=70)
    hkl_in_plane_y = TextInput(title="in-plane Y", value="", width=100, disabled=True)

    disting_opt_div = Div(text="Distinguish options:", margin=(5, 5, 0, 5))
    disting_opt_cb = CheckboxGroup(
        labels=["files (symbols)", "intensities (size)", "k vectors nucl/magn (colors)"],
        active=[0, 1, 2],
        width=200,
    )

    k_vectors = TextAreaInput(
        title="k vectors:", value="0.0 0.0 0.0\n0.5 0.0 0.0\n0.5 0.5 0.0", width=150
    )
    tol_k_ni = NumericInput(title="k tolerance:", value=0.01, mode="float", width=100)

    # --- Layout ----------------------------------------------------------------
    fileinput_layout = row(open_cfl_div, open_cfl, open_cif_div, open_cif, open_geom_div, open_geom)

    geom_layout = column(geom_radiogroup_div, geom_radiogroup)
    wavelen_layout = column(wavelen_div, row(wavelen_select, wavelen_input))
    anglim_layout = column(anglim_div, row(sttgamma_ti, omega_ti, chinu_ti, phi_ti))
    cryst_layout = column(cryst_div, row(cryst_space_group, cryst_cell))
    ubmat_layout = row(column(Spacer(height=19), ub_matrix_calc), ub_matrix)
    ranges_layout = column(ranges_div, row(ranges_hkl, ranges_srang))
    magstruct_layout = column(magstruct_div, row(magstruct_lattice, magstruct_kvec))
    sorting_layout = row(
        sorting_0,
        sorting_0_dt,
        Spacer(width=30),
        sorting_1,
        sorting_1_dt,
        Spacer(width=30),
        sorting_2,
        sorting_2_dt,
    )

    column1_layout = column(
        fileinput_layout,
        Spacer(height=10),
        row(geom_layout, wavelen_layout, Spacer(width=50), anglim_layout),
        cryst_layout,
        ubmat_layout,
        row(ranges_layout, Spacer(width=50), magstruct_layout),
        row(sorting_layout, Spacer(width=30), column(Spacer(height=19), go_button)),
        row(created_lists, preview_lists),
        row(app_dlfiles.button, plot_list),
    )
    column2_layout = column(
        row(upload_data_div, upload_data, plot_file),
        row(
            plot,
            column(
                hkl_div,
                row(hkl_normal, hkl_cut, hkl_delta),
                row(hkl_in_plane_x, hkl_in_plane_y),
                k_vectors,
                tol_k_ni,
                disting_opt_div,
                disting_opt_cb,
            ),
        ),
    )
    tab_layout = row(column1_layout, column2_layout)

    return Panel(child=tab_layout, title="ccl prepare")

View File

@ -19,12 +19,11 @@ from bokeh.models import (
) )
import pyzebra import pyzebra
from pyzebra import DATA_FACTORY_IMPLEMENTATION, REFLECTION_PRINTER_FORMATS from pyzebra.anatric import DATA_FACTORY_IMPLEMENTATION, REFLECTION_PRINTER_FORMATS
def create(): def create():
doc = curdoc() doc = curdoc()
log = doc.logger
config = pyzebra.AnatricConfig() config = pyzebra.AnatricConfig()
def _load_config_file(file): def _load_config_file(file):
@ -170,7 +169,7 @@ def create():
config.dataFactory_implementation = new config.dataFactory_implementation = new
dataFactory_implementation_select = Select( dataFactory_implementation_select = Select(
title="DataFactory implement.:", options=DATA_FACTORY_IMPLEMENTATION, width=145 title="DataFactory implement.:", options=DATA_FACTORY_IMPLEMENTATION, width=145,
) )
dataFactory_implementation_select.on_change("value", dataFactory_implementation_select_callback) dataFactory_implementation_select.on_change("value", dataFactory_implementation_select_callback)
@ -201,7 +200,7 @@ def create():
config.reflectionPrinter_format = new config.reflectionPrinter_format = new
reflectionPrinter_format_select = Select( reflectionPrinter_format_select = Select(
title="ReflectionPrinter format:", options=REFLECTION_PRINTER_FORMATS, width=145 title="ReflectionPrinter format:", options=REFLECTION_PRINTER_FORMATS, width=145,
) )
reflectionPrinter_format_select.on_change("value", reflectionPrinter_format_select_callback) reflectionPrinter_format_select.on_change("value", reflectionPrinter_format_select_callback)
@ -348,11 +347,7 @@ def create():
with tempfile.TemporaryDirectory() as temp_dir: with tempfile.TemporaryDirectory() as temp_dir:
temp_file = temp_dir + "/config.xml" temp_file = temp_dir + "/config.xml"
config.save_as(temp_file) config.save_as(temp_file)
try: pyzebra.anatric(temp_file, anatric_path=doc.anatric_path, cwd=temp_dir)
pyzebra.anatric(temp_file, anatric_path=doc.anatric_path, cwd=temp_dir, log=log)
except Exception as e:
log.exception(e)
return
with open(os.path.join(temp_dir, config.logfile)) as f_log: with open(os.path.join(temp_dir, config.logfile)) as f_log:
output_log.value = f_log.read() output_log.value = f_log.read()

View File

@ -6,38 +6,50 @@ import numpy as np
from bokeh.io import curdoc from bokeh.io import curdoc
from bokeh.layouts import column, gridplot, row from bokeh.layouts import column, gridplot, row
from bokeh.models import ( from bokeh.models import (
BasicTicker,
BoxZoomTool,
Button, Button,
CellEditor, CellEditor,
CheckboxGroup, CheckboxGroup,
ColumnDataSource, ColumnDataSource,
DataRange1d,
DataTable, DataTable,
Div, Div,
FileInput, FileInput,
LinearColorMapper, Grid,
MultiSelect, MultiSelect,
NumberEditor, NumberEditor,
NumberFormatter, NumberFormatter,
Image,
LinearAxis,
LinearColorMapper,
Panel, Panel,
PanTool,
Plot,
Range1d, Range1d,
ResetTool,
Scatter,
Select, Select,
Spinner, Spinner,
TableColumn, TableColumn,
Tabs, Tabs,
Title,
WheelZoomTool,
) )
from bokeh.plotting import figure from bokeh.palettes import Cividis256, Greys256, Plasma256 # pylint: disable=E0611
import pyzebra import pyzebra
IMAGE_W = 256 IMAGE_W = 256
IMAGE_H = 128 IMAGE_H = 128
IMAGE_PLOT_W = int(IMAGE_W * 2.4) + 52 IMAGE_PLOT_W = int(IMAGE_W * 2) + 52
IMAGE_PLOT_H = int(IMAGE_H * 2.4) + 27 IMAGE_PLOT_H = int(IMAGE_H * 2) + 27
def create(): def create():
doc = curdoc() doc = curdoc()
log = doc.logger zebra_data = []
dataset = [] det_data = {}
cami_meta = {} cami_meta = {}
num_formatter = NumberFormatter(format="0.00", nan_format="") num_formatter = NumberFormatter(format="0.00", nan_format="")
@ -96,15 +108,15 @@ def create():
def _init_datatable(): def _init_datatable():
file_list = [] file_list = []
for scan in dataset: for scan in zebra_data:
file_list.append(os.path.basename(scan["original_filename"])) file_list.append(os.path.basename(scan["original_filename"]))
scan_table_source.data.update( scan_table_source.data.update(
file=file_list, file=file_list,
param=[None] * len(dataset), param=[None] * len(zebra_data),
frame=[None] * len(dataset), frame=[None] * len(zebra_data),
x_pos=[None] * len(dataset), x_pos=[None] * len(zebra_data),
y_pos=[None] * len(dataset), y_pos=[None] * len(zebra_data),
) )
scan_table_source.selected.indices = [] scan_table_source.selected.indices = []
scan_table_source.selected.indices = [0] scan_table_source.selected.indices = [0]
@ -115,7 +127,7 @@ def create():
frame = [] frame = []
x_pos = [] x_pos = []
y_pos = [] y_pos = []
for scan in dataset: for scan in zebra_data:
if "fit" in scan: if "fit" in scan:
framei = scan["fit"]["frame"] framei = scan["fit"]["frame"]
x_posi = scan["fit"]["x_pos"] x_posi = scan["fit"]["x_pos"]
@ -134,17 +146,17 @@ def create():
for f_name in file_select.value: for f_name in file_select.value:
try: try:
new_data.append(pyzebra.read_detector_data(f_name)) new_data.append(pyzebra.read_detector_data(f_name))
except KeyError as e: except KeyError:
log.exception(e) print("Could not read data from the file.")
return return
dataset.extend(new_data) zebra_data.extend(new_data)
_init_datatable() _init_datatable()
def file_open_button_callback(): def file_open_button_callback():
nonlocal dataset nonlocal zebra_data
dataset = [] zebra_data = []
_file_open() _file_open()
file_open_button = Button(label="Open New", width=100) file_open_button = Button(label="Open New", width=100)
@ -158,6 +170,8 @@ def create():
# Scan select # Scan select
def scan_table_select_callback(_attr, old, new): def scan_table_select_callback(_attr, old, new):
nonlocal det_data
if not new: if not new:
# skip empty selections # skip empty selections
return return
@ -172,25 +186,25 @@ def create():
# skip unnecessary update caused by selection drop # skip unnecessary update caused by selection drop
return return
scan = dataset[new[0]] det_data = zebra_data[new[0]]
zebra_mode = scan["zebra_mode"] zebra_mode = det_data["zebra_mode"]
if zebra_mode == "nb": if zebra_mode == "nb":
metadata_table_source.data.update(geom=["normal beam"]) metadata_table_source.data.update(geom=["normal beam"])
else: # zebra_mode == "bi" else: # zebra_mode == "bi"
metadata_table_source.data.update(geom=["bisecting"]) metadata_table_source.data.update(geom=["bisecting"])
if "mf" in scan: if "mf" in det_data:
metadata_table_source.data.update(mf=[scan["mf"][0]]) metadata_table_source.data.update(mf=[det_data["mf"][0]])
else: else:
metadata_table_source.data.update(mf=[None]) metadata_table_source.data.update(mf=[None])
if "temp" in scan: if "temp" in det_data:
metadata_table_source.data.update(temp=[scan["temp"][0]]) metadata_table_source.data.update(temp=[det_data["temp"][0]])
else: else:
metadata_table_source.data.update(temp=[None]) metadata_table_source.data.update(temp=[None])
_update_proj_plots() update_overview_plot()
def scan_table_source_callback(_attr, _old, _new): def scan_table_source_callback(_attr, _old, _new):
pass pass
@ -226,15 +240,12 @@ def create():
autosize_mode="none", autosize_mode="none",
) )
def _get_selected_scan():
return dataset[scan_table_source.selected.indices[0]]
def param_select_callback(_attr, _old, new): def param_select_callback(_attr, _old, new):
if new == "user defined": if new == "user defined":
param = [None] * len(dataset) param = [None] * len(zebra_data)
else: else:
# TODO: which value to take? # TODO: which value to take?
param = [scan[new][0] for scan in dataset] param = [scan[new][0] for scan in zebra_data]
scan_table_source.data["param"] = param scan_table_source.data["param"] = param
_update_param_plot() _update_param_plot()
@ -247,38 +258,42 @@ def create():
) )
param_select.on_change("value", param_select_callback) param_select.on_change("value", param_select_callback)
def _update_proj_plots(): def update_overview_plot():
scan = _get_selected_scan() h5_data = det_data["data"]
counts = scan["counts"] n_im, n_y, n_x = h5_data.shape
n_im, n_y, n_x = counts.shape overview_x = np.mean(h5_data, axis=1)
im_proj_x = np.mean(counts, axis=1) overview_y = np.mean(h5_data, axis=2)
im_proj_y = np.mean(counts, axis=2)
# normalize for simpler colormapping # normalize for simpler colormapping
im_proj_max_val = max(np.max(im_proj_x), np.max(im_proj_y)) overview_max_val = max(np.max(overview_x), np.max(overview_y))
im_proj_x = 1000 * im_proj_x / im_proj_max_val overview_x = 1000 * overview_x / overview_max_val
im_proj_y = 1000 * im_proj_y / im_proj_max_val overview_y = 1000 * overview_y / overview_max_val
proj_x_image_source.data.update(image=[im_proj_x], dw=[n_x], dh=[n_im]) overview_plot_x_image_source.data.update(image=[overview_x], dw=[n_x], dh=[n_im])
proj_y_image_source.data.update(image=[im_proj_y], dw=[n_y], dh=[n_im]) overview_plot_y_image_source.data.update(image=[overview_y], dw=[n_y], dh=[n_im])
if proj_auto_checkbox.active: if proj_auto_checkbox.active:
im_min = min(np.min(im_proj_x), np.min(im_proj_y)) im_min = min(np.min(overview_x), np.min(overview_y))
im_max = max(np.max(im_proj_x), np.max(im_proj_y)) im_max = max(np.max(overview_x), np.max(overview_y))
proj_display_min_spinner.value = im_min proj_display_min_spinner.value = im_min
proj_display_max_spinner.value = im_max proj_display_max_spinner.value = im_max
overview_plot_x_image_glyph.color_mapper.low = im_min
overview_plot_y_image_glyph.color_mapper.low = im_min
overview_plot_x_image_glyph.color_mapper.high = im_max
overview_plot_y_image_glyph.color_mapper.high = im_max
frame_range.start = 0 frame_range.start = 0
frame_range.end = n_im frame_range.end = n_im
frame_range.reset_start = 0 frame_range.reset_start = 0
frame_range.reset_end = n_im frame_range.reset_end = n_im
frame_range.bounds = (0, n_im) frame_range.bounds = (0, n_im)
scan_motor = scan["scan_motor"] scan_motor = det_data["scan_motor"]
proj_y_plot.yaxis.axis_label = f"Scanning motor, {scan_motor}" overview_plot_y.axis[1].axis_label = f"Scanning motor, {scan_motor}"
var = scan[scan_motor] var = det_data[scan_motor]
var_start = var[0] var_start = var[0]
var_end = var[-1] + (var[-1] - var[0]) / (n_im - 1) var_end = var[-1] + (var[-1] - var[0]) / (n_im - 1)
@ -292,95 +307,148 @@ def create():
# shared frame ranges # shared frame ranges
frame_range = Range1d(0, 1, bounds=(0, 1)) frame_range = Range1d(0, 1, bounds=(0, 1))
scanning_motor_range = Range1d(0, 1, bounds=(0, 1)) scanning_motor_range = Range1d(0, 1, bounds=(0, 1))
color_mapper_proj = LinearColorMapper()
det_x_range = Range1d(0, IMAGE_W, bounds=(0, IMAGE_W)) det_x_range = Range1d(0, IMAGE_W, bounds=(0, IMAGE_W))
proj_x_plot = figure( overview_plot_x = Plot(
title="Projections on X-axis", title=Title(text="Projections on X-axis"),
x_axis_label="Coordinate X, pix",
y_axis_label="Frame",
x_range=det_x_range, x_range=det_x_range,
y_range=frame_range, y_range=frame_range,
extra_y_ranges={"scanning_motor": scanning_motor_range}, extra_y_ranges={"scanning_motor": scanning_motor_range},
height=540, plot_height=400,
width=IMAGE_PLOT_W - 3, plot_width=IMAGE_PLOT_W - 3,
tools="pan,box_zoom,wheel_zoom,reset",
active_scroll="wheel_zoom",
) )
proj_x_plot.yaxis.major_label_orientation = "vertical" # ---- tools
proj_x_plot.toolbar.tools[2].maintain_focus = False wheelzoomtool = WheelZoomTool(maintain_focus=False)
overview_plot_x.toolbar.logo = None
overview_plot_x.add_tools(
PanTool(), BoxZoomTool(), wheelzoomtool, ResetTool(),
)
overview_plot_x.toolbar.active_scroll = wheelzoomtool
proj_x_image_source = ColumnDataSource( # ---- axes
overview_plot_x.add_layout(LinearAxis(axis_label="Coordinate X, pix"), place="below")
overview_plot_x.add_layout(
LinearAxis(axis_label="Frame", major_label_orientation="vertical"), place="left"
)
# ---- grid lines
overview_plot_x.add_layout(Grid(dimension=0, ticker=BasicTicker()))
overview_plot_x.add_layout(Grid(dimension=1, ticker=BasicTicker()))
# ---- rgba image glyph
overview_plot_x_image_source = ColumnDataSource(
dict(image=[np.zeros((1, 1), dtype="float32")], x=[0], y=[0], dw=[IMAGE_W], dh=[1]) dict(image=[np.zeros((1, 1), dtype="float32")], x=[0], y=[0], dw=[IMAGE_W], dh=[1])
) )
proj_x_plot.image(source=proj_x_image_source, color_mapper=color_mapper_proj) overview_plot_x_image_glyph = Image(image="image", x="x", y="y", dw="dw", dh="dh")
overview_plot_x.add_glyph(
overview_plot_x_image_source, overview_plot_x_image_glyph, name="image_glyph"
)
det_y_range = Range1d(0, IMAGE_H, bounds=(0, IMAGE_H)) det_y_range = Range1d(0, IMAGE_H, bounds=(0, IMAGE_H))
proj_y_plot = figure( overview_plot_y = Plot(
title="Projections on Y-axis", title=Title(text="Projections on Y-axis"),
x_axis_label="Coordinate Y, pix",
y_axis_label="Scanning motor",
y_axis_location="right",
x_range=det_y_range, x_range=det_y_range,
y_range=frame_range, y_range=frame_range,
extra_y_ranges={"scanning_motor": scanning_motor_range}, extra_y_ranges={"scanning_motor": scanning_motor_range},
height=540, plot_height=400,
width=IMAGE_PLOT_H + 22, plot_width=IMAGE_PLOT_H + 22,
tools="pan,box_zoom,wheel_zoom,reset",
active_scroll="wheel_zoom",
) )
proj_y_plot.yaxis.y_range_name = "scanning_motor" # ---- tools
proj_y_plot.yaxis.major_label_orientation = "vertical" wheelzoomtool = WheelZoomTool(maintain_focus=False)
proj_y_plot.toolbar.tools[2].maintain_focus = False overview_plot_y.toolbar.logo = None
overview_plot_y.add_tools(
PanTool(), BoxZoomTool(), wheelzoomtool, ResetTool(),
)
overview_plot_y.toolbar.active_scroll = wheelzoomtool
proj_y_image_source = ColumnDataSource( # ---- axes
overview_plot_y.add_layout(LinearAxis(axis_label="Coordinate Y, pix"), place="below")
overview_plot_y.add_layout(
LinearAxis(
y_range_name="scanning_motor",
axis_label="Scanning motor",
major_label_orientation="vertical",
),
place="right",
)
# ---- grid lines
overview_plot_y.add_layout(Grid(dimension=0, ticker=BasicTicker()))
overview_plot_y.add_layout(Grid(dimension=1, ticker=BasicTicker()))
# ---- rgba image glyph
overview_plot_y_image_source = ColumnDataSource(
dict(image=[np.zeros((1, 1), dtype="float32")], x=[0], y=[0], dw=[IMAGE_H], dh=[1]) dict(image=[np.zeros((1, 1), dtype="float32")], x=[0], y=[0], dw=[IMAGE_H], dh=[1])
) )
proj_y_plot.image(source=proj_y_image_source, color_mapper=color_mapper_proj) overview_plot_y_image_glyph = Image(image="image", x="x", y="y", dw="dw", dh="dh")
overview_plot_y.add_glyph(
def colormap_select_callback(_attr, _old, new): overview_plot_y_image_source, overview_plot_y_image_glyph, name="image_glyph"
color_mapper_proj.palette = new
colormap_select = Select(
title="Colormap:",
options=[("Greys256", "greys"), ("Plasma256", "plasma"), ("Cividis256", "cividis")],
width=210,
) )
colormap_select.on_change("value", colormap_select_callback)
colormap_select.value = "Plasma256"
def proj_auto_checkbox_callback(_attr, _old, new): cmap_dict = {
if 0 in new: "gray": Greys256,
"gray_reversed": Greys256[::-1],
"plasma": Plasma256,
"cividis": Cividis256,
}
def colormap_callback(_attr, _old, new):
overview_plot_x_image_glyph.color_mapper = LinearColorMapper(palette=cmap_dict[new])
overview_plot_y_image_glyph.color_mapper = LinearColorMapper(palette=cmap_dict[new])
colormap = Select(title="Colormap:", options=list(cmap_dict.keys()), width=210)
colormap.on_change("value", colormap_callback)
colormap.value = "plasma"
PROJ_STEP = 1
def proj_auto_checkbox_callback(state):
if state:
proj_display_min_spinner.disabled = True proj_display_min_spinner.disabled = True
proj_display_max_spinner.disabled = True proj_display_max_spinner.disabled = True
else: else:
proj_display_min_spinner.disabled = False proj_display_min_spinner.disabled = False
proj_display_max_spinner.disabled = False proj_display_max_spinner.disabled = False
_update_proj_plots() update_overview_plot()
proj_auto_checkbox = CheckboxGroup( proj_auto_checkbox = CheckboxGroup(
labels=["Projections Intensity Range"], active=[0], width=145, margin=[10, 5, 0, 5] labels=["Projections Intensity Range"], active=[0], width=145, margin=[10, 5, 0, 5]
) )
proj_auto_checkbox.on_change("active", proj_auto_checkbox_callback) proj_auto_checkbox.on_click(proj_auto_checkbox_callback)
def proj_display_max_spinner_callback(_attr, _old, new): def proj_display_max_spinner_callback(_attr, _old_value, new_value):
color_mapper_proj.high = new proj_display_min_spinner.high = new_value - PROJ_STEP
overview_plot_x_image_glyph.color_mapper.high = new_value
overview_plot_y_image_glyph.color_mapper.high = new_value
proj_display_max_spinner = Spinner( proj_display_max_spinner = Spinner(
value=1, disabled=bool(proj_auto_checkbox.active), mode="int", width=100 low=0 + PROJ_STEP,
value=1,
step=PROJ_STEP,
disabled=bool(proj_auto_checkbox.active),
width=100,
height=31,
) )
proj_display_max_spinner.on_change("value", proj_display_max_spinner_callback) proj_display_max_spinner.on_change("value", proj_display_max_spinner_callback)
def proj_display_min_spinner_callback(_attr, _old, new): def proj_display_min_spinner_callback(_attr, _old_value, new_value):
color_mapper_proj.low = new proj_display_max_spinner.low = new_value + PROJ_STEP
overview_plot_x_image_glyph.color_mapper.low = new_value
overview_plot_y_image_glyph.color_mapper.low = new_value
proj_display_min_spinner = Spinner( proj_display_min_spinner = Spinner(
value=0, disabled=bool(proj_auto_checkbox.active), mode="int", width=100 low=0,
high=1 - PROJ_STEP,
value=0,
step=PROJ_STEP,
disabled=bool(proj_auto_checkbox.active),
width=100,
height=31,
) )
proj_display_min_spinner.on_change("value", proj_display_min_spinner_callback) proj_display_min_spinner.on_change("value", proj_display_min_spinner_callback)
@ -402,24 +470,25 @@ def create():
x = [] x = []
y = [] y = []
fit_param = fit_param_select.value fit_param = fit_param_select.value
for s, p in zip(dataset, scan_table_source.data["param"]): for s, p in zip(zebra_data, scan_table_source.data["param"]):
if "fit" in s and fit_param: if "fit" in s and fit_param:
x.append(p) x.append(p)
y.append(s["fit"][fit_param]) y.append(s["fit"][fit_param])
param_scatter_source.data.update(x=x, y=y) param_plot_scatter_source.data.update(x=x, y=y)
# Parameter plot # Parameter plot
param_plot = figure( param_plot = Plot(x_range=DataRange1d(), y_range=DataRange1d(), plot_height=400, plot_width=700)
x_axis_label="Parameter",
y_axis_label="Fit parameter",
height=400,
width=700,
tools="pan,wheel_zoom,reset",
)
param_scatter_source = ColumnDataSource(dict(x=[], y=[])) param_plot.add_layout(LinearAxis(axis_label="Fit parameter"), place="left")
param_plot.circle(source=param_scatter_source) param_plot.add_layout(LinearAxis(axis_label="Parameter"), place="below")
param_plot.add_layout(Grid(dimension=0, ticker=BasicTicker()))
param_plot.add_layout(Grid(dimension=1, ticker=BasicTicker()))
param_plot_scatter_source = ColumnDataSource(dict(x=[], y=[]))
param_plot.add_glyph(param_plot_scatter_source, Scatter(x="x", y="y"))
param_plot.add_tools(PanTool(), WheelZoomTool(), ResetTool())
param_plot.toolbar.logo = None param_plot.toolbar.logo = None
def fit_param_select_callback(_attr, _old, _new): def fit_param_select_callback(_attr, _old, _new):
@ -429,7 +498,7 @@ def create():
fit_param_select.on_change("value", fit_param_select_callback) fit_param_select.on_change("value", fit_param_select_callback)
def proc_all_button_callback(): def proc_all_button_callback():
for scan in dataset: for scan in zebra_data:
pyzebra.fit_event( pyzebra.fit_event(
scan, scan,
int(np.floor(frame_range.start)), int(np.floor(frame_range.start)),
@ -442,7 +511,7 @@ def create():
_update_table() _update_table()
for scan in dataset: for scan in zebra_data:
if "fit" in scan: if "fit" in scan:
options = list(scan["fit"].keys()) options = list(scan["fit"].keys())
fit_param_select.options = options fit_param_select.options = options
@ -455,9 +524,8 @@ def create():
proc_all_button.on_click(proc_all_button_callback) proc_all_button.on_click(proc_all_button_callback)
def proc_button_callback(): def proc_button_callback():
scan = _get_selected_scan()
pyzebra.fit_event( pyzebra.fit_event(
scan, det_data,
int(np.floor(frame_range.start)), int(np.floor(frame_range.start)),
int(np.ceil(frame_range.end)), int(np.ceil(frame_range.end)),
int(np.floor(det_y_range.start)), int(np.floor(det_y_range.start)),
@ -468,7 +536,7 @@ def create():
_update_table() _update_table()
for scan in dataset: for scan in zebra_data:
if "fit" in scan: if "fit" in scan:
options = list(scan["fit"].keys()) options = list(scan["fit"].keys())
fit_param_select.options = options fit_param_select.options = options
@ -481,15 +549,18 @@ def create():
proc_button.on_click(proc_button_callback) proc_button.on_click(proc_button_callback)
layout_controls = row( layout_controls = row(
colormap_select, colormap,
column(proj_auto_checkbox, row(proj_display_min_spinner, proj_display_max_spinner)), column(proj_auto_checkbox, row(proj_display_min_spinner, proj_display_max_spinner)),
proc_button, proc_button,
proc_all_button, proc_all_button,
) )
layout_proj = column( layout_overview = column(
gridplot( gridplot(
[[proj_x_plot, proj_y_plot]], toolbar_options={"logo": None}, toolbar_location="right" [[overview_plot_x, overview_plot_y]],
toolbar_options=dict(logo=None),
merge_tools=True,
toolbar_location="left",
), ),
layout_controls, layout_controls,
) )
@ -497,7 +568,7 @@ def create():
# Plot tabs # Plot tabs
plots = Tabs( plots = Tabs(
tabs=[ tabs=[
Panel(child=layout_proj, title="single scan"), Panel(child=layout_overview, title="single scan"),
Panel(child=column(param_plot, row(fit_param_select)), title="parameter plot"), Panel(child=column(param_plot, row(fit_param_select)), title="parameter plot"),
] ]
) )

File diff suppressed because it is too large Load Diff

View File

@ -1,36 +1,79 @@
import base64
import io
import itertools import itertools
import os import os
import tempfile import tempfile
import types
import numpy as np import numpy as np
from bokeh.io import curdoc from bokeh.io import curdoc
from bokeh.layouts import column, row from bokeh.layouts import column, row
from bokeh.models import ( from bokeh.models import (
BasicTicker,
Button, Button,
CellEditor, CellEditor,
CheckboxEditor, CheckboxEditor,
CheckboxGroup,
ColumnDataSource, ColumnDataSource,
CustomJS,
DataRange1d,
DataTable, DataTable,
Div, Div,
Dropdown,
FileInput,
Grid,
HoverTool, HoverTool,
Image,
Legend,
Line,
LinearAxis,
LinearColorMapper, LinearColorMapper,
MultiLine,
MultiSelect,
NumberEditor, NumberEditor,
Panel, Panel,
PanTool,
Plot,
RadioGroup,
Range1d, Range1d,
ResetTool,
Scatter,
Select, Select,
Spacer, Spacer,
Span, Span,
Spinner,
TableColumn, TableColumn,
Tabs, Tabs,
TextAreaInput, TextAreaInput,
WheelZoomTool,
Whisker, Whisker,
) )
from bokeh.palettes import Category10, Plasma256 from bokeh.palettes import Category10, Plasma256
from bokeh.plotting import figure
from scipy import interpolate from scipy import interpolate
import pyzebra import pyzebra
from pyzebra import app from pyzebra.ccl_process import AREA_METHODS
javaScript = """
let j = 0;
for (let i = 0; i < js_data.data['fname'].length; i++) {
if (js_data.data['content'][i] === "") continue;
setTimeout(function() {
const blob = new Blob([js_data.data['content'][i]], {type: 'text/plain'})
const link = document.createElement('a');
document.body.appendChild(link);
const url = window.URL.createObjectURL(blob);
link.href = url;
link.download = js_data.data['fname'][i] + js_data.data['ext'][i];
link.click();
window.URL.revokeObjectURL(url);
document.body.removeChild(link);
}, 100 * j)
j++;
}
"""
def color_palette(n_colors): def color_palette(n_colors):
@ -40,38 +83,178 @@ def color_palette(n_colors):
def create(): def create():
doc = curdoc() doc = curdoc()
log = doc.logger det_data = []
dataset = [] fit_params = {}
app_dlfiles = app.DownloadFiles(n_files=1) js_data = ColumnDataSource(data=dict(content=[""], fname=[""], ext=[""]))
def file_select_update_for_proposal():
proposal_path = proposal_textinput.name
if proposal_path:
file_list = []
for file in os.listdir(proposal_path):
if file.endswith((".ccl", ".dat")):
file_list.append((os.path.join(proposal_path, file), file))
file_select.options = file_list
file_open_button.disabled = False
file_append_button.disabled = False
else:
file_select.options = []
file_open_button.disabled = True
file_append_button.disabled = True
doc.add_periodic_callback(file_select_update_for_proposal, 5000)
def proposal_textinput_callback(_attr, _old, _new):
file_select_update_for_proposal()
proposal_textinput = doc.proposal_textinput
proposal_textinput.on_change("name", proposal_textinput_callback)
def _init_datatable(): def _init_datatable():
scan_list = [s["idx"] for s in dataset] scan_list = [s["idx"] for s in det_data]
export = [s["export"] for s in dataset] export = [s["export"] for s in det_data]
if param_select.value == "user defined": if param_select.value == "user defined":
param = [None] * len(dataset) param = [None] * len(det_data)
else: else:
param = [scan[param_select.value] for scan in dataset] param = [scan[param_select.value] for scan in det_data]
file_list = [] file_list = []
for scan in dataset: for scan in det_data:
file_list.append(os.path.basename(scan["original_filename"])) file_list.append(os.path.basename(scan["original_filename"]))
scan_table_source.data.update( scan_table_source.data.update(
file=file_list, scan=scan_list, param=param, fit=[0] * len(scan_list), export=export file=file_list, scan=scan_list, param=param, fit=[0] * len(scan_list), export=export,
) )
scan_table_source.selected.indices = [] scan_table_source.selected.indices = []
scan_table_source.selected.indices = [0] scan_table_source.selected.indices = [0]
scan_motor_select.options = dataset[0]["scan_motors"] scan_motor_select.options = det_data[0]["scan_motors"]
scan_motor_select.value = dataset[0]["scan_motor"] scan_motor_select.value = det_data[0]["scan_motor"]
merge_options = [(str(i), f"{i} ({idx})") for i, idx in enumerate(scan_list)] merge_options = [(str(i), f"{i} ({idx})") for i, idx in enumerate(scan_list)]
merge_from_select.options = merge_options merge_from_select.options = merge_options
merge_from_select.value = merge_options[0][0] merge_from_select.value = merge_options[0][0]
file_select = MultiSelect(title="Available .ccl/.dat files:", width=210, height=250)
def file_open_button_callback():
nonlocal det_data
new_data = []
for f_path in file_select.value:
with open(f_path) as file:
f_name = os.path.basename(f_path)
base, ext = os.path.splitext(f_name)
try:
file_data = pyzebra.parse_1D(file, ext)
except:
print(f"Error loading {f_name}")
continue
pyzebra.normalize_dataset(file_data, monitor_spinner.value)
if not new_data: # first file
new_data = file_data
pyzebra.merge_duplicates(new_data)
js_data.data.update(fname=[base])
else:
pyzebra.merge_datasets(new_data, file_data)
if new_data:
det_data = new_data
_init_datatable()
append_upload_button.disabled = False
file_open_button = Button(label="Open New", width=100, disabled=True)
file_open_button.on_click(file_open_button_callback)
def file_append_button_callback():
file_data = []
for f_path in file_select.value:
with open(f_path) as file:
f_name = os.path.basename(f_path)
_, ext = os.path.splitext(f_name)
try:
file_data = pyzebra.parse_1D(file, ext)
except:
print(f"Error loading {f_name}")
continue
pyzebra.normalize_dataset(file_data, monitor_spinner.value)
pyzebra.merge_datasets(det_data, file_data)
if file_data:
_init_datatable()
file_append_button = Button(label="Append", width=100, disabled=True)
file_append_button.on_click(file_append_button_callback)
def upload_button_callback(_attr, _old, _new):
nonlocal det_data
new_data = []
for f_str, f_name in zip(upload_button.value, upload_button.filename):
with io.StringIO(base64.b64decode(f_str).decode()) as file:
base, ext = os.path.splitext(f_name)
try:
file_data = pyzebra.parse_1D(file, ext)
except:
print(f"Error loading {f_name}")
continue
pyzebra.normalize_dataset(file_data, monitor_spinner.value)
if not new_data: # first file
new_data = file_data
pyzebra.merge_duplicates(new_data)
js_data.data.update(fname=[base])
else:
pyzebra.merge_datasets(new_data, file_data)
if new_data:
det_data = new_data
_init_datatable()
append_upload_button.disabled = False
upload_div = Div(text="or upload new .ccl/.dat files:", margin=(5, 5, 0, 5))
upload_button = FileInput(accept=".ccl,.dat", multiple=True, width=200)
# for on_change("value", ...) or on_change("filename", ...),
# see https://github.com/bokeh/bokeh/issues/11461
upload_button.on_change("filename", upload_button_callback)
def append_upload_button_callback(_attr, _old, _new):
file_data = []
for f_str, f_name in zip(append_upload_button.value, append_upload_button.filename):
with io.StringIO(base64.b64decode(f_str).decode()) as file:
_, ext = os.path.splitext(f_name)
try:
file_data = pyzebra.parse_1D(file, ext)
except:
print(f"Error loading {f_name}")
continue
pyzebra.normalize_dataset(file_data, monitor_spinner.value)
pyzebra.merge_datasets(det_data, file_data)
if file_data:
_init_datatable()
append_upload_div = Div(text="append extra files:", margin=(5, 5, 0, 5))
append_upload_button = FileInput(accept=".ccl,.dat", multiple=True, width=200, disabled=True)
# for on_change("value", ...) or on_change("filename", ...),
# see https://github.com/bokeh/bokeh/issues/11461
append_upload_button.on_change("filename", append_upload_button_callback)
def monitor_spinner_callback(_attr, _old, new):
if det_data:
pyzebra.normalize_dataset(det_data, new)
_update_single_scan_plot()
_update_overview()
monitor_spinner = Spinner(title="Monitor:", mode="int", value=100_000, low=1, width=145)
monitor_spinner.on_change("value", monitor_spinner_callback)
def scan_motor_select_callback(_attr, _old, new): def scan_motor_select_callback(_attr, _old, new):
if dataset: if det_data:
for scan in dataset: for scan in det_data:
scan["scan_motor"] = new scan["scan_motor"] = new
_update_single_scan_plot() _update_single_scan_plot()
_update_overview() _update_overview()
@ -80,12 +263,12 @@ def create():
scan_motor_select.on_change("value", scan_motor_select_callback) scan_motor_select.on_change("value", scan_motor_select_callback)
def _update_table(): def _update_table():
fit_ok = [(1 if "fit" in scan else 0) for scan in dataset] fit_ok = [(1 if "fit" in scan else 0) for scan in det_data]
export = [scan["export"] for scan in dataset] export = [scan["export"] for scan in det_data]
if param_select.value == "user defined": if param_select.value == "user defined":
param = [None] * len(dataset) param = [None] * len(det_data)
else: else:
param = [scan[param_select.value] for scan in dataset] param = [scan[param_select.value] for scan in det_data]
scan_table_source.data.update(fit=fit_ok, export=export, param=param) scan_table_source.data.update(fit=fit_ok, export=export, param=param)
@ -98,19 +281,19 @@ def create():
x = scan[scan_motor] x = scan[scan_motor]
plot.axis[0].axis_label = scan_motor plot.axis[0].axis_label = scan_motor
scatter_source.data.update(x=x, y=y, y_upper=y + y_err, y_lower=y - y_err) plot_scatter_source.data.update(x=x, y=y, y_upper=y + y_err, y_lower=y - y_err)
fit = scan.get("fit") fit = scan.get("fit")
if fit is not None: if fit is not None:
x_fit = np.linspace(x[0], x[-1], 100) x_fit = np.linspace(x[0], x[-1], 100)
fit_source.data.update(x=x_fit, y=fit.eval(x=x_fit)) plot_fit_source.data.update(x=x_fit, y=fit.eval(x=x_fit))
x_bkg = [] x_bkg = []
y_bkg = [] y_bkg = []
xs_peak = [] xs_peak = []
ys_peak = [] ys_peak = []
comps = fit.eval_components(x=x_fit) comps = fit.eval_components(x=x_fit)
for i, model in enumerate(app_fitctrl.params): for i, model in enumerate(fit_params):
if "linear" in model: if "linear" in model:
x_bkg = x_fit x_bkg = x_fit
y_bkg = comps[f"f{i}_"] y_bkg = comps[f"f{i}_"]
@ -119,15 +302,16 @@ def create():
xs_peak.append(x_fit) xs_peak.append(x_fit)
ys_peak.append(comps[f"f{i}_"]) ys_peak.append(comps[f"f{i}_"])
bkg_source.data.update(x=x_bkg, y=y_bkg) plot_bkg_source.data.update(x=x_bkg, y=y_bkg)
peak_source.data.update(xs=xs_peak, ys=ys_peak) plot_peak_source.data.update(xs=xs_peak, ys=ys_peak)
fit_output_textinput.value = fit.fit_report()
else: else:
fit_source.data.update(x=[], y=[]) plot_fit_source.data.update(x=[], y=[])
bkg_source.data.update(x=[], y=[]) plot_bkg_source.data.update(x=[], y=[])
peak_source.data.update(xs=[], ys=[]) plot_peak_source.data.update(xs=[], ys=[])
fit_output_textinput.value = ""
app_fitctrl.update_result_textarea(scan)
def _update_overview(): def _update_overview():
xs = [] xs = []
@ -138,7 +322,7 @@ def create():
par = [] par = []
for s, p in enumerate(scan_table_source.data["param"]): for s, p in enumerate(scan_table_source.data["param"]):
if p is not None: if p is not None:
scan = dataset[s] scan = det_data[s]
scan_motor = scan["scan_motor"] scan_motor = scan["scan_motor"]
xs.append(scan[scan_motor]) xs.append(scan[scan_motor])
x.extend(scan[scan_motor]) x.extend(scan[scan_motor])
@ -147,14 +331,14 @@ def create():
param.append(float(p)) param.append(float(p))
par.extend(scan["counts"]) par.extend(scan["counts"])
if dataset: if det_data:
scan_motor = dataset[0]["scan_motor"] scan_motor = det_data[0]["scan_motor"]
ov_plot.axis[0].axis_label = scan_motor ov_plot.axis[0].axis_label = scan_motor
ov_param_plot.axis[0].axis_label = scan_motor ov_param_plot.axis[0].axis_label = scan_motor
ov_mline_source.data.update(xs=xs, ys=ys, param=param, color=color_palette(len(xs))) ov_plot_mline_source.data.update(xs=xs, ys=ys, param=param, color=color_palette(len(xs)))
ov_param_scatter_source.data.update(x=x, y=y) ov_param_plot_scatter_source.data.update(x=x, y=y)
if y: if y:
x1, x2 = min(x), max(x) x1, x2 = min(x), max(x)
@ -164,7 +348,7 @@ def create():
np.linspace(y1, y2, ov_param_plot.inner_height), np.linspace(y1, y2, ov_param_plot.inner_height),
) )
image = interpolate.griddata((x, y), par, (grid_x, grid_y)) image = interpolate.griddata((x, y), par, (grid_x, grid_y))
ov_param_image_source.data.update( ov_param_plot_image_source.data.update(
image=[image], x=[x1], y=[y1], dw=[x2 - x1], dh=[y2 - y1] image=[image], x=[x1], y=[y1], dw=[x2 - x1], dh=[y2 - y1]
) )
@ -179,7 +363,7 @@ def create():
y_range.bounds = (y1, y2) y_range.bounds = (y1, y2)
else: else:
ov_param_image_source.data.update(image=[], x=[], y=[], dw=[], dh=[]) ov_param_plot_image_source.data.update(image=[], x=[], y=[], dw=[], dh=[])
def _update_param_plot(): def _update_param_plot():
x = [] x = []
@ -187,50 +371,49 @@ def create():
y_lower = [] y_lower = []
y_upper = [] y_upper = []
fit_param = fit_param_select.value fit_param = fit_param_select.value
for s, p in zip(dataset, scan_table_source.data["param"]): for s, p in zip(det_data, scan_table_source.data["param"]):
if "fit" in s and fit_param: if "fit" in s and fit_param:
x.append(p) x.append(p)
param_fit_val = s["fit"].params[fit_param].value param_fit_val = s["fit"].params[fit_param].value
param_fit_std = s["fit"].params[fit_param].stderr param_fit_std = s["fit"].params[fit_param].stderr
if param_fit_std is None:
param_fit_std = 0
y.append(param_fit_val) y.append(param_fit_val)
y_lower.append(param_fit_val - param_fit_std) y_lower.append(param_fit_val - param_fit_std)
y_upper.append(param_fit_val + param_fit_std) y_upper.append(param_fit_val + param_fit_std)
param_scatter_source.data.update(x=x, y=y, y_lower=y_lower, y_upper=y_upper) param_plot_scatter_source.data.update(x=x, y=y, y_lower=y_lower, y_upper=y_upper)
def _monitor_change():
_update_single_scan_plot()
_update_overview()
app_inputctrl = app.InputControls(
dataset, app_dlfiles, on_file_open=_init_datatable, on_monitor_change=_monitor_change
)
# Main plot # Main plot
plot = figure( plot = Plot(
x_axis_label="Scan motor", x_range=DataRange1d(),
y_axis_label="Counts", y_range=DataRange1d(only_visible=True),
height=450, plot_height=450,
width=700, plot_width=700,
tools="pan,wheel_zoom,reset",
) )
scatter_source = ColumnDataSource(dict(x=[0], y=[0], y_upper=[0], y_lower=[0])) plot.add_layout(LinearAxis(axis_label="Counts"), place="left")
plot.circle( plot.add_layout(LinearAxis(axis_label="Scan motor"), place="below")
source=scatter_source, line_color="steelblue", fill_color="steelblue", legend_label="data"
plot.add_layout(Grid(dimension=0, ticker=BasicTicker()))
plot.add_layout(Grid(dimension=1, ticker=BasicTicker()))
plot_scatter_source = ColumnDataSource(dict(x=[0], y=[0], y_upper=[0], y_lower=[0]))
plot_scatter = plot.add_glyph(
plot_scatter_source, Scatter(x="x", y="y", line_color="steelblue", fill_color="steelblue")
) )
plot.add_layout(Whisker(source=scatter_source, base="x", upper="y_upper", lower="y_lower")) plot.add_layout(Whisker(source=plot_scatter_source, base="x", upper="y_upper", lower="y_lower"))
fit_source = ColumnDataSource(dict(x=[0], y=[0])) plot_fit_source = ColumnDataSource(dict(x=[0], y=[0]))
plot.line(source=fit_source, legend_label="best fit") plot_fit = plot.add_glyph(plot_fit_source, Line(x="x", y="y"))
bkg_source = ColumnDataSource(dict(x=[0], y=[0])) plot_bkg_source = ColumnDataSource(dict(x=[0], y=[0]))
plot.line(source=bkg_source, line_color="green", line_dash="dashed", legend_label="linear") plot_bkg = plot.add_glyph(
plot_bkg_source, Line(x="x", y="y", line_color="green", line_dash="dashed")
)
peak_source = ColumnDataSource(dict(xs=[[0]], ys=[[0]])) plot_peak_source = ColumnDataSource(dict(xs=[[0]], ys=[[0]]))
plot.multi_line(source=peak_source, line_color="red", line_dash="dashed", legend_label="peak") plot_peak = plot.add_glyph(
plot_peak_source, MultiLine(xs="xs", ys="ys", line_color="red", line_dash="dashed")
)
fit_from_span = Span(location=None, dimension="height", line_dash="dashed") fit_from_span = Span(location=None, dimension="height", line_dash="dashed")
plot.add_layout(fit_from_span) plot.add_layout(fit_from_span)
@ -238,61 +421,80 @@ def create():
fit_to_span = Span(location=None, dimension="height", line_dash="dashed") fit_to_span = Span(location=None, dimension="height", line_dash="dashed")
plot.add_layout(fit_to_span) plot.add_layout(fit_to_span)
plot.y_range.only_visible = True plot.add_layout(
Legend(
items=[
("data", [plot_scatter]),
("best fit", [plot_fit]),
("peak", [plot_peak]),
("linear", [plot_bkg]),
],
location="top_left",
click_policy="hide",
)
)
plot.add_tools(PanTool(), WheelZoomTool(), ResetTool())
plot.toolbar.logo = None plot.toolbar.logo = None
plot.legend.click_policy = "hide"
# Overview multilines plot # Overview multilines plot
ov_plot = figure( ov_plot = Plot(x_range=DataRange1d(), y_range=DataRange1d(), plot_height=450, plot_width=700)
x_axis_label="Scan motor",
y_axis_label="Counts",
height=450,
width=700,
tools="pan,wheel_zoom,reset",
)
ov_mline_source = ColumnDataSource(dict(xs=[], ys=[], param=[], color=[])) ov_plot.add_layout(LinearAxis(axis_label="Counts"), place="left")
ov_plot.multi_line(source=ov_mline_source, line_color="color") ov_plot.add_layout(LinearAxis(axis_label="Scan motor"), place="below")
ov_plot.add_tools(HoverTool(tooltips=[("param", "@param")])) ov_plot.add_layout(Grid(dimension=0, ticker=BasicTicker()))
ov_plot.add_layout(Grid(dimension=1, ticker=BasicTicker()))
ov_plot_mline_source = ColumnDataSource(dict(xs=[], ys=[], param=[], color=[]))
ov_plot.add_glyph(ov_plot_mline_source, MultiLine(xs="xs", ys="ys", line_color="color"))
hover_tool = HoverTool(tooltips=[("param", "@param")])
ov_plot.add_tools(PanTool(), WheelZoomTool(), hover_tool, ResetTool())
ov_plot.add_tools(PanTool(), WheelZoomTool(), ResetTool())
ov_plot.toolbar.logo = None ov_plot.toolbar.logo = None
# Overview params plot # Overview perams plot
ov_param_plot = figure( ov_param_plot = Plot(x_range=Range1d(), y_range=Range1d(), plot_height=450, plot_width=700)
x_axis_label="Scan motor",
y_axis_label="Param", ov_param_plot.add_layout(LinearAxis(axis_label="Param"), place="left")
x_range=Range1d(), ov_param_plot.add_layout(LinearAxis(axis_label="Scan motor"), place="below")
y_range=Range1d(),
height=450, ov_param_plot.add_layout(Grid(dimension=0, ticker=BasicTicker()))
width=700, ov_param_plot.add_layout(Grid(dimension=1, ticker=BasicTicker()))
tools="pan,wheel_zoom,reset",
)
color_mapper = LinearColorMapper(palette=Plasma256) color_mapper = LinearColorMapper(palette=Plasma256)
ov_param_image_source = ColumnDataSource(dict(image=[], x=[], y=[], dw=[], dh=[])) ov_param_plot_image_source = ColumnDataSource(dict(image=[], x=[], y=[], dw=[], dh=[]))
ov_param_plot.image(source=ov_param_image_source, color_mapper=color_mapper) ov_param_plot.add_glyph(
ov_param_plot_image_source,
Image(image="image", x="x", y="y", dw="dw", dh="dh", color_mapper=color_mapper),
)
ov_param_scatter_source = ColumnDataSource(dict(x=[], y=[])) ov_param_plot_scatter_source = ColumnDataSource(dict(x=[], y=[]))
ov_param_plot.dot(source=ov_param_scatter_source, size=15, color="black") ov_param_plot.add_glyph(
ov_param_plot_scatter_source, Scatter(x="x", y="y", marker="dot", size=15),
)
ov_param_plot.add_tools(PanTool(), WheelZoomTool(), ResetTool())
ov_param_plot.toolbar.logo = None ov_param_plot.toolbar.logo = None
# Parameter plot # Parameter plot
param_plot = figure( param_plot = Plot(x_range=DataRange1d(), y_range=DataRange1d(), plot_height=400, plot_width=700)
x_axis_label="Parameter",
y_axis_label="Fit parameter",
height=400,
width=700,
tools="pan,wheel_zoom,reset",
)
param_scatter_source = ColumnDataSource(dict(x=[], y=[], y_upper=[], y_lower=[])) param_plot.add_layout(LinearAxis(axis_label="Fit parameter"), place="left")
param_plot.circle(source=param_scatter_source) param_plot.add_layout(LinearAxis(axis_label="Parameter"), place="below")
param_plot.add_layout(Grid(dimension=0, ticker=BasicTicker()))
param_plot.add_layout(Grid(dimension=1, ticker=BasicTicker()))
param_plot_scatter_source = ColumnDataSource(dict(x=[], y=[], y_upper=[], y_lower=[]))
param_plot.add_glyph(param_plot_scatter_source, Scatter(x="x", y="y"))
param_plot.add_layout( param_plot.add_layout(
Whisker(source=param_scatter_source, base="x", upper="y_upper", lower="y_lower") Whisker(source=param_plot_scatter_source, base="x", upper="y_upper", lower="y_lower")
) )
param_plot.add_tools(PanTool(), WheelZoomTool(), ResetTool())
param_plot.toolbar.logo = None param_plot.toolbar.logo = None
def fit_param_select_callback(_attr, _old, _new): def fit_param_select_callback(_attr, _old, _new):
@ -332,7 +534,7 @@ def create():
def scan_table_source_callback(_attr, _old, new): def scan_table_source_callback(_attr, _old, new):
# unfortunately, we don't know if the change comes from data update or user input # unfortunately, we don't know if the change comes from data update or user input
# also `old` and `new` are the same for non-scalars # also `old` and `new` are the same for non-scalars
for scan, export in zip(dataset, new["export"]): for scan, export in zip(det_data, new["export"]):
scan["export"] = export scan["export"] = export
_update_overview() _update_overview()
_update_param_plot() _update_param_plot()
@ -361,13 +563,13 @@ def create():
def merge_button_callback(): def merge_button_callback():
scan_into = _get_selected_scan() scan_into = _get_selected_scan()
scan_from = dataset[int(merge_from_select.value)] scan_from = det_data[int(merge_from_select.value)]
if scan_into is scan_from: if scan_into is scan_from:
log.warning("Selected scans for merging are identical") print("WARNING: Selected scans for merging are identical")
return return
pyzebra.merge_scans(scan_into, scan_from, log=log) pyzebra.merge_scans(scan_into, scan_from)
_update_table() _update_table()
_update_single_scan_plot() _update_single_scan_plot()
_update_overview() _update_overview()
@ -385,7 +587,7 @@ def create():
restore_button.on_click(restore_button_callback) restore_button.on_click(restore_button_callback)
def _get_selected_scan(): def _get_selected_scan():
return dataset[scan_table_source.selected.indices[0]] return det_data[scan_table_source.selected.indices[0]]
def param_select_callback(_attr, _old, _new): def param_select_callback(_attr, _old, _new):
_update_table() _update_table()
@ -398,26 +600,142 @@ def create():
) )
param_select.on_change("value", param_select_callback) param_select.on_change("value", param_select_callback)
app_fitctrl = app.FitControls()
def fit_from_spinner_callback(_attr, _old, new): def fit_from_spinner_callback(_attr, _old, new):
fit_from_span.location = new fit_from_span.location = new
app_fitctrl.from_spinner.on_change("value", fit_from_spinner_callback) fit_from_spinner = Spinner(title="Fit from:", width=145)
fit_from_spinner.on_change("value", fit_from_spinner_callback)
def fit_to_spinner_callback(_attr, _old, new): def fit_to_spinner_callback(_attr, _old, new):
fit_to_span.location = new fit_to_span.location = new
app_fitctrl.to_spinner.on_change("value", fit_to_spinner_callback) fit_to_spinner = Spinner(title="to:", width=145)
fit_to_spinner.on_change("value", fit_to_spinner_callback)
def fitparams_add_dropdown_callback(click):
# bokeh requires (str, str) for MultiSelect options
new_tag = f"{click.item}-{fitparams_select.tags[0]}"
fitparams_select.options.append((new_tag, click.item))
fit_params[new_tag] = fitparams_factory(click.item)
fitparams_select.tags[0] += 1
fitparams_add_dropdown = Dropdown(
label="Add fit function",
menu=[
("Linear", "linear"),
("Gaussian", "gaussian"),
("Voigt", "voigt"),
("Pseudo Voigt", "pvoigt"),
# ("Pseudo Voigt1", "pseudovoigt1"),
],
width=145,
)
fitparams_add_dropdown.on_click(fitparams_add_dropdown_callback)
def fitparams_select_callback(_attr, old, new):
# Avoid selection of multiple indicies (via Shift+Click or Ctrl+Click)
if len(new) > 1:
# drop selection to the previous one
fitparams_select.value = old
return
if len(old) > 1:
# skip unnecessary update caused by selection drop
return
if new:
fitparams_table_source.data.update(fit_params[new[0]])
else:
fitparams_table_source.data.update(dict(param=[], value=[], vary=[], min=[], max=[]))
fitparams_select = MultiSelect(options=[], height=120, width=145)
fitparams_select.tags = [0]
fitparams_select.on_change("value", fitparams_select_callback)
def fitparams_remove_button_callback():
if fitparams_select.value:
sel_tag = fitparams_select.value[0]
del fit_params[sel_tag]
for elem in fitparams_select.options:
if elem[0] == sel_tag:
fitparams_select.options.remove(elem)
break
fitparams_select.value = []
fitparams_remove_button = Button(label="Remove fit function", width=145)
fitparams_remove_button.on_click(fitparams_remove_button_callback)
def fitparams_factory(function):
if function == "linear":
params = ["slope", "intercept"]
elif function == "gaussian":
params = ["amplitude", "center", "sigma"]
elif function == "voigt":
params = ["amplitude", "center", "sigma", "gamma"]
elif function == "pvoigt":
params = ["amplitude", "center", "sigma", "fraction"]
elif function == "pseudovoigt1":
params = ["amplitude", "center", "g_sigma", "l_sigma", "fraction"]
else:
raise ValueError("Unknown fit function")
n = len(params)
fitparams = dict(
param=params, value=[None] * n, vary=[True] * n, min=[None] * n, max=[None] * n,
)
if function == "linear":
fitparams["value"] = [0, 1]
fitparams["vary"] = [False, True]
fitparams["min"] = [None, 0]
elif function == "gaussian":
fitparams["min"] = [0, None, None]
return fitparams
fitparams_table_source = ColumnDataSource(dict(param=[], value=[], vary=[], min=[], max=[]))
fitparams_table = DataTable(
source=fitparams_table_source,
columns=[
TableColumn(field="param", title="Parameter", editor=CellEditor()),
TableColumn(field="value", title="Value", editor=NumberEditor()),
TableColumn(field="vary", title="Vary", editor=CheckboxEditor()),
TableColumn(field="min", title="Min", editor=NumberEditor()),
TableColumn(field="max", title="Max", editor=NumberEditor()),
],
height=200,
width=350,
index_position=None,
editable=True,
auto_edit=True,
)
# start with `background` and `gauss` fit functions added
fitparams_add_dropdown_callback(types.SimpleNamespace(item="linear"))
fitparams_add_dropdown_callback(types.SimpleNamespace(item="gaussian"))
fitparams_select.value = ["gaussian-1"] # add selection to gauss
fit_output_textinput = TextAreaInput(title="Fit results:", width=750, height=200)
def proc_all_button_callback(): def proc_all_button_callback():
app_fitctrl.fit_dataset(dataset) for scan in det_data:
if scan["export"]:
pyzebra.fit_scan(
scan, fit_params, fit_from=fit_from_spinner.value, fit_to=fit_to_spinner.value
)
pyzebra.get_area(
scan,
area_method=AREA_METHODS[area_method_radiobutton.active],
lorentz=lorentz_checkbox.active,
)
_update_single_scan_plot() _update_single_scan_plot()
_update_overview() _update_overview()
_update_table() _update_table()
for scan in dataset: for scan in det_data:
if "fit" in scan: if "fit" in scan:
options = list(scan["fit"].params.keys()) options = list(scan["fit"].params.keys())
fit_param_select.options = options fit_param_select.options = options
@ -428,13 +746,21 @@ def create():
proc_all_button.on_click(proc_all_button_callback) proc_all_button.on_click(proc_all_button_callback)
def proc_button_callback(): def proc_button_callback():
app_fitctrl.fit_scan(_get_selected_scan()) scan = _get_selected_scan()
pyzebra.fit_scan(
scan, fit_params, fit_from=fit_from_spinner.value, fit_to=fit_to_spinner.value
)
pyzebra.get_area(
scan,
area_method=AREA_METHODS[area_method_radiobutton.active],
lorentz=lorentz_checkbox.active,
)
_update_single_scan_plot() _update_single_scan_plot()
_update_overview() _update_overview()
_update_table() _update_table()
for scan in dataset: for scan in det_data:
if "fit" in scan: if "fit" in scan:
options = list(scan["fit"].params.keys()) options = list(scan["fit"].params.keys())
fit_param_select.options = options fit_param_select.options = options
@ -444,6 +770,11 @@ def create():
proc_button = Button(label="Process Current", width=145) proc_button = Button(label="Process Current", width=145)
proc_button.on_click(proc_button_callback) proc_button.on_click(proc_button_callback)
area_method_div = Div(text="Intensity:", margin=(5, 5, 0, 5))
area_method_radiobutton = RadioGroup(labels=["Function", "Area"], active=0, width=145)
lorentz_checkbox = CheckboxGroup(labels=["Lorentz Correction"], width=145, margin=(13, 5, 5, 5))
export_preview_textinput = TextAreaInput(title="Export file preview:", width=450, height=400) export_preview_textinput = TextAreaInput(title="Export file preview:", width=450, height=400)
def _update_preview(): def _update_preview():
@ -451,7 +782,7 @@ def create():
temp_file = temp_dir + "/temp" temp_file = temp_dir + "/temp"
export_data = [] export_data = []
param_data = [] param_data = []
for scan, param in zip(dataset, scan_table_source.data["param"]): for scan, param in zip(det_data, scan_table_source.data["param"]):
if scan["export"] and param: if scan["export"] and param:
export_data.append(scan) export_data.append(scan)
param_data.append(param) param_data.append(param)
@ -470,49 +801,40 @@ def create():
content = "" content = ""
file_content.append(content) file_content.append(content)
app_dlfiles.set_contents(file_content) js_data.data.update(content=file_content)
export_preview_textinput.value = exported_content export_preview_textinput.value = exported_content
area_method_div = Div(text="Intensity:", margin=(5, 5, 0, 5)) save_button = Button(label="Download File", button_type="success", width=220)
save_button.js_on_click(CustomJS(args={"js_data": js_data}, code=javaScript))
fitpeak_controls = row( fitpeak_controls = row(
column( column(fitparams_add_dropdown, fitparams_select, fitparams_remove_button),
app_fitctrl.add_function_button, fitparams_table,
app_fitctrl.function_select,
app_fitctrl.remove_function_button,
),
app_fitctrl.params_table,
Spacer(width=20), Spacer(width=20),
column( column(fit_from_spinner, lorentz_checkbox, area_method_div, area_method_radiobutton),
app_fitctrl.from_spinner, column(fit_to_spinner, proc_button, proc_all_button),
app_fitctrl.lorentz_checkbox,
area_method_div,
app_fitctrl.area_method_radiogroup,
),
column(app_fitctrl.to_spinner, proc_button, proc_all_button),
) )
scan_layout = column( scan_layout = column(
scan_table, scan_table,
row(app_inputctrl.monitor_spinner, scan_motor_select, param_select), row(monitor_spinner, scan_motor_select, param_select),
row(column(Spacer(height=19), row(restore_button, merge_button)), merge_from_select), row(column(Spacer(height=19), row(restore_button, merge_button)), merge_from_select),
) )
upload_div = Div(text="or upload new .ccl/.dat files:", margin=(5, 5, 0, 5))
append_upload_div = Div(text="append extra files:", margin=(5, 5, 0, 5))
import_layout = column( import_layout = column(
app_inputctrl.filelist_select, file_select,
row(app_inputctrl.open_button, app_inputctrl.append_button), row(file_open_button, file_append_button),
upload_div, upload_div,
app_inputctrl.upload_button, upload_button,
append_upload_div, append_upload_div,
app_inputctrl.append_upload_button, append_upload_button,
) )
export_layout = column(export_preview_textinput, row(app_dlfiles.button)) export_layout = column(export_preview_textinput, row(save_button))
tab_layout = column( tab_layout = column(
row(import_layout, scan_layout, plots, Spacer(width=30), export_layout), row(import_layout, scan_layout, plots, Spacer(width=30), export_layout),
row(fitpeak_controls, app_fitctrl.result_textarea), row(fitpeak_controls, fit_output_textinput),
) )
return Panel(child=tab_layout, title="param study") return Panel(child=tab_layout, title="param study")

View File

@ -1,442 +0,0 @@
import base64
import io
import os
import numpy as np
from bokeh.io import curdoc
from bokeh.layouts import column, row
from bokeh.models import (
Button,
CheckboxGroup,
ColorBar,
ColumnDataSource,
DataRange1d,
Div,
FileInput,
LinearColorMapper,
LogColorMapper,
NumericInput,
Panel,
RadioGroup,
Select,
Spacer,
Spinner,
TextInput,
)
from bokeh.plotting import figure
from scipy import interpolate
import pyzebra
from pyzebra import app
from pyzebra.app.panel_hdf_viewer import calculate_hkl
def create():
doc = curdoc()
log = doc.logger
_update_slice = None
measured_data_div = Div(text="Measured <b>HDF</b> data:")
measured_data = FileInput(accept=".hdf", multiple=True, width=200)
upload_hkl_div = Div(text="Open hkl/mhkl data:")
upload_hkl_fi = FileInput(accept=".hkl,.mhkl", multiple=True, width=200)
def _prepare_plotting():
flag_ub = bool(redef_ub_cb.active)
flag_lattice = bool(redef_lattice_cb.active)
# Define horizontal direction of plotting plane, vertical direction will be calculated
# automatically
x_dir = list(map(float, hkl_in_plane_x.value.split()))
# Define direction orthogonal to plotting plane. Together with orth_cut, this parameter also
# defines the position of the cut, ie cut will be taken at orth_dir = [x,y,z]*orth_cut +- delta,
# where delta is max distance a data point can have from cut in rlu units
orth_dir = list(map(float, hkl_normal.value.split()))
# Load data files
md_fnames = measured_data.filename
md_fdata = measured_data.value
for ind, (fname, fdata) in enumerate(zip(md_fnames, md_fdata)):
# Read data
try:
det_data = pyzebra.read_detector_data(io.BytesIO(base64.b64decode(fdata)))
except Exception as e:
log.exception(e)
return None
if ind == 0:
if not flag_ub:
redef_ub_ti.value = " ".join(map(str, det_data["ub"].ravel()))
if not flag_lattice:
redef_lattice_ti.value = " ".join(map(str, det_data["cell"]))
num_slices = np.shape(det_data["counts"])[0]
# Change parameter
if flag_ub:
ub = list(map(float, redef_ub_ti.value.strip().split()))
det_data["ub"] = np.array(ub).reshape(3, 3)
# Convert h k l for all images in file
h_temp = np.empty(np.shape(det_data["counts"]))
k_temp = np.empty(np.shape(det_data["counts"]))
l_temp = np.empty(np.shape(det_data["counts"]))
for i in range(num_slices):
h_temp[i], k_temp[i], l_temp[i] = calculate_hkl(det_data, i)
# Append to matrix
if ind == 0:
h = h_temp
k = k_temp
l = l_temp
I_matrix = det_data["counts"]
else:
h = np.append(h, h_temp, axis=0)
k = np.append(k, k_temp, axis=0)
l = np.append(l, l_temp, axis=0)
I_matrix = np.append(I_matrix, det_data["counts"], axis=0)
if flag_lattice:
vals = list(map(float, redef_lattice_ti.value.strip().split()))
lattice = np.array(vals)
else:
lattice = det_data["cell"]
# Define matrix for converting to cartesian coordinates and back
alpha = lattice[3] * np.pi / 180.0
beta = lattice[4] * np.pi / 180.0
gamma = lattice[5] * np.pi / 180.0
# reciprocal angle parameters
beta_star = np.arccos(
(np.cos(alpha) * np.cos(gamma) - np.cos(beta)) / (np.sin(alpha) * np.sin(gamma))
)
gamma_star = np.arccos(
(np.cos(alpha) * np.cos(beta) - np.cos(gamma)) / (np.sin(alpha) * np.sin(beta))
)
# conversion matrix:
M = np.array(
[
[1, 1 * np.cos(gamma_star), 1 * np.cos(beta_star)],
[0, 1 * np.sin(gamma_star), -np.sin(beta_star) * np.cos(alpha)],
[0, 0, 1 * np.sin(beta_star) * np.sin(alpha)],
]
)
# Get last lattice vector
y_dir = np.cross(x_dir, orth_dir) # Second axes of plotting plane
# Rescale such that smallest element of y-dir vector is 1
y_dir2 = y_dir[y_dir != 0]
min_val = np.min(np.abs(y_dir2))
y_dir = y_dir / min_val
# Possibly flip direction of ydir:
if y_dir[np.argmax(abs(y_dir))] < 0:
y_dir = -y_dir
# Display the resulting y_dir
hkl_in_plane_y.value = " ".join([f"{val:.1f}" for val in y_dir])
# # Save length of lattice vectors
# x_length = np.linalg.norm(x_dir)
# y_length = np.linalg.norm(y_dir)
# # Save str for labels
# xlabel_str = " ".join(map(str, x_dir))
# ylabel_str = " ".join(map(str, y_dir))
# Normalize lattice vectors
y_dir = y_dir / np.linalg.norm(y_dir)
x_dir = x_dir / np.linalg.norm(x_dir)
orth_dir = orth_dir / np.linalg.norm(orth_dir)
# Calculate cartesian equivalents of lattice vectors
x_c = np.matmul(M, x_dir)
y_c = np.matmul(M, y_dir)
o_c = np.matmul(M, orth_dir)
# Calulcate vertical direction in plotting plame
y_vert = np.cross(x_c, o_c) # verical direction in plotting plane
if y_vert[np.argmax(abs(y_vert))] < 0:
y_vert = -y_vert
y_vert = y_vert / np.linalg.norm(y_vert)
# Normalize all directions
y_c = y_c / np.linalg.norm(y_c)
x_c = x_c / np.linalg.norm(x_c)
o_c = o_c / np.linalg.norm(o_c)
# Convert all hkls to cartesian
hkl = [[h, k, l]]
hkl = np.transpose(hkl)
hkl_c = np.matmul(M, hkl)
# Prepare hkl/mhkl data
hkl_coord = []
for j, fname in enumerate(upload_hkl_fi.filename):
with io.StringIO(base64.b64decode(upload_hkl_fi.value[j]).decode()) as file:
_, ext = os.path.splitext(fname)
try:
fdata = pyzebra.parse_hkl(file, ext)
except Exception as e:
log.exception(e)
return
for ind in range(len(fdata["counts"])):
# Recognize k_flag_vec
hkl = np.array([fdata["h"][ind], fdata["k"][ind], fdata["l"][ind]])
# Save data
hkl_coord.append(hkl)
def _update_slice():
# Where should cut be along orthogonal direction (Mutliplication factor onto orth_dir)
orth_cut = hkl_cut.value
# Width of cut
delta = hkl_delta.value
# Calculate distance of all points to plane
Q = np.array(o_c) * orth_cut
N = o_c / np.sqrt(np.sum(o_c**2))
v = np.empty(np.shape(hkl_c))
v[:, :, :, :, 0] = hkl_c[:, :, :, :, 0] - Q
dist = np.abs(np.dot(N, v))
dist = np.squeeze(dist)
dist = np.transpose(dist)
# Find points within acceptable distance of plane defined by o_c
ind = np.where(abs(dist) < delta)
if ind[0].size == 0:
image_source.data.update(image=[np.zeros((1, 1))])
return
# Project points onto axes
x = np.dot(x_c / np.sqrt(np.sum(x_c**2)), hkl_c)
y = np.dot(y_c / np.sqrt(np.sum(y_c**2)), hkl_c)
# take care of dimensions
x = np.squeeze(x)
x = np.transpose(x)
y = np.squeeze(y)
y = np.transpose(y)
# Get slices:
x_slice = x[ind]
y_slice = y[ind]
I_slice = I_matrix[ind]
# Meshgrid limits for plotting
if auto_range_cb.active:
min_x = np.min(x_slice)
max_x = np.max(x_slice)
min_y = np.min(y_slice)
max_y = np.max(y_slice)
xrange_min_ni.value = min_x
xrange_max_ni.value = max_x
yrange_min_ni.value = min_y
yrange_max_ni.value = max_y
else:
min_x = xrange_min_ni.value
max_x = xrange_max_ni.value
min_y = yrange_min_ni.value
max_y = yrange_max_ni.value
delta_x = xrange_step_ni.value
delta_y = yrange_step_ni.value
# Create interpolated mesh grid for plotting
grid_x, grid_y = np.mgrid[min_x:max_x:delta_x, min_y:max_y:delta_y]
I = interpolate.griddata((x_slice, y_slice), I_slice, (grid_x, grid_y))
# Update plot
display_min_ni.value = 0
display_max_ni.value = np.max(I_slice) * 0.25
image_source.data.update(
image=[I.T], x=[min_x], dw=[max_x - min_x], y=[min_y], dh=[max_y - min_y]
)
scan_x, scan_y = [], []
for j in range(len(hkl_coord)):
# Get middle hkl from list
hklm = M @ hkl_coord[j]
# Decide if point is in the cut
proj = np.dot(hklm, o_c)
if abs(proj - orth_cut) >= delta:
continue
# Project onto axes
hklmx = np.dot(hklm, x_c)
hklmy = np.dot(hklm, y_vert)
# Plot middle point of scan
scan_x.append(hklmx)
scan_y.append(hklmy)
scatter_source.data.update(x=scan_x, y=scan_y)
return _update_slice
def plot_file_callback():
nonlocal _update_slice
_update_slice = _prepare_plotting()
_update_slice()
plot_file = Button(label="Plot selected file(s)", button_type="primary", width=200)
plot_file.on_click(plot_file_callback)
plot = figure(
x_range=DataRange1d(),
y_range=DataRange1d(),
height=550 + 27,
width=550 + 117,
tools="pan,wheel_zoom,reset",
)
plot.toolbar.logo = None
lin_color_mapper = LinearColorMapper(nan_color=(0, 0, 0, 0), low=0, high=1)
log_color_mapper = LogColorMapper(nan_color=(0, 0, 0, 0), low=0, high=1)
image_source = ColumnDataSource(dict(image=[np.zeros((1, 1))], x=[0], y=[0], dw=[1], dh=[1]))
plot_image = plot.image(source=image_source, color_mapper=lin_color_mapper)
lin_color_bar = ColorBar(color_mapper=lin_color_mapper, width=15)
log_color_bar = ColorBar(color_mapper=log_color_mapper, width=15, visible=False)
plot.add_layout(lin_color_bar, "right")
plot.add_layout(log_color_bar, "right")
scatter_source = ColumnDataSource(dict(x=[], y=[]))
plot.scatter(source=scatter_source, size=4, fill_color="green", line_color="green")
hkl_div = Div(text="HKL:", margin=(5, 5, 0, 5))
hkl_normal = TextInput(title="normal", value="0 0 1", width=70)
def hkl_cut_callback(_attr, _old, _new):
if _update_slice is not None:
_update_slice()
hkl_cut = Spinner(title="cut", value=0, step=0.1, width=70)
hkl_cut.on_change("value_throttled", hkl_cut_callback)
hkl_delta = NumericInput(title="delta", value=0.1, mode="float", width=70)
hkl_in_plane_x = TextInput(title="in-plane X", value="1 0 0", width=70)
hkl_in_plane_y = TextInput(title="in-plane Y", value="", width=100, disabled=True)
def redef_lattice_cb_callback(_attr, _old, new):
if 0 in new:
redef_lattice_ti.disabled = False
else:
redef_lattice_ti.disabled = True
redef_lattice_cb = CheckboxGroup(labels=["Redefine lattice:"], width=110)
redef_lattice_cb.on_change("active", redef_lattice_cb_callback)
redef_lattice_ti = TextInput(width=490, disabled=True)
def redef_ub_cb_callback(_attr, _old, new):
if 0 in new:
redef_ub_ti.disabled = False
else:
redef_ub_ti.disabled = True
redef_ub_cb = CheckboxGroup(labels=["Redefine UB:"], width=110)
redef_ub_cb.on_change("active", redef_ub_cb_callback)
redef_ub_ti = TextInput(width=490, disabled=True)
def colormap_select_callback(_attr, _old, new):
lin_color_mapper.palette = new
log_color_mapper.palette = new
colormap_select = Select(
title="Colormap:",
options=[("Greys256", "greys"), ("Plasma256", "plasma"), ("Cividis256", "cividis")],
width=100,
)
colormap_select.on_change("value", colormap_select_callback)
colormap_select.value = "Plasma256"
def display_min_ni_callback(_attr, _old, new):
lin_color_mapper.low = new
log_color_mapper.low = new
display_min_ni = NumericInput(title="Intensity min:", value=0, mode="float", width=70)
display_min_ni.on_change("value", display_min_ni_callback)
def display_max_ni_callback(_attr, _old, new):
lin_color_mapper.high = new
log_color_mapper.high = new
display_max_ni = NumericInput(title="max:", value=1, mode="float", width=70)
display_max_ni.on_change("value", display_max_ni_callback)
def colormap_scale_rg_callback(_attr, _old, new):
if new == 0: # Linear
plot_image.glyph.color_mapper = lin_color_mapper
lin_color_bar.visible = True
log_color_bar.visible = False
else: # Logarithmic
if display_min_ni.value > 0 and display_max_ni.value > 0:
plot_image.glyph.color_mapper = log_color_mapper
lin_color_bar.visible = False
log_color_bar.visible = True
else:
colormap_scale_rg.active = 0
colormap_scale_rg = RadioGroup(labels=["Linear", "Logarithmic"], active=0, width=100)
colormap_scale_rg.on_change("active", colormap_scale_rg_callback)
xrange_min_ni = NumericInput(title="x range min:", value=0, mode="float", width=70)
xrange_max_ni = NumericInput(title="max:", value=1, mode="float", width=70)
xrange_step_ni = NumericInput(title="x mesh:", value=0.01, mode="float", width=70)
yrange_min_ni = NumericInput(title="y range min:", value=0, mode="float", width=70)
yrange_max_ni = NumericInput(title="max:", value=1, mode="float", width=70)
yrange_step_ni = NumericInput(title="y mesh:", value=0.01, mode="float", width=70)
def auto_range_cb_callback(_attr, _old, new):
if 0 in new:
xrange_min_ni.disabled = True
xrange_max_ni.disabled = True
yrange_min_ni.disabled = True
yrange_max_ni.disabled = True
else:
xrange_min_ni.disabled = False
xrange_max_ni.disabled = False
yrange_min_ni.disabled = False
yrange_max_ni.disabled = False
auto_range_cb = CheckboxGroup(labels=["Auto range:"], width=110)
auto_range_cb.on_change("active", auto_range_cb_callback)
auto_range_cb.active = [0]
column1_layout = column(
row(
column(row(measured_data_div, measured_data), row(upload_hkl_div, upload_hkl_fi)),
plot_file,
),
row(
plot,
column(
hkl_div,
row(hkl_normal, hkl_cut, hkl_delta),
row(hkl_in_plane_x, hkl_in_plane_y),
row(colormap_select, column(Spacer(height=15), colormap_scale_rg)),
row(display_min_ni, display_max_ni),
row(column(Spacer(height=19), auto_range_cb)),
row(xrange_min_ni, xrange_max_ni),
row(yrange_min_ni, yrange_max_ni),
row(xrange_step_ni, yrange_step_ni),
),
),
row(column(Spacer(height=7), redef_lattice_cb), redef_lattice_ti),
row(column(Spacer(height=7), redef_ub_cb), redef_ub_ti),
)
column2_layout = app.PlotHKL().layout
tab_layout = row(column1_layout, Spacer(width=50), column2_layout)
return Panel(child=tab_layout, title="plot data")

View File

@ -21,7 +21,6 @@ import pyzebra
def create(): def create():
doc = curdoc() doc = curdoc()
log = doc.logger
events_data = doc.events_data events_data = doc.events_data
npeaks_spinner = Spinner(title="Number of peaks from hdf_view panel:", disabled=True) npeaks_spinner = Spinner(title="Number of peaks from hdf_view panel:", disabled=True)
@ -64,8 +63,8 @@ def create():
stderr=subprocess.STDOUT, stderr=subprocess.STDOUT,
text=True, text=True,
) )
log.info(" ".join(comp_proc.args)) print(" ".join(comp_proc.args))
log.info(comp_proc.stdout) print(comp_proc.stdout)
# prepare an event file # prepare an event file
diff_vec = [] diff_vec = []
@ -95,9 +94,9 @@ def create():
f"{x_pos} {y_pos} {intensity} {snr_cnts} {dv1} {dv2} {dv3} {d_spacing}\n" f"{x_pos} {y_pos} {intensity} {snr_cnts} {dv1} {dv2} {dv3} {d_spacing}\n"
) )
log.info(f"Content of {temp_event_file}:") print(f"Content of {temp_event_file}:")
with open(temp_event_file) as f: with open(temp_event_file) as f:
log.info(f.read()) print(f.read())
comp_proc = subprocess.run( comp_proc = subprocess.run(
[ [
@ -124,12 +123,12 @@ def create():
stderr=subprocess.STDOUT, stderr=subprocess.STDOUT,
text=True, text=True,
) )
log.info(" ".join(comp_proc.args)) print(" ".join(comp_proc.args))
log.info(comp_proc.stdout) print(comp_proc.stdout)
spind_out_file = os.path.join(temp_dir, "spind.txt") spind_out_file = os.path.join(temp_dir, "spind.txt")
spind_res = dict( spind_res = dict(
label=[], crystal_id=[], match_rate=[], matched_peaks=[], column_5=[], ub_matrix=[] label=[], crystal_id=[], match_rate=[], matched_peaks=[], column_5=[], ub_matrix=[],
) )
try: try:
with open(spind_out_file) as f_out: with open(spind_out_file) as f_out:
@ -144,15 +143,16 @@ def create():
# last digits are spind UB matrix # last digits are spind UB matrix
vals = list(map(float, c_rest)) vals = list(map(float, c_rest))
ub_matrix_spind = np.transpose(np.array(vals).reshape(3, 3)) ub_matrix_spind = np.transpose(np.array(vals).reshape(3, 3))
ub_matrices.append(ub_matrix_spind) ub_matrix = np.linalg.inv(ub_matrix_spind)
ub_matrices.append(ub_matrix)
spind_res["ub_matrix"].append(str(ub_matrix_spind * 1e-10)) spind_res["ub_matrix"].append(str(ub_matrix_spind * 1e-10))
log.info(f"Content of {spind_out_file}:") print(f"Content of {spind_out_file}:")
with open(spind_out_file) as f: with open(spind_out_file) as f:
log.info(f.read()) print(f.read())
except FileNotFoundError: except FileNotFoundError:
log.warning("No results from spind") print("No results from spind")
results_table_source.data.update(spind_res) results_table_source.data.update(spind_res)
@ -168,11 +168,11 @@ def create():
def results_table_select_callback(_attr, old, new): def results_table_select_callback(_attr, old, new):
if new: if new:
ind = new[0] ind = new[0]
ub_matrix_spind = ub_matrices[ind] ub_matrix = ub_matrices[ind]
res = "" res = ""
for vec in diff_vec: for vec in diff_vec:
res += f"{np.linalg.inv(ub_matrix_spind) @ vec}\n" res += f"{ub_matrix @ vec}\n"
ub_matrix_textareainput.value = str(ub_matrix_spind * 1e-10) ub_matrix_textareainput.value = str(ub_matrix * 1e10)
hkl_textareainput.value = res hkl_textareainput.value = res
else: else:
ub_matrix_textareainput.value = "" ub_matrix_textareainput.value = ""

View File

@ -1,549 +0,0 @@
import base64
import io
import os
import numpy as np
from bokeh.io import curdoc
from bokeh.layouts import column, row
from bokeh.models import (
Arrow,
Button,
CheckboxGroup,
ColumnDataSource,
Div,
FileInput,
HoverTool,
Legend,
LegendItem,
NormalHead,
NumericInput,
RadioGroup,
Spinner,
TextAreaInput,
TextInput,
)
from bokeh.palettes import Dark2
from bokeh.plotting import figure
from scipy.integrate import simpson, trapezoid
import pyzebra
class PlotHKL:
def __init__(self):
doc = curdoc()
log = doc.logger
_update_slice = None
measured_data_div = Div(text="Measured <b>CCL</b> data:")
measured_data = FileInput(accept=".ccl", multiple=True, width=200)
upload_hkl_div = Div(text="Open hkl/mhkl data:")
upload_hkl_fi = FileInput(accept=".hkl,.mhkl", multiple=True, width=200)
min_grid_x = -10
max_grid_x = 10
min_grid_y = -10
max_grid_y = 10
cmap = Dark2[8]
syms = ["circle", "inverted_triangle", "square", "diamond", "star", "triangle"]
def _prepare_plotting():
orth_dir = list(map(float, hkl_normal.value.split()))
x_dir = list(map(float, hkl_in_plane_x.value.split()))
k = np.array(k_vectors.value.split()).astype(float).reshape(-1, 3)
tol_k = tol_k_ni.value
# multiplier for resolution function (in case of samples with large mosaicity)
res_mult = res_mult_ni.value
md_fnames = measured_data.filename
md_fdata = measured_data.value
# Load first data file, read angles and define matrices to perform conversion to cartesian
# coordinates and back
with io.StringIO(base64.b64decode(md_fdata[0]).decode()) as file:
_, ext = os.path.splitext(md_fnames[0])
try:
file_data = pyzebra.parse_1D(file, ext, log=log)
except Exception as e:
log.exception(e)
return None
alpha = file_data[0]["alpha_cell"] * np.pi / 180.0
beta = file_data[0]["beta_cell"] * np.pi / 180.0
gamma = file_data[0]["gamma_cell"] * np.pi / 180.0
# reciprocal angle parameters
beta_star = np.arccos(
(np.cos(alpha) * np.cos(gamma) - np.cos(beta)) / (np.sin(alpha) * np.sin(gamma))
)
gamma_star = np.arccos(
(np.cos(alpha) * np.cos(beta) - np.cos(gamma)) / (np.sin(alpha) * np.sin(beta))
)
# conversion matrix
M = np.array(
[
[1, np.cos(gamma_star), np.cos(beta_star)],
[0, np.sin(gamma_star), -np.sin(beta_star) * np.cos(alpha)],
[0, 0, np.sin(beta_star) * np.sin(alpha)],
]
)
# Get last lattice vector
y_dir = np.cross(x_dir, orth_dir) # Second axes of plotting plane
# Rescale such that smallest element of y-dir vector is 1
y_dir2 = y_dir[y_dir != 0]
min_val = np.min(np.abs(y_dir2))
y_dir = y_dir / min_val
# Possibly flip direction of ydir:
if y_dir[np.argmax(abs(y_dir))] < 0:
y_dir = -y_dir
# Display the resulting y_dir
hkl_in_plane_y.value = " ".join([f"{val:.1f}" for val in y_dir])
# Save length of lattice vectors
x_length = np.linalg.norm(x_dir)
y_length = np.linalg.norm(y_dir)
# Save str for labels
xlabel_str = " ".join(map(str, x_dir))
ylabel_str = " ".join(map(str, y_dir))
# Normalize lattice vectors
y_dir = y_dir / np.linalg.norm(y_dir)
x_dir = x_dir / np.linalg.norm(x_dir)
orth_dir = orth_dir / np.linalg.norm(orth_dir)
# Calculate cartesian equivalents of lattice vectors
x_c = np.matmul(M, x_dir)
y_c = np.matmul(M, y_dir)
o_c = np.matmul(M, orth_dir)
# Calulcate vertical direction in plotting plame
y_vert = np.cross(x_c, o_c) # verical direction in plotting plane
if y_vert[np.argmax(abs(y_vert))] < 0:
y_vert = -y_vert
y_vert = y_vert / np.linalg.norm(y_vert)
# Normalize all directions
y_c = y_c / np.linalg.norm(y_c)
x_c = x_c / np.linalg.norm(x_c)
o_c = o_c / np.linalg.norm(o_c)
# Read all data
hkl_coord = []
intensity_vec = []
k_flag_vec = []
file_flag_vec = []
res_vec = []
res_N = 10
for j, md_fname in enumerate(md_fnames):
with io.StringIO(base64.b64decode(md_fdata[j]).decode()) as file:
_, ext = os.path.splitext(md_fname)
try:
file_data = pyzebra.parse_1D(file, ext, log=log)
except Exception as e:
log.exception(e)
return None
pyzebra.normalize_dataset(file_data)
# Loop throguh all data
for scan in file_data:
om = scan["omega"]
gammad = scan["twotheta"]
chi = scan["chi"]
phi = scan["phi"]
nud = 0 # 1d detector
ub_inv = np.linalg.inv(scan["ub"])
counts = scan["counts"]
wave = scan["wavelength"]
# Calculate resolution in degrees
expr = np.tan(gammad / 2 * np.pi / 180)
fwhm = np.sqrt(0.4639 * expr**2 - 0.4452 * expr + 0.1506) * res_mult
res = 4 * np.pi / wave * np.sin(fwhm * np.pi / 180)
# Get first and final hkl
hkl1 = pyzebra.ang2hkl_1d(wave, gammad, om[0], chi, phi, nud, ub_inv)
hkl2 = pyzebra.ang2hkl_1d(wave, gammad, om[-1], chi, phi, nud, ub_inv)
# Get hkl at best intensity
hkl_m = pyzebra.ang2hkl_1d(
wave, gammad, om[np.argmax(counts)], chi, phi, nud, ub_inv
)
# Estimate intensity for marker size scaling
y_bkg = [counts[0], counts[-1]]
x_bkg = [om[0], om[-1]]
c = int(simpson(counts, x=om) - trapezoid(y_bkg, x=x_bkg))
# Recognize k_flag_vec
reduced_hkl_m = np.minimum(1 - hkl_m % 1, hkl_m % 1)
for ind, _k in enumerate(k):
if all(np.abs(reduced_hkl_m - _k) < tol_k):
k_flag_vec.append(ind)
break
else:
# not required
continue
# Save data
hkl_coord.append([hkl1, hkl2, hkl_m])
intensity_vec.append(c)
file_flag_vec.append(j)
res_vec.append(res)
x_spacing = np.dot(M @ x_dir, x_c) * x_length
y_spacing = np.dot(M @ y_dir, y_vert) * y_length
y_spacingx = np.dot(M @ y_dir, x_c) * y_length
# Plot coordinate system
arrow1.x_end = x_spacing
arrow1.y_end = 0
arrow2.x_end = y_spacingx
arrow2.y_end = y_spacing
# Add labels
kvect_source.data.update(
x=[x_spacing / 4, -0.1],
y=[x_spacing / 4 - 0.5, y_spacing / 2],
text=[xlabel_str, ylabel_str],
)
# Plot grid lines
xs, ys = [], []
xs_minor, ys_minor = [], []
for yy in np.arange(min_grid_y, max_grid_y, 1):
# Calculate end and start point
hkl1 = min_grid_x * x_dir + yy * y_dir
hkl2 = max_grid_x * x_dir + yy * y_dir
hkl1 = M @ hkl1
hkl2 = M @ hkl2
# Project points onto axes
x1 = np.dot(x_c, hkl1) * x_length
y1 = np.dot(y_vert, hkl1) * y_length
x2 = np.dot(x_c, hkl2) * x_length
y2 = np.dot(y_vert, hkl2) * y_length
xs.append([x1, x2])
ys.append([y1, y2])
for xx in np.arange(min_grid_x, max_grid_x, 1):
# Calculate end and start point
hkl1 = xx * x_dir + min_grid_y * y_dir
hkl2 = xx * x_dir + max_grid_y * y_dir
hkl1 = M @ hkl1
hkl2 = M @ hkl2
# Project points onto axes
x1 = np.dot(x_c, hkl1) * x_length
y1 = np.dot(y_vert, hkl1) * y_length
x2 = np.dot(x_c, hkl2) * x_length
y2 = np.dot(y_vert, hkl2) * y_length
xs.append([x1, x2])
ys.append([y1, y2])
for yy in np.arange(min_grid_y, max_grid_y, 0.5):
# Calculate end and start point
hkl1 = min_grid_x * x_dir + yy * y_dir
hkl2 = max_grid_x * x_dir + yy * y_dir
hkl1 = M @ hkl1
hkl2 = M @ hkl2
# Project points onto axes
x1 = np.dot(x_c, hkl1) * x_length
y1 = np.dot(y_vert, hkl1) * y_length
x2 = np.dot(x_c, hkl2) * x_length
y2 = np.dot(y_vert, hkl2) * y_length
xs_minor.append([x1, x2])
ys_minor.append([y1, y2])
for xx in np.arange(min_grid_x, max_grid_x, 0.5):
# Calculate end and start point
hkl1 = xx * x_dir + min_grid_y * y_dir
hkl2 = xx * x_dir + max_grid_y * y_dir
hkl1 = M @ hkl1
hkl2 = M @ hkl2
# Project points onto axes
x1 = np.dot(x_c, hkl1) * x_length
y1 = np.dot(y_vert, hkl1) * y_length
x2 = np.dot(x_c, hkl2) * x_length
y2 = np.dot(y_vert, hkl2) * y_length
xs_minor.append([x1, x2])
ys_minor.append([y1, y2])
grid_source.data.update(xs=xs, ys=ys)
minor_grid_source.data.update(xs=xs_minor, ys=ys_minor)
# Prepare hkl/mhkl data
hkl_coord2 = []
for j, fname in enumerate(upload_hkl_fi.filename):
with io.StringIO(base64.b64decode(upload_hkl_fi.value[j]).decode()) as file:
_, ext = os.path.splitext(fname)
try:
fdata = pyzebra.parse_hkl(file, ext)
except Exception as e:
log.exception(e)
return
for ind in range(len(fdata["counts"])):
# Recognize k_flag_vec
hkl = np.array([fdata["h"][ind], fdata["k"][ind], fdata["l"][ind]])
# Save data
hkl_coord2.append(hkl)
def _update_slice():
cut_tol = hkl_delta.value
cut_or = hkl_cut.value
# different symbols based on file number
file_flag = 0 in disting_opt_cb.active
# scale marker size according to intensity
intensity_flag = 1 in disting_opt_cb.active
# use color to mark different propagation vectors
prop_legend_flag = 2 in disting_opt_cb.active
# use resolution ellipsis
res_flag = disting_opt_rb.active
el_x, el_y, el_w, el_h, el_c = [], [], [], [], []
scan_xs, scan_ys, scan_x, scan_y = [], [], [], []
scan_m, scan_s, scan_c, scan_l, scan_hkl = [], [], [], [], []
for j in range(len(hkl_coord)):
# Get middle hkl from list
hklm = M @ hkl_coord[j][2]
# Decide if point is in the cut
proj = np.dot(hklm, o_c)
if abs(proj - cut_or) >= cut_tol:
continue
hkl1 = M @ hkl_coord[j][0]
hkl2 = M @ hkl_coord[j][1]
# Project onto axes
hkl1x = np.dot(hkl1, x_c)
hkl1y = np.dot(hkl1, y_vert)
hkl2x = np.dot(hkl2, x_c)
hkl2y = np.dot(hkl2, y_vert)
hklmx = np.dot(hklm, x_c)
hklmy = np.dot(hklm, y_vert)
if intensity_flag:
markersize = max(6, int(intensity_vec[j] / max(intensity_vec) * 30))
else:
markersize = 6
if file_flag:
plot_symbol = syms[file_flag_vec[j]]
else:
plot_symbol = "circle"
if prop_legend_flag:
col_value = cmap[k_flag_vec[j]]
else:
col_value = "black"
if res_flag:
# Generate series of circles along scan line
res = res_vec[j]
el_x.extend(np.linspace(hkl1x, hkl2x, num=res_N))
el_y.extend(np.linspace(hkl1y, hkl2y, num=res_N))
el_w.extend([res / 2] * res_N)
el_h.extend([res / 2] * res_N)
el_c.extend([col_value] * res_N)
else:
# Plot scan line
scan_xs.append([hkl1x, hkl2x])
scan_ys.append([hkl1y, hkl2y])
# Plot middle point of scan
scan_x.append(hklmx)
scan_y.append(hklmy)
scan_m.append(plot_symbol)
scan_s.append(markersize)
# Color and legend label
scan_c.append(col_value)
scan_l.append(md_fnames[file_flag_vec[j]])
scan_hkl.append(hkl_coord[j][2])
ellipse_source.data.update(x=el_x, y=el_y, width=el_w, height=el_h, c=el_c)
scan_source.data.update(
xs=scan_xs,
ys=scan_ys,
x=scan_x,
y=scan_y,
m=scan_m,
s=scan_s,
c=scan_c,
l=scan_l,
hkl=scan_hkl,
)
# Legend items for different file entries (symbol)
legend_items = []
if not res_flag and file_flag:
labels, inds = np.unique(scan_source.data["l"], return_index=True)
for label, ind in zip(labels, inds):
legend_items.append(LegendItem(label=label, renderers=[scatter], index=ind))
# Legend items for propagation vector (color)
if prop_legend_flag:
if res_flag:
source, render = ellipse_source, ellipse
else:
source, render = scan_source, mline
labels, inds = np.unique(source.data["c"], return_index=True)
for label, ind in zip(labels, inds):
label = f"k={k[cmap.index(label)]}"
legend_items.append(LegendItem(label=label, renderers=[render], index=ind))
plot.legend.items = legend_items
scan_x2, scan_y2, scan_hkl2 = [], [], []
for j in range(len(hkl_coord2)):
# Get middle hkl from list
hklm = M @ hkl_coord2[j]
# Decide if point is in the cut
proj = np.dot(hklm, o_c)
if abs(proj - cut_or) >= cut_tol:
continue
# Project onto axes
hklmx = np.dot(hklm, x_c)
hklmy = np.dot(hklm, y_vert)
scan_x2.append(hklmx)
scan_y2.append(hklmy)
scan_hkl2.append(hkl_coord2[j])
scatter_source2.data.update(x=scan_x2, y=scan_y2, hkl=scan_hkl2)
return _update_slice
def plot_file_callback():
nonlocal _update_slice
_update_slice = _prepare_plotting()
_update_slice()
plot_file = Button(label="Plot selected file(s)", button_type="primary", width=200)
plot_file.on_click(plot_file_callback)
plot = figure(height=550, width=550 + 32, tools="pan,wheel_zoom,reset")
plot.toolbar.logo = None
plot.xaxis.visible = False
plot.xgrid.visible = False
plot.yaxis.visible = False
plot.ygrid.visible = False
arrow1 = Arrow(x_start=0, y_start=0, x_end=0, y_end=0, end=NormalHead(size=10))
plot.add_layout(arrow1)
arrow2 = Arrow(x_start=0, y_start=0, x_end=0, y_end=0, end=NormalHead(size=10))
plot.add_layout(arrow2)
kvect_source = ColumnDataSource(dict(x=[], y=[], text=[]))
plot.text(source=kvect_source)
grid_source = ColumnDataSource(dict(xs=[], ys=[]))
plot.multi_line(source=grid_source, line_color="gray")
minor_grid_source = ColumnDataSource(dict(xs=[], ys=[]))
plot.multi_line(source=minor_grid_source, line_color="gray", line_dash="dotted")
ellipse_source = ColumnDataSource(dict(x=[], y=[], width=[], height=[], c=[]))
ellipse = plot.ellipse(source=ellipse_source, fill_color="c", line_color="c")
scan_source = ColumnDataSource(
dict(xs=[], ys=[], x=[], y=[], m=[], s=[], c=[], l=[], hkl=[])
)
mline = plot.multi_line(source=scan_source, line_color="c")
scatter = plot.scatter(
source=scan_source, marker="m", size="s", fill_color="c", line_color="c"
)
scatter_source2 = ColumnDataSource(dict(x=[], y=[], hkl=[]))
scatter2 = plot.scatter(
source=scatter_source2, size=4, fill_color="green", line_color="green"
)
plot.x_range.renderers = [ellipse, mline, scatter, scatter2]
plot.y_range.renderers = [ellipse, mline, scatter, scatter2]
plot.add_layout(Legend(items=[], location="top_left", click_policy="hide"))
plot.add_tools(HoverTool(renderers=[scatter, scatter2], tooltips=[("hkl", "@hkl")]))
hkl_div = Div(text="HKL:", margin=(5, 5, 0, 5))
hkl_normal = TextInput(title="normal", value="0 0 1", width=70)
def hkl_cut_callback(_attr, _old, _new):
if _update_slice is not None:
_update_slice()
hkl_cut = Spinner(title="cut", value=0, step=0.1, width=70)
hkl_cut.on_change("value_throttled", hkl_cut_callback)
hkl_delta = NumericInput(title="delta", value=0.1, mode="float", width=70)
hkl_in_plane_x = TextInput(title="in-plane X", value="1 0 0", width=70)
hkl_in_plane_y = TextInput(title="in-plane Y", value="", width=100, disabled=True)
disting_opt_div = Div(text="Distinguish options:", margin=(5, 5, 0, 5))
disting_opt_cb = CheckboxGroup(
labels=["files (symbols)", "intensities (size)", "k vectors nucl/magn (colors)"],
active=[0, 1, 2],
width=200,
)
disting_opt_rb = RadioGroup(
labels=["scan direction", "resolution ellipsoid"], active=0, width=200
)
k_vectors = TextAreaInput(
title="k vectors:", value="0.0 0.0 0.0\n0.5 0.0 0.0\n0.5 0.5 0.0", width=150
)
res_mult_ni = NumericInput(title="Resolution mult:", value=10, mode="int", width=100)
tol_k_ni = NumericInput(title="k tolerance:", value=0.01, mode="float", width=100)
def show_legend_cb_callback(_attr, _old, new):
plot.legend.visible = 0 in new
show_legend_cb = CheckboxGroup(labels=["Show legend"], active=[0])
show_legend_cb.on_change("active", show_legend_cb_callback)
layout = column(
row(
column(row(measured_data_div, measured_data), row(upload_hkl_div, upload_hkl_fi)),
plot_file,
),
row(
plot,
column(
hkl_div,
row(hkl_normal, hkl_cut, hkl_delta),
row(hkl_in_plane_x, hkl_in_plane_y),
k_vectors,
row(tol_k_ni, res_mult_ni),
disting_opt_div,
disting_opt_cb,
disting_opt_rb,
show_legend_cb,
),
),
)
self.layout = layout

View File

@ -1,56 +1,47 @@
import logging
import os import os
import re import re
from ast import literal_eval
from collections import defaultdict from collections import defaultdict
import numpy as np import numpy as np
logger = logging.getLogger(__name__)
META_VARS_STR = ( META_VARS_STR = (
"instrument", "instrument",
"title", "title",
"comment", "sample",
"user", "user",
"proposal_id", "ProposalID",
"original_filename", "original_filename",
"date", "date",
"zebra_mode", "zebra_mode",
"zebramode", "proposal",
"sample_name", "proposal_user",
"proposal_title",
"proposal_email",
"detectorDistance",
) )
META_VARS_FLOAT = ( META_VARS_FLOAT = (
"omega",
"mf",
"2-theta",
"chi",
"phi",
"nu",
"temp",
"wavelenght",
"a", "a",
"b", "b",
"c", "c",
"alpha", "alpha",
"beta", "beta",
"gamma", "gamma",
"omega",
"chi",
"phi",
"temp",
"mf",
"temperature",
"magnetic_field",
"cex1", "cex1",
"cex2", "cex2",
"wavelength",
"mexz", "mexz",
"moml", "moml",
"mcvl", "mcvl",
"momu", "momu",
"mcvu", "mcvu",
"2-theta",
"twotheta",
"nu",
"gamma_angle",
"polar_angle",
"tilt_angle",
"distance",
"distance_an",
"snv", "snv",
"snh", "snh",
"snvm", "snvm",
@ -63,16 +54,9 @@ META_VARS_FLOAT = (
"s2vb", "s2vb",
"s2hr", "s2hr",
"s2hl", "s2hl",
"a5",
"a6",
"a4t",
"s2ant",
"s2anb",
"s2anl",
"s2anr",
) )
META_UB_MATRIX = ("ub1j", "ub2j", "ub3j", "UB") META_UB_MATRIX = ("ub1j", "ub2j", "ub3j")
CCL_FIRST_LINE = (("idx", int), ("h", float), ("k", float), ("l", float)) CCL_FIRST_LINE = (("idx", int), ("h", float), ("k", float), ("l", float))
@ -109,68 +93,47 @@ def load_1D(filepath):
""" """
with open(filepath, "r") as infile: with open(filepath, "r") as infile:
_, ext = os.path.splitext(filepath) _, ext = os.path.splitext(filepath)
dataset = parse_1D(infile, data_type=ext) det_variables = parse_1D(infile, data_type=ext)
return dataset return det_variables
def parse_1D(fileobj, data_type, log=logger): def parse_1D(fileobj, data_type):
metadata = {"data_type": data_type} metadata = {"data_type": data_type}
# read metadata # read metadata
for line in fileobj: for line in fileobj:
if "=" in line:
variable, value = line.split("=", 1)
variable = variable.strip()
value = value.strip()
if variable in META_VARS_STR:
metadata[variable] = value
elif variable in META_VARS_FLOAT:
if variable == "2-theta": # fix that angle name not to be an expression
variable = "twotheta"
if variable in ("a", "b", "c", "alpha", "beta", "gamma"):
variable += "_cell"
metadata[variable] = float(value)
elif variable in META_UB_MATRIX:
if "ub" not in metadata:
metadata["ub"] = np.zeros((3, 3))
row = int(variable[-2]) - 1
metadata["ub"][row, :] = list(map(float, value.split()))
if "#data" in line: if "#data" in line:
# this is the end of metadata and the start of data section # this is the end of metadata and the start of data section
break break
if "=" not in line:
# skip comments / empty lines
continue
var_name, value = line.split("=", 1)
var_name = var_name.strip()
value = value.strip()
if value == "UNKNOWN":
metadata[var_name] = None
continue
try:
if var_name in META_VARS_STR:
if var_name == "zebramode":
var_name = "zebra_mode"
metadata[var_name] = value
elif var_name in META_VARS_FLOAT:
if var_name == "2-theta": # fix that angle name not to be an expression
var_name = "twotheta"
if var_name == "temperature":
var_name = "temp"
if var_name == "magnetic_field":
var_name = "mf"
if var_name in ("a", "b", "c", "alpha", "beta", "gamma"):
var_name += "_cell"
metadata[var_name] = float(value)
elif var_name in META_UB_MATRIX:
if var_name == "UB":
metadata["ub"] = np.array(literal_eval(value)).reshape(3, 3)
else:
if "ub" not in metadata:
metadata["ub"] = np.zeros((3, 3))
row = int(var_name[-2]) - 1
metadata["ub"][row, :] = list(map(float, value.split()))
except Exception:
log.error(f"Error reading {var_name} with value '{value}'")
metadata[var_name] = 0
# handle older files that don't contain "zebra_mode" metadata # handle older files that don't contain "zebra_mode" metadata
if "zebra_mode" not in metadata: if "zebra_mode" not in metadata:
metadata["zebra_mode"] = "nb" metadata["zebra_mode"] = "nb"
# read data # read data
dataset = [] scan = []
if data_type == ".ccl": if data_type == ".ccl":
ccl_first_line = CCL_FIRST_LINE + CCL_ANGLES[metadata["zebra_mode"]] ccl_first_line = CCL_FIRST_LINE + CCL_ANGLES[metadata["zebra_mode"]]
ccl_second_line = CCL_SECOND_LINE ccl_second_line = CCL_SECOND_LINE
@ -180,73 +143,57 @@ def parse_1D(fileobj, data_type, log=logger):
if not line or line.isspace(): if not line or line.isspace():
continue continue
scan = {} s = {}
scan["export"] = True s["export"] = True
# first line # first line
for param, (param_name, param_type) in zip(line.split(), ccl_first_line): for param, (param_name, param_type) in zip(line.split(), ccl_first_line):
scan[param_name] = param_type(param) s[param_name] = param_type(param)
# rename 0 index scan to 1
if scan["idx"] == 0:
scan["idx"] = 1
# second line # second line
next_line = next(fileobj) next_line = next(fileobj)
for param, (param_name, param_type) in zip(next_line.split(), ccl_second_line): for param, (param_name, param_type) in zip(next_line.split(), ccl_second_line):
scan[param_name] = param_type(param) s[param_name] = param_type(param)
if "scan_motor" not in scan: if s["scan_motor"] != "om":
scan["scan_motor"] = "om"
if scan["scan_motor"] == "o2t":
scan["scan_motor"] = "om"
if scan["scan_motor"] != "om":
raise Exception("Unsupported variable name in ccl file.") raise Exception("Unsupported variable name in ccl file.")
# "om" -> "omega" # "om" -> "omega"
scan["scan_motor"] = "omega" s["scan_motor"] = "omega"
scan["scan_motors"] = ["omega"] s["scan_motors"] = ["omega", ]
# overwrite metadata, because it only refers to the scan center # overwrite metadata, because it only refers to the scan center
half_dist = (scan["n_points"] - 1) / 2 * scan["angle_step"] half_dist = (s["n_points"] - 1) / 2 * s["angle_step"]
scan["omega"] = np.linspace( s["omega"] = np.linspace(s["omega"] - half_dist, s["omega"] + half_dist, s["n_points"])
scan["omega"] - half_dist, scan["omega"] + half_dist, scan["n_points"]
)
# subsequent lines with counts # subsequent lines with counts
counts = [] counts = []
while len(counts) < scan["n_points"]: while len(counts) < s["n_points"]:
counts.extend(map(float, next(fileobj).split())) counts.extend(map(float, next(fileobj).split()))
scan["counts"] = np.array(counts) s["counts"] = np.array(counts)
scan["counts_err"] = np.sqrt(np.maximum(scan["counts"], 1)) s["counts_err"] = np.sqrt(np.maximum(s["counts"], 1))
if scan["h"].is_integer() and scan["k"].is_integer() and scan["l"].is_integer(): if s["h"].is_integer() and s["k"].is_integer() and s["l"].is_integer():
scan["h"], scan["k"], scan["l"] = map(int, (scan["h"], scan["k"], scan["l"])) s["h"], s["k"], s["l"] = map(int, (s["h"], s["k"], s["l"]))
dataset.append({**metadata, **scan}) scan.append({**metadata, **s})
elif data_type == ".dat": elif data_type == ".dat":
# TODO: this might need to be adapted in the future, when "gamma" will be added to dat files
if metadata["zebra_mode"] == "nb": if metadata["zebra_mode"] == "nb":
if "gamma_angle" in metadata: metadata["gamma"] = metadata["twotheta"]
# support for the new format
metadata["gamma"] = metadata["gamma_angle"]
else:
metadata["gamma"] = metadata["twotheta"]
scan = defaultdict(list) s = defaultdict(list)
scan["export"] = True s["export"] = True
match = re.search("Scanning Variables: (.*), Steps: (.*)", next(fileobj)) match = re.search("Scanning Variables: (.*), Steps: (.*)", next(fileobj))
motors = [motor.strip().lower() for motor in match.group(1).split(",")] motors = [motor.lower() for motor in match.group(1).split(", ")]
# Steps can be separated by " " or ", " steps = [float(step) for step in match.group(2).split()]
steps = [float(step.strip(",")) for step in match.group(2).split()]
match = re.search("(.*) Points, Mode: (.*), Preset (.*)", next(fileobj)) match = re.search("(.*) Points, Mode: (.*), Preset (.*)", next(fileobj))
if match.group(2) != "Monitor": if match.group(2) != "Monitor":
raise Exception("Unknown mode in dat file.") raise Exception("Unknown mode in dat file.")
scan["n_points"] = int(match.group(1)) s["n_points"] = int(match.group(1))
scan["monitor"] = float(match.group(3)) s["monitor"] = float(match.group(3))
col_names = list(map(str.lower, next(fileobj).split())) col_names = list(map(str.lower, next(fileobj).split()))
@ -256,56 +203,56 @@ def parse_1D(fileobj, data_type, log=logger):
break break
for name, val in zip(col_names, line.split()): for name, val in zip(col_names, line.split()):
scan[name].append(float(val)) s[name].append(float(val))
for name in col_names: for name in col_names:
scan[name] = np.array(scan[name]) s[name] = np.array(s[name])
scan["counts_err"] = np.sqrt(np.maximum(scan["counts"], 1)) s["counts_err"] = np.sqrt(np.maximum(s["counts"], 1))
scan["scan_motors"] = [] s["scan_motors"] = []
for motor, step in zip(motors, steps): for motor, step in zip(motors, steps):
if step == 0: if step == 0:
# it's not a scan motor, so keep only the median value # it's not a scan motor, so keep only the median value
scan[motor] = np.median(scan[motor]) s[motor] = np.median(s[motor])
else: else:
scan["scan_motors"].append(motor) s["scan_motors"].append(motor)
# "om" -> "omega" # "om" -> "omega"
if "om" in scan["scan_motors"]: if "om" in s["scan_motors"]:
scan["scan_motors"][scan["scan_motors"].index("om")] = "omega" s["scan_motors"][s["scan_motors"].index("om")] = "omega"
scan["omega"] = scan["om"] s["omega"] = s["om"]
del scan["om"] del s["om"]
# "tt" -> "temp" # "tt" -> "temp"
if "tt" in scan["scan_motors"]: if "tt" in s["scan_motors"]:
scan["scan_motors"][scan["scan_motors"].index("tt")] = "temp" s["scan_motors"][s["scan_motors"].index("tt")] = "temp"
scan["temp"] = scan["tt"] s["temp"] = s["tt"]
del scan["tt"] del s["tt"]
# "mf" stays "mf" # "mf" stays "mf"
# "phi" stays "phi" # "phi" stays "phi"
scan["scan_motor"] = scan["scan_motors"][0] s["scan_motor"] = s["scan_motors"][0]
if "h" not in scan: if "h" not in s:
scan["h"] = scan["k"] = scan["l"] = float("nan") s["h"] = s["k"] = s["l"] = float("nan")
for param in ("mf", "temp"): for param in ("mf", "temp"):
if param not in metadata: if param not in metadata:
scan[param] = 0 s[param] = 0
scan["idx"] = 1 s["idx"] = 1
dataset.append({**metadata, **scan}) scan.append({**metadata, **s})
else: else:
log.error("Unknown file extention") print("Unknown file extention")
return dataset return scan
def export_1D(dataset, path, export_target, hkl_precision=2): def export_1D(data, path, export_target, hkl_precision=2):
"""Exports data in the .comm/.incomm format for fullprof or .col/.incol format for jana. """Exports data in the .comm/.incomm format for fullprof or .col/.incol format for jana.
Scans with integer/real hkl values are saved in .comm/.incomm or .col/.incol files Scans with integer/real hkl values are saved in .comm/.incomm or .col/.incol files
@ -315,11 +262,11 @@ def export_1D(dataset, path, export_target, hkl_precision=2):
if export_target not in EXPORT_TARGETS: if export_target not in EXPORT_TARGETS:
raise ValueError(f"Unknown export target: {export_target}.") raise ValueError(f"Unknown export target: {export_target}.")
zebra_mode = dataset[0]["zebra_mode"] zebra_mode = data[0]["zebra_mode"]
exts = EXPORT_TARGETS[export_target] exts = EXPORT_TARGETS[export_target]
file_content = {ext: [] for ext in exts} file_content = {ext: [] for ext in exts}
for scan in dataset: for scan in data:
if "fit" not in scan: if "fit" not in scan:
continue continue
@ -359,7 +306,7 @@ def export_1D(dataset, path, export_target, hkl_precision=2):
out_file.writelines(content) out_file.writelines(content)
def export_ccl_compare(dataset1, dataset2, path, export_target, hkl_precision=2): def export_ccl_compare(data1, data2, path, export_target, hkl_precision=2):
"""Exports compare data in the .comm/.incomm format for fullprof or .col/.incol format for jana. """Exports compare data in the .comm/.incomm format for fullprof or .col/.incol format for jana.
Scans with integer/real hkl values are saved in .comm/.incomm or .col/.incol files Scans with integer/real hkl values are saved in .comm/.incomm or .col/.incol files
@ -369,11 +316,11 @@ def export_ccl_compare(dataset1, dataset2, path, export_target, hkl_precision=2)
if export_target not in EXPORT_TARGETS: if export_target not in EXPORT_TARGETS:
raise ValueError(f"Unknown export target: {export_target}.") raise ValueError(f"Unknown export target: {export_target}.")
zebra_mode = dataset1[0]["zebra_mode"] zebra_mode = data1[0]["zebra_mode"]
exts = EXPORT_TARGETS[export_target] exts = EXPORT_TARGETS[export_target]
file_content = {ext: [] for ext in exts} file_content = {ext: [] for ext in exts}
for scan1, scan2 in zip(dataset1, dataset2): for scan1, scan2 in zip(data1, data2):
if "fit" not in scan1: if "fit" not in scan1:
continue continue
@ -389,7 +336,7 @@ def export_ccl_compare(dataset1, dataset2, path, export_target, hkl_precision=2)
area_n1, area_s1 = scan1["area"] area_n1, area_s1 = scan1["area"]
area_n2, area_s2 = scan2["area"] area_n2, area_s2 = scan2["area"]
area_n = area_n1 - area_n2 area_n = area_n1 - area_n2
area_s = np.sqrt(area_s1**2 + area_s2**2) area_s = np.sqrt(area_s1 ** 2 + area_s2 ** 2)
area_str = f"{area_n:10.2f}{area_s:10.2f}" area_str = f"{area_n:10.2f}{area_s:10.2f}"
ang_str = "" ang_str = ""
@ -416,9 +363,9 @@ def export_ccl_compare(dataset1, dataset2, path, export_target, hkl_precision=2)
out_file.writelines(content) out_file.writelines(content)
def export_param_study(dataset, param_data, path): def export_param_study(data, param_data, path):
file_content = [] file_content = []
for scan, param in zip(dataset, param_data): for scan, param in zip(data, param_data):
if "fit" not in scan: if "fit" not in scan:
continue continue
@ -433,11 +380,7 @@ def export_param_study(dataset, param_data, path):
fit_str = "" fit_str = ""
for fit_param in scan["fit"].params.values(): for fit_param in scan["fit"].params.values():
fit_param_val = fit_param.value fit_str = fit_str + f"{fit_param.value:<20.2f}" + f"{fit_param.stderr:<20.2f}"
fit_param_std = fit_param.stderr
if fit_param_std is None:
fit_param_std = 0
fit_str = fit_str + f"{fit_param_val:<20.2f}" + f"{fit_param_std:<20.2f}"
_, fname_str = os.path.split(scan["original_filename"]) _, fname_str = os.path.split(scan["original_filename"])

View File

@ -1,13 +1,10 @@
import logging
import os import os
import numpy as np import numpy as np
from lmfit.models import GaussianModel, LinearModel, PseudoVoigtModel, VoigtModel from lmfit.models import Gaussian2dModel, GaussianModel, LinearModel, PseudoVoigtModel, VoigtModel
from scipy.integrate import simpson, trapezoid from scipy.integrate import simpson, trapezoid
from pyzebra import CCL_ANGLES from .ccl_io import CCL_ANGLES
logger = logging.getLogger(__name__)
PARAM_PRECISIONS = { PARAM_PRECISIONS = {
"twotheta": 0.1, "twotheta": 0.1,
@ -21,9 +18,9 @@ PARAM_PRECISIONS = {
"ub": 0.01, "ub": 0.01,
} }
MAX_RANGE_GAP = {"omega": 0.5} MAX_RANGE_GAP = {
"omega": 0.5,
MOTOR_POS_PRECISION = 0.01 }
AREA_METHODS = ("fit_area", "int_area") AREA_METHODS = ("fit_area", "int_area")
@ -36,12 +33,12 @@ def normalize_dataset(dataset, monitor=100_000):
scan["monitor"] = monitor scan["monitor"] = monitor
def merge_duplicates(dataset, log=logger): def merge_duplicates(dataset):
merged = np.zeros(len(dataset), dtype=bool) merged = np.zeros(len(dataset), dtype=np.bool)
for ind_into, scan_into in enumerate(dataset): for ind_into, scan_into in enumerate(dataset):
for ind_from, scan_from in enumerate(dataset[ind_into + 1 :], start=ind_into + 1): for ind_from, scan_from in enumerate(dataset[ind_into + 1 :], start=ind_into + 1):
if _parameters_match(scan_into, scan_from) and not merged[ind_from]: if _parameters_match(scan_into, scan_from) and not merged[ind_from]:
merge_scans(scan_into, scan_from, log=log) merge_scans(scan_into, scan_from)
merged[ind_from] = True merged[ind_from] = True
@ -50,58 +47,45 @@ def _parameters_match(scan1, scan2):
if zebra_mode != scan2["zebra_mode"]: if zebra_mode != scan2["zebra_mode"]:
return False return False
for param in ("ub", *(vars[0] for vars in CCL_ANGLES[zebra_mode])): for param in ("ub", "temp", "mf", *(vars[0] for vars in CCL_ANGLES[zebra_mode])):
if param.startswith("skip"): if param.startswith("skip"):
# ignore skip parameters, like the last angle in 'nb' zebra mode # ignore skip parameters, like the last angle in 'nb' zebra mode
continue continue
if param == scan1["scan_motor"] == scan2["scan_motor"]: if param == scan1["scan_motor"] == scan2["scan_motor"]:
# check if ranges of variable parameter overlap # check if ranges of variable parameter overlap
r1_start, r1_end = scan1[param][0], scan1[param][-1] range1 = scan1[param]
r2_start, r2_end = scan2[param][0], scan2[param][-1] range2 = scan2[param]
# support reversed ranges
if r1_start > r1_end:
r1_start, r1_end = r1_end, r1_start
if r2_start > r2_end:
r2_start, r2_end = r2_end, r2_start
# maximum gap between ranges of the scanning parameter (default 0) # maximum gap between ranges of the scanning parameter (default 0)
max_range_gap = MAX_RANGE_GAP.get(param, 0) max_range_gap = MAX_RANGE_GAP.get(param, 0)
if max(r1_start - r2_end, r2_start - r1_end) > max_range_gap: if max(range1[0] - range2[-1], range2[0] - range1[-1]) > max_range_gap:
return False return False
elif ( elif np.max(np.abs(scan1[param] - scan2[param])) > PARAM_PRECISIONS[param]:
np.max(np.abs(np.median(scan1[param]) - np.median(scan2[param])))
> PARAM_PRECISIONS[param]
):
return False return False
return True return True
def merge_datasets(dataset_into, dataset_from, log=logger): def merge_datasets(dataset_into, dataset_from):
scan_motors_into = dataset_into[0]["scan_motors"] scan_motors_into = dataset_into[0]["scan_motors"]
scan_motors_from = dataset_from[0]["scan_motors"] scan_motors_from = dataset_from[0]["scan_motors"]
if scan_motors_into != scan_motors_from: if scan_motors_into != scan_motors_from:
log.warning( print(f"Scan motors mismatch between datasets: {scan_motors_into} vs {scan_motors_from}")
f"Scan motors mismatch between datasets: {scan_motors_into} vs {scan_motors_from}"
)
return return
merged = np.zeros(len(dataset_from), dtype=bool) merged = np.zeros(len(dataset_from), dtype=np.bool)
for scan_into in dataset_into: for scan_into in dataset_into:
for ind, scan_from in enumerate(dataset_from): for ind, scan_from in enumerate(dataset_from):
if _parameters_match(scan_into, scan_from) and not merged[ind]: if _parameters_match(scan_into, scan_from) and not merged[ind]:
if scan_into["counts"].ndim == 3: merge_scans(scan_into, scan_from)
merge_h5_scans(scan_into, scan_from, log=log)
else: # scan_into["counts"].ndim == 1
merge_scans(scan_into, scan_from, log=log)
merged[ind] = True merged[ind] = True
for scan_from in dataset_from: for scan_from in dataset_from:
dataset_into.append(scan_from) dataset_into.append(scan_from)
def merge_scans(scan_into, scan_from, log=logger): def merge_scans(scan_into, scan_from):
if "init_scan" not in scan_into: if "init_scan" not in scan_into:
scan_into["init_scan"] = scan_into.copy() scan_into["init_scan"] = scan_into.copy()
@ -133,7 +117,7 @@ def merge_scans(scan_into, scan_from, log=logger):
err_tmp = err_all[:1] err_tmp = err_all[:1]
num_tmp = np.array([1]) num_tmp = np.array([1])
for pos, val, err in zip(pos_all[1:], val_all[1:], err_all[1:]): for pos, val, err in zip(pos_all[1:], val_all[1:], err_all[1:]):
if pos - pos_tmp[-1] < MOTOR_POS_PRECISION: if pos - pos_tmp[-1] < 0.0005:
# the repeated motor position # the repeated motor position
val_tmp[-1] += val val_tmp[-1] += val
err_tmp[-1] += err err_tmp[-1] += err
@ -153,71 +137,7 @@ def merge_scans(scan_into, scan_from, log=logger):
fname1 = os.path.basename(scan_into["original_filename"]) fname1 = os.path.basename(scan_into["original_filename"])
fname2 = os.path.basename(scan_from["original_filename"]) fname2 = os.path.basename(scan_from["original_filename"])
log.info(f'Merging scans: {scan_into["idx"]} ({fname1}) <-- {scan_from["idx"]} ({fname2})') print(f'Merging scans: {scan_into["idx"]} ({fname1}) <-- {scan_from["idx"]} ({fname2})')
def merge_h5_scans(scan_into, scan_from, log=logger):
    """Merge an area-detector (h5) scan into another one, in place.

    Counterpart of merge_scans for 3D counts (frame x row x col): counts at
    motor positions closer than MOTOR_POS_PRECISION are averaged per frame,
    with errors combined in quadrature.

    Args:
        scan_into (dict): target scan; its motor positions, "counts" and
            "counts_err" are recomputed from its pristine copy plus all
            merged scans.
        scan_from (dict): scan to merge in; flagged with export=False.
        log (logging.Logger): destination for merge diagnostics.
    """
    # keep a pristine copy of the target so repeated merges recompute from scratch
    if "init_scan" not in scan_into:
        scan_into["init_scan"] = scan_into.copy()

    if "merged_scans" not in scan_into:
        scan_into["merged_scans"] = []

    # identity check prevents merging the very same scan object twice
    for scan in scan_into["merged_scans"]:
        if scan_from is scan:
            log.warning("Already merged scan")
            return

    scan_into["merged_scans"].append(scan_from)

    scan_motor = scan_into["scan_motor"]  # the same as scan_from["scan_motor"]

    # gather positions, counts and squared errors from the pristine scan
    # and every scan merged so far
    pos_all = [scan_into["init_scan"][scan_motor]]
    val_all = [scan_into["init_scan"]["counts"]]
    err_all = [scan_into["init_scan"]["counts_err"] ** 2]
    for scan in scan_into["merged_scans"]:
        pos_all.append(scan[scan_motor])
        val_all.append(scan["counts"])
        err_all.append(scan["counts_err"] ** 2)

    pos_all = np.concatenate(pos_all)
    val_all = np.concatenate(val_all)
    err_all = np.concatenate(err_all)

    # order everything by motor position so duplicates become adjacent
    sort_index = np.argsort(pos_all)
    pos_all = pos_all[sort_index]
    val_all = val_all[sort_index]
    err_all = err_all[sort_index]

    # bin adjacent positions closer than MOTOR_POS_PRECISION together,
    # summing frames and tracking how many scans contributed to each bin
    pos_tmp = [pos_all[0]]
    val_tmp = [val_all[:1]]
    err_tmp = [err_all[:1]]
    num_tmp = [1]
    for pos, val, err in zip(pos_all[1:], val_all[1:], err_all[1:]):
        if pos - pos_tmp[-1] < MOTOR_POS_PRECISION:
            # the repeated motor position
            val_tmp[-1] += val
            err_tmp[-1] += err
            num_tmp[-1] += 1
        else:
            # a new motor position
            pos_tmp.append(pos)
            val_tmp.append(val[None, :])
            err_tmp.append(err[None, :])
            num_tmp.append(1)

    pos_tmp = np.array(pos_tmp)
    val_tmp = np.concatenate(val_tmp)
    err_tmp = np.concatenate(err_tmp)
    num_tmp = np.array(num_tmp)

    # average per bin; errors were accumulated as variances, so sqrt here
    scan_into[scan_motor] = pos_tmp
    scan_into["counts"] = val_tmp / num_tmp[:, None, None]
    scan_into["counts_err"] = np.sqrt(err_tmp) / num_tmp[:, None, None]

    # the merged-in scan must not be exported on its own anymore
    scan_from["export"] = False

    fname1 = os.path.basename(scan_into["original_filename"])
    fname2 = os.path.basename(scan_from["original_filename"])
    log.info(f'Merging scans: {scan_into["idx"]} ({fname1}) <-- {scan_from["idx"]} ({fname2})')
def restore_scan(scan): def restore_scan(scan):
@ -235,7 +155,7 @@ def restore_scan(scan):
scan["export"] = True scan["export"] = True
def fit_scan(scan, model_dict, fit_from=None, fit_to=None, log=logger): def fit_scan(scan, model_dict, fit_from=None, fit_to=None):
if fit_from is None: if fit_from is None:
fit_from = -np.inf fit_from = -np.inf
if fit_to is None: if fit_to is None:
@ -248,7 +168,7 @@ def fit_scan(scan, model_dict, fit_from=None, fit_to=None, log=logger):
# apply fitting range # apply fitting range
fit_ind = (fit_from <= x_fit) & (x_fit <= fit_to) fit_ind = (fit_from <= x_fit) & (x_fit <= fit_to)
if not np.any(fit_ind): if not np.any(fit_ind):
log.warning(f"No data in fit range for scan {scan['idx']}") print(f"No data in fit range for scan {scan['idx']}")
return return
y_fit = y_fit[fit_ind] y_fit = y_fit[fit_ind]
@ -339,3 +259,31 @@ def get_area(scan, area_method, lorentz):
area_s = np.abs(area_s * corr_factor) area_s = np.abs(area_s * corr_factor)
scan["area"] = (area_v, area_s) scan["area"] = (area_v, area_s)
def fit_event(scan, fr_from, fr_to, y_from, y_to, x_from, x_to):
    """Locate a single detector event inside a region of interest.

    Fits a 1D Gaussian to the per-frame summed counts (to find the center
    frame and peak height) and a 2D Gaussian to the frame-summed detector
    image (to find the pixel position). Results are stored in scan["fit"].

    Args:
        scan (dict): scan containing a 3D "data" array (frame, y, x).
        fr_from, fr_to (int): frame range of the ROI.
        y_from, y_to, x_from, x_to (int): pixel ranges of the ROI.
    """
    data_roi = scan["data"][fr_from:fr_to, y_from:y_to, x_from:x_to]

    # 1D Gaussian over total counts per frame -> center frame and intensity
    model = GaussianModel()
    fr = np.arange(fr_from, fr_to)
    counts_per_fr = np.sum(data_roi, axis=(1, 2))
    params = model.guess(counts_per_fr, fr)
    result = model.fit(counts_per_fr, x=fr, params=params)
    frC = result.params["center"].value
    intensity = result.params["height"].value

    # crude signal-to-noise estimate; guarded against a flat (zero-std) signal
    counts_std = counts_per_fr.std()
    counts_mean = counts_per_fr.mean()
    snr = 0 if counts_std == 0 else counts_mean / counts_std

    # 2D Gaussian over the frame-summed image -> event pixel position
    model = Gaussian2dModel()
    xs, ys = np.meshgrid(np.arange(x_from, x_to), np.arange(y_from, y_to))
    xs = xs.flatten()
    ys = ys.flatten()
    counts = np.sum(data_roi, axis=0).flatten()
    params = model.guess(counts, xs, ys)
    result = model.fit(counts, x=xs, y=ys, params=params)
    xC = result.params["centerx"].value
    yC = result.params["centery"].value

    scan["fit"] = {"frame": frC, "x_pos": xC, "y_pos": yC, "intensity": intensity, "snr": snr}

View File

@ -1,11 +1,10 @@
import h5py import h5py
import numpy as np import numpy as np
from lmfit.models import Gaussian2dModel, GaussianModel
META_MATRIX = ("UB",)
META_CELL = ("cell",)
META_STR = ("name",)
META_MATRIX = ("UB")
META_CELL = ("cell")
META_STR = ("name")
def read_h5meta(filepath): def read_h5meta(filepath):
"""Open and parse content of a h5meta file. """Open and parse content of a h5meta file.
@ -47,9 +46,9 @@ def parse_h5meta(file):
if variable in META_STR: if variable in META_STR:
pass pass
elif variable in META_CELL: elif variable in META_CELL:
value = np.array(value.split(",")[:6], dtype=float) value = np.array(value.split(",")[:6], dtype=np.float)
elif variable in META_MATRIX: elif variable in META_MATRIX:
value = np.array(value.split(",")[:9], dtype=float).reshape(3, 3) value = np.array(value.split(",")[:9], dtype=np.float).reshape(3, 3)
else: # default is a single float number else: # default is a single float number
value = float(value) value = float(value)
content[section][variable] = value content[section][variable] = value
@ -69,144 +68,75 @@ def read_detector_data(filepath, cami_meta=None):
ndarray: A 3D array of data, omega, gamma, nu. ndarray: A 3D array of data, omega, gamma, nu.
""" """
with h5py.File(filepath, "r") as h5f: with h5py.File(filepath, "r") as h5f:
counts = h5f["/entry1/area_detector2/data"][:].astype(float) data = h5f["/entry1/area_detector2/data"][:]
n, cols, rows = counts.shape # reshape data to a correct shape (2006 issue)
if "/entry1/experiment_identifier" in h5f: # old format n, cols, rows = data.shape
# reshape images (counts) to a correct shape (2006 issue) data = data.reshape(n, rows, cols)
counts = counts.reshape(n, rows, cols)
else:
counts = counts.swapaxes(1, 2)
scan = {"counts": counts, "counts_err": np.sqrt(np.maximum(counts, 1))} det_data = {"data": data}
scan["original_filename"] = filepath det_data["original_filename"] = filepath
scan["export"] = True
if "/entry1/zebra_mode" in h5f: if "/entry1/zebra_mode" in h5f:
scan["zebra_mode"] = h5f["/entry1/zebra_mode"][0].decode() det_data["zebra_mode"] = h5f["/entry1/zebra_mode"][0].decode()
else: else:
scan["zebra_mode"] = "nb" det_data["zebra_mode"] = "nb"
# overwrite zebra_mode from cami # overwrite zebra_mode from cami
if cami_meta is not None: if cami_meta is not None:
if "zebra_mode" in cami_meta: if "zebra_mode" in cami_meta:
scan["zebra_mode"] = cami_meta["zebra_mode"][0] det_data["zebra_mode"] = cami_meta["zebra_mode"][0]
if "/entry1/control/Monitor" in h5f: # om, sometimes ph
scan["monitor"] = h5f["/entry1/control/Monitor"][0] if det_data["zebra_mode"] == "nb":
else: # old path det_data["omega"] = h5f["/entry1/area_detector2/rotation_angle"][:]
scan["monitor"] = h5f["/entry1/control/data"][0] else: # bi
det_data["omega"] = h5f["/entry1/sample/rotation_angle"][:]
scan["idx"] = 1 det_data["gamma"] = h5f["/entry1/ZEBRA/area_detector2/polar_angle"][:] # gammad
det_data["nu"] = h5f["/entry1/ZEBRA/area_detector2/tilt_angle"][:] # nud
if "/entry1/sample/rotation_angle" in h5f: det_data["ddist"] = h5f["/entry1/ZEBRA/area_detector2/distance"][:]
scan["omega"] = h5f["/entry1/sample/rotation_angle"][:] det_data["wave"] = h5f["/entry1/ZEBRA/monochromator/wavelength"][:]
else: det_data["chi"] = h5f["/entry1/sample/chi"][:] # ch
scan["omega"] = h5f["/entry1/area_detector2/rotation_angle"][:] det_data["phi"] = h5f["/entry1/sample/phi"][:] # ph
if len(scan["omega"]) == 1: det_data["ub"] = h5f["/entry1/sample/UB"][:].reshape(3, 3)
scan["omega"] = np.ones(n) * scan["omega"] det_data["name"] = h5f["/entry1/sample/name"][0].decode()
det_data["cell"] = h5f["/entry1/sample/cell"][:]
scan["gamma"] = h5f["/entry1/ZEBRA/area_detector2/polar_angle"][:]
scan["twotheta"] = h5f["/entry1/ZEBRA/area_detector2/polar_angle"][:]
if len(scan["gamma"]) == 1:
scan["gamma"] = np.ones(n) * scan["gamma"]
scan["twotheta"] = np.ones(n) * scan["twotheta"]
scan["nu"] = h5f["/entry1/ZEBRA/area_detector2/tilt_angle"][0]
scan["ddist"] = h5f["/entry1/ZEBRA/area_detector2/distance"][0]
scan["wave"] = h5f["/entry1/ZEBRA/monochromator/wavelength"][0]
if scan["zebra_mode"] == "nb":
scan["chi"] = np.array([180])
scan["phi"] = np.array([0])
elif scan["zebra_mode"] == "bi":
scan["chi"] = h5f["/entry1/sample/chi"][:]
scan["phi"] = h5f["/entry1/sample/phi"][:]
if len(scan["chi"]) == 1:
scan["chi"] = np.ones(n) * scan["chi"]
if len(scan["phi"]) == 1:
scan["phi"] = np.ones(n) * scan["phi"]
if h5f["/entry1/sample/UB"].size == 0:
scan["ub"] = np.eye(3) * 0.177
else:
scan["ub"] = h5f["/entry1/sample/UB"][:].reshape(3, 3)
scan["name"] = h5f["/entry1/sample/name"][0].decode()
scan["cell"] = h5f["/entry1/sample/cell"][:]
if n == 1: if n == 1:
# a default motor for a single frame file # a default motor for a single frame file
scan["scan_motor"] = "omega" det_data["scan_motor"] = "omega"
else: else:
for var in ("omega", "gamma", "chi", "phi"): # TODO: also nu? for var in ("omega", "gamma", "nu", "chi", "phi"):
if abs(scan[var][0] - scan[var][-1]) > 0.1: if abs(det_data[var][0] - det_data[var][-1]) > 0.1:
scan["scan_motor"] = var det_data["scan_motor"] = var
break break
else: else:
raise ValueError("No angles that vary") raise ValueError("No angles that vary")
scan["scan_motors"] = [scan["scan_motor"]]
# optional parameters # optional parameters
if "/entry1/sample/magnetic_field" in h5f: if "/entry1/sample/magnetic_field" in h5f:
scan["mf"] = h5f["/entry1/sample/magnetic_field"][:] det_data["mf"] = h5f["/entry1/sample/magnetic_field"][:]
if "mf" in scan:
# TODO: NaNs are not JSON compliant, so replace them with None
# this is not a great solution, but makes it safe to use the array in bokeh
scan["mf"] = np.where(np.isnan(scan["mf"]), None, scan["mf"])
if "/entry1/sample/temperature" in h5f: if "/entry1/sample/temperature" in h5f:
scan["temp"] = h5f["/entry1/sample/temperature"][:] det_data["temp"] = h5f["/entry1/sample/temperature"][:]
elif "/entry1/sample/Ts/value" in h5f:
scan["temp"] = h5f["/entry1/sample/Ts/value"][:]
if "temp" in scan:
# TODO: NaNs are not JSON compliant, so replace them with None
# this is not a great solution, but makes it safe to use the array in bokeh
scan["temp"] = np.where(np.isnan(scan["temp"]), None, scan["temp"])
# overwrite metadata from .cami # overwrite metadata from .cami
if cami_meta is not None: if cami_meta is not None:
if "crystal" in cami_meta: if "crystal" in cami_meta:
cami_meta_crystal = cami_meta["crystal"] cami_meta_crystal = cami_meta["crystal"]
if "name" in cami_meta_crystal: if "name" in cami_meta_crystal:
scan["name"] = cami_meta_crystal["name"] det_data["name"] = cami_meta_crystal["name"]
if "UB" in cami_meta_crystal: if "UB" in cami_meta_crystal:
scan["ub"] = cami_meta_crystal["UB"] det_data["ub"] = cami_meta_crystal["UB"]
if "cell" in cami_meta_crystal: if "cell" in cami_meta_crystal:
scan["cell"] = cami_meta_crystal["cell"] det_data["cell"] = cami_meta_crystal["cell"]
if "lambda" in cami_meta_crystal: if "lambda" in cami_meta_crystal:
scan["wave"] = cami_meta_crystal["lambda"] det_data["wave"] = cami_meta_crystal["lambda"]
if "detector parameters" in cami_meta: if "detector parameters" in cami_meta:
cami_meta_detparam = cami_meta["detector parameters"] cami_meta_detparam = cami_meta["detector parameters"]
if "dist2" in cami_meta_detparam: if "dist1" in cami_meta_detparam:
scan["ddist"] = cami_meta_detparam["dist2"] det_data["ddist"] = cami_meta_detparam["dist1"]
return scan return det_data
def fit_event(scan, fr_from, fr_to, y_from, y_to, x_from, x_to):
    """Locate a single detector event inside a region of interest.

    Same procedure as the ccl_process variant, but reads the 3D image stack
    from scan["counts"]: a 1D Gaussian fit over per-frame summed counts gives
    the center frame and peak height, a 2D Gaussian fit over the frame-summed
    image gives the pixel position. Results are stored in scan["fit"].
    """
    data_roi = scan["counts"][fr_from:fr_to, y_from:y_to, x_from:x_to]

    # 1D Gaussian over total counts per frame -> center frame and intensity
    model = GaussianModel()
    fr = np.arange(fr_from, fr_to)
    counts_per_fr = np.sum(data_roi, axis=(1, 2))
    params = model.guess(counts_per_fr, fr)
    result = model.fit(counts_per_fr, x=fr, params=params)
    frC = result.params["center"].value
    intensity = result.params["height"].value

    # crude signal-to-noise estimate; guarded against a flat (zero-std) signal
    counts_std = counts_per_fr.std()
    counts_mean = counts_per_fr.mean()
    snr = 0 if counts_std == 0 else counts_mean / counts_std

    # 2D Gaussian over the frame-summed image -> event pixel position
    model = Gaussian2dModel()
    xs, ys = np.meshgrid(np.arange(x_from, x_to), np.arange(y_from, y_to))
    xs = xs.flatten()
    ys = ys.flatten()
    counts = np.sum(data_roi, axis=0).flatten()
    params = model.guess(counts, xs, ys)
    result = model.fit(counts, x=xs, y=ys, params=params)
    xC = result.params["centerx"].value
    yC = result.params["centery"].value

    scan["fit"] = {"frame": frC, "x_pos": xC, "y_pos": yC, "intensity": intensity, "snr": snr}

View File

@ -1,486 +0,0 @@
import io
import logging
import os
import subprocess
import tempfile
from math import ceil, floor
import numpy as np
# Module-level logger; also used as the default `log` argument in this module.
logger = logging.getLogger(__name__)

# Absolute path of the Sxtal_Refgen executable on the PSI AFS share.
SXTAL_REFGEN_PATH = "/afs/psi.ch/project/sinq/rhel8/bin/Sxtal_Refgen"

# Default .geom template for the zebra bisecting ('bi') geometry.
# NOTE(review): column alignment inside the templates may have been collapsed
# by transport; the parsers use whitespace-splitting, so spacing is cosmetic.
_zebraBI_default_geom = """GEOM 2 Bissecting - HiCHI
BLFR z-up
DIST_UNITS mm
ANGL_UNITS deg
DET_TYPE Point ipsd 1
DIST_DET 488
DIM_XY 1.0 1.0 1 1
GAPS_DET 0 0
SETTING 1 0 0 0 1 0 0 0 1
NUM_ANG 4
ANG_LIMITS Min Max Offset
Gamma 0.0 128.0 0.00
Omega 0.0 64.0 0.00
Chi 80.0 211.0 0.00
Phi 0.0 360.0 0.00
DET_OFF 0 0 0
"""

# Default .geom template for the zebra normal-beam ('nb') geometry.
_zebraNB_default_geom = """GEOM 3 Normal Beam
BLFR z-up
DIST_UNITS mm
ANGL_UNITS deg
DET_TYPE Point ipsd 1
DIST_DET 448
DIM_XY 1.0 1.0 1 1
GAPS_DET 0 0
SETTING 1 0 0 0 1 0 0 0 1
NUM_ANG 3
ANG_LIMITS Min Max Offset
Gamma 0.0 128.0 0.00
Omega -180.0 180.0 0.00
Nu -15.0 15.0 0.00
DET_OFF 0 0 0
"""

# Default .cfl (crystal/configuration) template used by export_cfl_file.
_zebra_default_cfl = """TITLE mymaterial
SPGR P 63 2 2
CELL 5.73 5.73 11.89 90 90 120
WAVE 1.383
UBMAT
0.000000 0.000000 0.084104
0.000000 0.174520 -0.000000
0.201518 0.100759 0.000000
INSTR zebra.geom
ORDER 1 2 3
ANGOR gamma
HLIM -25 25 -25 25 -25 25
SRANG 0.0 0.7
Mag_Structure
lattiCE P 1
kvect 0.0 0.0 0.0
magcent
symm x,y,z
msym u,v,w, 0.0
End_Mag_Structure
"""
def get_zebraBI_default_geom_file():
    """Return the default bisecting-geometry .geom template as a file-like object."""
    return io.StringIO(_zebraBI_default_geom)
def get_zebraNB_default_geom_file():
    """Return the default normal-beam-geometry .geom template as a file-like object."""
    return io.StringIO(_zebraNB_default_geom)
def get_zebra_default_cfl_file():
    """Return the default .cfl template as a file-like object."""
    return io.StringIO(_zebra_default_cfl)
def read_geom_file(fileobj):
    """Parse a Sxtal_Refgen .geom file into a dict of angular limits.

    Args:
        fileobj: iterable of text lines (open file or StringIO).

    Returns:
        dict: "geom" -> "bi" or "nb", plus one entry per angle mapping the
        lowercase angle name to [min, max, offset] (as strings). A "2theta"
        angle is renamed to "gamma".
    """
    limits = {}
    for raw in fileobj:
        # anything after '!' is a comment
        text = raw.split("!", 1)[0] if "!" in raw else raw

        if text.startswith("GEOM"):
            _, geom_val = text.split(maxsplit=1)
            # GEOM 2 -> bisecting, GEOM 3 -> normal beam
            limits["geom"] = "bi" if geom_val.startswith("2") else "nb"
        elif text.startswith("ANG_LIMITS"):
            # consume the following angle rows until a blank line
            for ang_row in fileobj:
                if not ang_row or ang_row.isspace():
                    break
                name, lo, hi, offset = ang_row.split()
                limits[name.lower()] = [lo, hi, offset]

    if "2theta" in limits:  # treat 2theta as gamma
        limits["gamma"] = limits.pop("2theta")

    return limits
def export_geom_file(path, ang_lims, template=None):
    """Write a .geom file, substituting angular limits into a template.

    Args:
        path (str): output file path.
        ang_lims (dict): as returned by read_geom_file; must contain "geom"
            ("bi" or "nb") and one [min, max, offset] entry per angle.
        template: optional file-like template; when None, the default
            template for the selected geometry is used.
    """
    # pick the default template and the number of angle rows per geometry
    if ang_lims["geom"] == "bi":
        template_file = get_zebraBI_default_geom_file()
        n_ang = 4
    else:  # ang_lims["geom"] == "nb"
        template_file = get_zebraNB_default_geom_file()
        n_ang = 3

    if template is not None:
        template_file = template

    with open(path, "w") as out_file:
        for line in template_file:
            out_file.write(line)
            if line.startswith("ANG_LIMITS"):
                # replace the template's angle rows with values from ang_lims
                for _ in range(n_ang):
                    next_line = next(template_file)
                    ang, _, _, _ = next_line.split()
                    if ang == "2theta":  # treat 2theta as gamma
                        ang = "Gamma"
                    vals = ang_lims[ang.lower()]
                    out_file.write(f"{'':<8}{ang:<10}{vals[0]:<10}{vals[1]:<10}{vals[2]:<10}\n")
def calc_ub_matrix(params, log=logger):
    """Compute a UB matrix by running the external Sxtal_Refgen program.

    Writes `params` as "key value" lines into a temporary .cfl file, runs
    Sxtal_Refgen on it, and parses the resulting .sfa file.

    Args:
        params (dict): cfl parameters written one per line.
        log (logging.Logger): receives the command line and program output.

    Returns:
        list: nine UB matrix entries (as strings) read after the BL_M marker.

    Raises:
        subprocess.CalledProcessError: if Sxtal_Refgen exits non-zero.
    """
    with tempfile.TemporaryDirectory() as temp_dir:
        cfl_file = os.path.join(temp_dir, "ub_matrix.cfl")

        with open(cfl_file, "w") as fileobj:
            for key, value in params.items():
                fileobj.write(f"{key} {value}\n")

        # run Sxtal_Refgen in the temp dir so its output files land there
        comp_proc = subprocess.run(
            [SXTAL_REFGEN_PATH, cfl_file],
            cwd=temp_dir,
            check=True,
            stdout=subprocess.PIPE,
            stderr=subprocess.STDOUT,
            text=True,
        )
        log.info(" ".join(comp_proc.args))
        log.info(comp_proc.stdout)

        # Sxtal_Refgen writes the result next to the input, with .sfa extension
        sfa_file = os.path.join(temp_dir, "ub_matrix.sfa")
        ub_matrix = []
        with open(sfa_file, "r") as fileobj:
            for line in fileobj:
                if "BL_M" in line:  # next 3 lines contain the matrix
                    for _ in range(3):
                        next_line = next(fileobj)
                        # keep the first three whitespace-separated values per row
                        *vals, _ = next_line.split(maxsplit=3)
                        ub_matrix.extend(vals)

    return ub_matrix
def read_cfl_file(fileobj):
    """Parse a Sxtal_Refgen .cfl file, extracting a fixed set of parameters.

    Args:
        fileobj: iterable of text lines (open file or StringIO).

    Returns:
        dict: keyed by the known parameter names; values stay None for absent
        parameters. "UBMAT" becomes a flat list of nine strings; every other
        value is the raw remainder of its line.
    """
    params = {
        "SPGR": None,
        "CELL": None,
        "WAVE": None,
        "UBMAT": None,
        "HLIM": None,
        "SRANG": None,
        "lattiCE": None,
        "kvect": None,
    }
    keywords = tuple(params)

    for raw in fileobj:
        stripped = raw.strip()
        # anything after '!' is a comment
        if "!" in stripped:
            stripped = stripped.split("!", 1)[0]

        if not stripped.startswith(keywords):
            continue

        if stripped.startswith("UBMAT"):
            # the UB matrix occupies the next three lines, three values each
            matrix_vals = []
            for _ in range(3):
                row = next(fileobj).strip()
                matrix_vals.extend(row.split(maxsplit=2))
            params["UBMAT"] = matrix_vals
        else:
            key, value = stripped.split(maxsplit=1)
            params[key] = value

    return params
def read_cif_file(fileobj):
    """Extract space group, cell parameters and atom data from a CIF file.

    Args:
        fileobj: iterable of text lines (open file or StringIO).

    Returns:
        dict: "SPGR" (space group string or None), "CELL" (six cell values
        joined into one string, or None if any was missing), and "ATOM"
        (list of per-atom strings with fields in atom_param_pos order).
    """
    params = {"SPGR": None, "CELL": None, "ATOM": []}

    # six cell parameters, collected individually and joined at the end
    cell_params = {
        "_cell_length_a": None,
        "_cell_length_b": None,
        "_cell_length_c": None,
        "_cell_angle_alpha": None,
        "_cell_angle_beta": None,
        "_cell_angle_gamma": None,
    }
    cell_param_names = tuple(cell_params)

    # maps each atom field of interest to its column index in the loop_ block;
    # _atom_site_label is assumed to be the first column (index 0)
    atom_param_pos = {
        "_atom_site_label": 0,
        "_atom_site_type_symbol": None,
        "_atom_site_fract_x": None,
        "_atom_site_fract_y": None,
        "_atom_site_fract_z": None,
        "_atom_site_U_iso_or_equiv": None,
        "_atom_site_occupancy": None,
    }
    atom_param_names = tuple(atom_param_pos)

    for line in fileobj:
        line = line.strip()
        if line.startswith("_space_group_name_H-M_alt"):
            _, val = line.split(maxsplit=1)
            params["SPGR"] = val.strip("'")
        elif line.startswith(cell_param_names):
            param, val = line.split(maxsplit=1)
            cell_params[param] = val
        elif line.startswith("_atom_site_label"):  # assume this is the start of atom data
            for ind, line in enumerate(fileobj, start=1):
                line = line.strip()
                # read fields
                if line.startswith("_atom_site"):
                    if line.startswith(atom_param_names):
                        atom_param_pos[line] = ind
                    continue
                # read data till an empty line
                if not line:
                    break
                vals = line.split()
                # NOTE(review): if any expected _atom_site field is absent from
                # the file, its position stays None and vals[None] raises
                # TypeError — assumes all listed fields are present; confirm.
                params["ATOM"].append(" ".join([vals[ind] for ind in atom_param_pos.values()]))

    # only emit CELL when all six parameters were found
    if None not in cell_params.values():
        params["CELL"] = " ".join(cell_params.values())

    return params
def export_cfl_file(path, params, template=None):
    """Write a .cfl file, substituting values from `params` into a template.

    Args:
        path (str): output file path.
        params (dict): parameter values; "UBMAT" is a flat list of nine
            strings written as three rows, "ATOM" a list of atom strings.
        template: optional file-like template; when None, the default .cfl
            template is used. Lines whose keyword is not in `params` are
            copied through; INSTR is always rewritten to "zebra.geom".
    """
    param_names = tuple(params)
    if template is None:
        template_file = get_zebra_default_cfl_file()
    else:
        template_file = template

    atom_done = False
    with open(path, "w") as out_file:
        for line in template_file:
            if line.startswith(param_names):
                if line.startswith("UBMAT"):  # only UBMAT values are not on the same line
                    out_file.write(line)
                    # skip the template's three matrix rows, write ours instead
                    for i in range(3):
                        next(template_file)
                        out_file.write(" ".join(params["UBMAT"][3 * i : 3 * (i + 1)]) + "\n")
                elif line.startswith("ATOM"):
                    if "ATOM" in params:
                        # replace all ATOM with values in params
                        while line.startswith("ATOM"):
                            line = next(template_file)
                        for atom_line in params["ATOM"]:
                            out_file.write(f"ATOM {atom_line}\n")
                        atom_done = True
                        # NOTE(review): the first non-ATOM line consumed by the
                        # while loop above is never written to the output —
                        # confirm whether dropping it is intended.
                else:
                    param, _ = line.split(maxsplit=1)
                    out_file.write(f"{param} {params[param]}\n")
            elif line.startswith("INSTR"):
                # replace it with a default name
                out_file.write("INSTR zebra.geom\n")
            else:
                out_file.write(line)

        # append ATOM data if it's present and a template did not contain it
        if "ATOM" in params and not atom_done:
            out_file.write("\n")
            for atom_line in params["ATOM"]:
                out_file.write(f"ATOM {atom_line}\n")
def sort_hkl_file_bi(file_in, file_out, priority, chunks):
    """Re-order an .hkl reflection list (bisecting geometry) to minimize motor travel.

    Reflections are partitioned into chunks of the first-priority angle, then
    sub-chunked by the second and third priorities; the scan direction
    alternates between consecutive chunks (boustrophedon order) at each level.
    The 3-line header is preserved and the re-ordered body written to file_out.

    Args:
        file_in (str): input .hkl path (3 header lines, then data columns with
            2theta/omega/chi/phi in columns 4-7).
        file_out (str): output path.
        priority (sequence of 4 str): angle names ordered by sort priority.
        chunks (sequence of 3 int): chunk widths (degrees) per priority level.
    """
    with open(file_in) as fileobj:
        file_in_data = fileobj.readlines()

    # numeric angle columns; first 3 lines are a header
    data = np.genfromtxt(file_in, skip_header=3)
    stt = data[:, 4]
    omega = data[:, 5]
    chi = data[:, 6]
    phi = data[:, 7]

    lines = file_in_data[3:]
    lines_update = []

    angles = {"2theta": stt, "omega": omega, "chi": chi, "phi": phi}

    # Reverse flag
    to_reverse = False
    to_reverse_p2 = False
    to_reverse_p3 = False

    # Get indices within first priority
    ang_p1 = angles[priority[0]]
    begin_p1 = floor(min(ang_p1))
    end_p1 = ceil(max(ang_p1))
    delta_p1 = chunks[0]
    for p1 in range(begin_p1, end_p1, delta_p1):
        # reflections whose first-priority angle falls in [p1, p1 + delta_p1)
        ind_p1 = [j for j, x in enumerate(ang_p1) if p1 <= x and x < p1 + delta_p1]
        stt_new = [stt[x] for x in ind_p1]
        omega_new = [omega[x] for x in ind_p1]
        chi_new = [chi[x] for x in ind_p1]
        phi_new = [phi[x] for x in ind_p1]
        lines_new = [lines[x] for x in ind_p1]
        # NOTE(review): key is "stt" here but "2theta" in `angles` above, so
        # priority[1:] must use "stt" while priority[0] uses "2theta" — confirm.
        angles_p2 = {"stt": stt_new, "omega": omega_new, "chi": chi_new, "phi": phi_new}

        # Get indices for second priority; alternate direction per p1 chunk
        ang_p2 = angles_p2[priority[1]]
        if len(ang_p2) > 0 and to_reverse_p2:
            begin_p2 = ceil(max(ang_p2))
            end_p2 = floor(min(ang_p2))
            delta_p2 = -chunks[1]
        elif len(ang_p2) > 0 and not to_reverse_p2:
            end_p2 = ceil(max(ang_p2))
            begin_p2 = floor(min(ang_p2))
            delta_p2 = chunks[1]
        else:
            # empty chunk -> empty range
            end_p2 = 0
            begin_p2 = 0
            delta_p2 = 1
        to_reverse_p2 = not to_reverse_p2

        for p2 in range(begin_p2, end_p2, delta_p2):
            # normalize window bounds (delta_p2 may be negative)
            min_p2 = min([p2, p2 + delta_p2])
            max_p2 = max([p2, p2 + delta_p2])
            ind_p2 = [j for j, x in enumerate(ang_p2) if min_p2 <= x and x < max_p2]
            stt_new2 = [stt_new[x] for x in ind_p2]
            omega_new2 = [omega_new[x] for x in ind_p2]
            chi_new2 = [chi_new[x] for x in ind_p2]
            phi_new2 = [phi_new[x] for x in ind_p2]
            lines_new2 = [lines_new[x] for x in ind_p2]
            angles_p3 = {"stt": stt_new2, "omega": omega_new2, "chi": chi_new2, "phi": phi_new2}

            # Get indices for third priority; note the extra chunk padding on
            # both ends of the range at this level
            ang_p3 = angles_p3[priority[2]]
            if len(ang_p3) > 0 and to_reverse_p3:
                begin_p3 = ceil(max(ang_p3)) + chunks[2]
                end_p3 = floor(min(ang_p3)) - chunks[2]
                delta_p3 = -chunks[2]
            elif len(ang_p3) > 0 and not to_reverse_p3:
                end_p3 = ceil(max(ang_p3)) + chunks[2]
                begin_p3 = floor(min(ang_p3)) - chunks[2]
                delta_p3 = chunks[2]
            else:
                end_p3 = 0
                begin_p3 = 0
                delta_p3 = 1
            to_reverse_p3 = not to_reverse_p3

            for p3 in range(begin_p3, end_p3, delta_p3):
                min_p3 = min([p3, p3 + delta_p3])
                max_p3 = max([p3, p3 + delta_p3])
                ind_p3 = [j for j, x in enumerate(ang_p3) if min_p3 <= x and x < max_p3]
                # final ordering within the chunk by the fourth-priority angle,
                # alternating direction between consecutive chunks
                angle_new3 = [angles_p3[priority[3]][x] for x in ind_p3]
                ind_final = [x for _, x in sorted(zip(angle_new3, ind_p3), reverse=to_reverse)]
                to_reverse = not to_reverse
                for i in ind_final:
                    lines_update.append(lines_new2[i])

    # write back: 3 header lines unchanged, then the re-ordered reflections
    with open(file_out, "w") as fileobj:
        for _ in range(3):
            fileobj.write(file_in_data.pop(0))
        fileobj.writelines(lines_update)
def sort_hkl_file_nb(file_in, file_out, priority, chunks):
    """Re-order an .hkl reflection list (normal-beam geometry) to minimize motor travel.

    Two-level version of sort_hkl_file_bi: reflections are chunked by the
    first-priority angle, sub-chunked by the second, and finally sorted by the
    third, with alternating scan direction between consecutive chunks.

    Args:
        file_in (str): input .hkl path (3 header lines, then data columns with
            gamma/omega/nu in columns 4-6).
        file_out (str): output path.
        priority (sequence of 3 str): angle names ("gamma", "omega", "nu")
            ordered by sort priority.
        chunks (sequence of 2 int): chunk widths (degrees) per priority level.
    """
    with open(file_in) as fileobj:
        file_in_data = fileobj.readlines()

    # numeric angle columns; first 3 lines are a header
    data = np.genfromtxt(file_in, skip_header=3)
    gamma = data[:, 4]
    omega = data[:, 5]
    nu = data[:, 6]

    lines = file_in_data[3:]
    lines_update = []

    angles = {"gamma": gamma, "omega": omega, "nu": nu}

    to_reverse = False
    to_reverse_p2 = False

    # Get indices within first priority
    ang_p1 = angles[priority[0]]
    begin_p1 = floor(min(ang_p1))
    end_p1 = ceil(max(ang_p1))
    delta_p1 = chunks[0]
    for p1 in range(begin_p1, end_p1, delta_p1):
        # reflections whose first-priority angle falls in [p1, p1 + delta_p1)
        ind_p1 = [j for j, x in enumerate(ang_p1) if p1 <= x and x < p1 + delta_p1]
        # Get angles from within nu range
        lines_new = [lines[x] for x in ind_p1]
        gamma_new = [gamma[x] for x in ind_p1]
        omega_new = [omega[x] for x in ind_p1]
        nu_new = [nu[x] for x in ind_p1]
        angles_p2 = {"gamma": gamma_new, "omega": omega_new, "nu": nu_new}

        # Get indices for second priority; alternate direction per p1 chunk
        ang_p2 = angles_p2[priority[1]]
        if len(gamma_new) > 0 and to_reverse_p2:
            begin_p2 = ceil(max(ang_p2))
            end_p2 = floor(min(ang_p2))
            delta_p2 = -chunks[1]
        elif len(gamma_new) > 0 and not to_reverse_p2:
            end_p2 = ceil(max(ang_p2))
            begin_p2 = floor(min(ang_p2))
            delta_p2 = chunks[1]
        else:
            # empty chunk -> empty range
            end_p2 = 0
            begin_p2 = 0
            delta_p2 = 1
        to_reverse_p2 = not to_reverse_p2

        for p2 in range(begin_p2, end_p2, delta_p2):
            # normalize window bounds (delta_p2 may be negative)
            min_p2 = min([p2, p2 + delta_p2])
            max_p2 = max([p2, p2 + delta_p2])
            ind_p2 = [j for j, x in enumerate(ang_p2) if min_p2 <= x and x < max_p2]
            # final ordering within the chunk by the third-priority angle,
            # alternating direction between consecutive chunks
            angle_new2 = [angles_p2[priority[2]][x] for x in ind_p2]
            ind_final = [x for _, x in sorted(zip(angle_new2, ind_p2), reverse=to_reverse)]
            to_reverse = not to_reverse
            for i in ind_final:
                lines_update.append(lines_new[i])

    # write back: 3 header lines unchanged, then the re-ordered reflections
    with open(file_out, "w") as fileobj:
        for _ in range(3):
            fileobj.write(file_in_data.pop(0))
        fileobj.writelines(lines_update)

View File

@ -1,36 +1,20 @@
import os import os
import numpy as np ZEBRA_PROPOSALS_PATHS = [
f"/afs/psi.ch/project/sinqdata/{year}/zebra/" for year in (2016, 2017, 2018, 2020, 2021)
SINQ_PATH = "/afs/psi.ch/project/sinqdata" ]
ZEBRA_PROPOSALS_PATH = os.path.join(SINQ_PATH, "{year}/zebra/{proposal}")
def find_proposal_path(proposal): def find_proposal_path(proposal):
for entry in os.scandir(SINQ_PATH): proposal = proposal.strip()
if entry.is_dir() and len(entry.name) == 4 and entry.name.isdigit(): if proposal:
proposal_path = ZEBRA_PROPOSALS_PATH.format(year=entry.name, proposal=proposal) for zebra_proposals_path in ZEBRA_PROPOSALS_PATHS:
proposal_path = os.path.join(zebra_proposals_path, proposal)
if os.path.isdir(proposal_path): if os.path.isdir(proposal_path):
# found it # found it
break break
else:
raise ValueError(f"Can not find data for proposal '{proposal}'.")
else: else:
raise ValueError(f"Can not find data for proposal '{proposal}'") proposal_path = ""
return proposal_path return proposal_path
def parse_hkl(fileobj, data_type):
next(fileobj)
fields = map(str.lower, next(fileobj).strip("!").strip().split())
next(fileobj)
data = np.loadtxt(fileobj, unpack=True)
res = dict(zip(fields, data))
# adapt to .ccl/.dat files naming convention
res["counts"] = res.pop("f2")
if data_type == ".hkl":
for ind in ("h", "k", "l"):
res[ind] = res[ind].astype(int)
return res

View File

@ -2,16 +2,8 @@ import numpy as np
from numba import njit from numba import njit
pi_r = 180 / np.pi pi_r = 180 / np.pi
IMAGE_W = 256
IMAGE_H = 128
XNORM = 128
YNORM = 64
XPIX = 0.734
YPIX = 1.4809
@njit(cache=True)
def z4frgn(wave, ga, nu): def z4frgn(wave, ga, nu):
"""CALCULATES DIFFRACTION VECTOR IN LAB SYSTEM FROM GA AND NU """CALCULATES DIFFRACTION VECTOR IN LAB SYSTEM FROM GA AND NU
@ -23,34 +15,36 @@ def z4frgn(wave, ga, nu):
""" """
ga_r = ga / pi_r ga_r = ga / pi_r
nu_r = nu / pi_r nu_r = nu / pi_r
z4 = [np.sin(ga_r) * np.cos(nu_r), np.cos(ga_r) * np.cos(nu_r) - 1.0, np.sin(nu_r)] z4 = [0.0, 0.0, 0.0]
z4[0] = (np.sin(ga_r) * np.cos(nu_r)) / wave
z4[1] = (np.cos(ga_r) * np.cos(nu_r) - 1.0) / wave
z4[2] = (np.sin(nu_r)) / wave
return np.array(z4) / wave return z4
@njit(cache=True) @njit(cache=True)
def phimat_T(phi): def phimat(phi):
"""TRANSPOSED BUSING AND LEVY CONVENTION ROTATION MATRIX FOR PHI OR OMEGA """BUSING AND LEVY CONVENTION ROTATION MATRIX FOR PHI OR OMEGA
Args: Args:
PHI PHI
Returns: Returns:
DUM_T DUM
""" """
ph_r = phi / pi_r ph_r = phi / pi_r
dum = np.zeros((3, 3)) dum = np.zeros(9).reshape(3, 3)
dum[0, 0] = np.cos(ph_r) dum[0, 0] = np.cos(ph_r)
dum[1, 0] = np.sin(ph_r) dum[0, 1] = np.sin(ph_r)
dum[0, 1] = -dum[1, 0] dum[1, 0] = -dum[0, 1]
dum[1, 1] = dum[0, 0] dum[1, 1] = dum[0, 0]
dum[2, 2] = 1 dum[2, 2] = 1
return dum return dum
@njit(cache=True)
def z1frnb(wave, ga, nu, om): def z1frnb(wave, ga, nu, om):
"""CALCULATE DIFFRACTION VECTOR Z1 FROM GA, OM, NU, ASSUMING CH=PH=0 """CALCULATE DIFFRACTION VECTOR Z1 FROM GA, OM, NU, ASSUMING CH=PH=0
@ -61,28 +55,30 @@ def z1frnb(wave, ga, nu, om):
Z1 Z1
""" """
z4 = z4frgn(wave, ga, nu) z4 = z4frgn(wave, ga, nu)
z3 = phimat_T(phi=om).dot(z4) dum = phimat(phi=om)
dumt = np.transpose(dum)
z3 = dumt.dot(z4)
return z3 return z3
@njit(cache=True) @njit(cache=True)
def chimat_T(chi): def chimat(chi):
"""TRANSPOSED BUSING AND LEVY CONVENTION ROTATION MATRIX FOR CHI """BUSING AND LEVY CONVENTION ROTATION MATRIX FOR CHI
Args: Args:
CHI CHI
Returns: Returns:
DUM_T DUM
""" """
ch_r = chi / pi_r ch_r = chi / pi_r
dum = np.zeros((3, 3)) dum = np.zeros(9).reshape(3, 3)
dum[0, 0] = np.cos(ch_r) dum[0, 0] = np.cos(ch_r)
dum[2, 0] = np.sin(ch_r) dum[0, 2] = np.sin(ch_r)
dum[1, 1] = 1 dum[1, 1] = 1
dum[0, 2] = -dum[2, 0] dum[2, 0] = -dum[0, 2]
dum[2, 2] = dum[0, 0] dum[2, 2] = dum[0, 0]
return dum return dum
@ -98,8 +94,13 @@ def z1frz3(z3, chi, phi):
Returns: Returns:
Z1 Z1
""" """
z2 = chimat_T(chi).dot(z3) dum1 = chimat(chi)
z1 = phimat_T(phi).dot(z2) dum2 = np.transpose(dum1)
z2 = dum2.dot(z3)
dum1 = phimat(phi)
dum2 = np.transpose(dum1)
z1 = dum2.dot(z2)
return z1 return z1
@ -281,81 +282,115 @@ def fixdnu(wave, z1, ch2, ph2, nu):
return ch, ph, ga, om return ch, ph, ga, om
def ang2hkl(wave, ddist, gammad, om, ch, ph, nud, ub_inv, x, y): # for test run:
"""Calculate hkl-indices of a reflection from its position (x,y,angles) at the 2d-detector""" # angtohkl(wave=1.18,ddist=616,gammad=48.66,om=-22.80,ch=0,ph=0,nud=0,x=128,y=64)
ga, nu = det2pol(ddist, gammad, nud, x, y)
z1 = z1frmd(wave, ga, om, ch, ph, nu)
hkl = ub_inv @ z1
return hkl
def ang2hkl_det(wave, ddist, gammad, om, chi, phi, nud, ub_inv): def angtohkl(wave, ddist, gammad, om, ch, ph, nud, x, y):
"""Calculate hkl-indices of a reflection from its position (x,y,angles) at the 2d-detector""" """finds hkl-indices of a reflection from its position (x,y,angles) at the 2d-detector
xv, yv = np.meshgrid(range(IMAGE_W), range(IMAGE_H))
xobs = (xv.ravel() - XNORM) * XPIX
yobs = (yv.ravel() - YNORM) * YPIX
a = xobs Args:
b = ddist * np.cos(yobs / ddist) gammad, om, ch, ph, nud, xobs, yobs
z = ddist * np.sin(yobs / ddist)
d = np.sqrt(a * a + b * b)
gamma = gammad + np.arctan2(a, b) * pi_r Returns:
nu = nud + np.arctan2(z, d) * pi_r
gamma_r = gamma / pi_r """
nu_r = nu / pi_r # define ub matrix if testing angtohkl(wave=1.18,ddist=616,gammad=48.66,om=-22.80,ch=0,ph=0,nud=0,x=128,y=64) against f90:
z4 = np.vstack( # ub = np.array([-0.0178803,-0.0749231,0.0282804,-0.0070082,-0.0368001,-0.0577467,0.1609116,-0.0099281,0.0006274]).reshape(3,3)
( ub = np.array(
np.sin(gamma_r) * np.cos(nu_r) / wave, [0.04489, 0.02045, -0.2334, -0.06447, 0.00129, -0.16356, -0.00328, 0.2542, 0.0196]
(np.cos(gamma_r) * np.cos(nu_r) - 1) / wave, ).reshape(3, 3)
np.sin(nu_r) / wave, print(
) "The input values are: ga=",
gammad,
", om=",
om,
", ch=",
ch,
", ph=",
ph,
", nu=",
nud,
", x=",
x,
", y=",
y,
) )
om_r = om / pi_r ga, nu = det2pol(ddist, gammad, nud, x, y)
dum3 = np.zeros((3, 3))
dum3[0, 0] = np.cos(om_r)
dum3[1, 0] = np.sin(om_r)
dum3[0, 1] = -dum3[1, 0]
dum3[1, 1] = dum3[0, 0]
dum3[2, 2] = 1
chi_r = chi / pi_r print(
dum2 = np.zeros((3, 3)) "The calculated actual angles are: ga=",
dum2[0, 0] = np.cos(chi_r) ga,
dum2[2, 0] = np.sin(chi_r) ", om=",
dum2[1, 1] = 1 om,
dum2[0, 2] = -dum2[2, 0] ", ch=",
dum2[2, 2] = dum2[0, 0] ch,
", ph=",
ph,
", nu=",
nu,
)
phi_r = phi / pi_r
dum1 = np.zeros((3, 3))
dum1[0, 0] = np.cos(phi_r)
dum1[1, 0] = np.sin(phi_r)
dum1[0, 1] = -dum1[1, 0]
dum1[1, 1] = dum1[0, 0]
dum1[2, 2] = 1
hkl = (ub_inv @ dum1 @ dum2 @ dum3 @ z4).reshape(3, IMAGE_H, IMAGE_W)
return hkl
def ang2hkl_1d(wave, ga, om, ch, ph, nu, ub_inv):
"""Calculate hkl-indices of a reflection from its position (angles) at the 1d-detector"""
z1 = z1frmd(wave, ga, om, ch, ph, nu) z1 = z1frmd(wave, ga, om, ch, ph, nu)
hkl = ub_inv @ z1
print("The diffraction vector is:", z1[0], z1[1], z1[2])
ubinv = np.linalg.inv(ub)
h = ubinv[0, 0] * z1[0] + ubinv[0, 1] * z1[1] + ubinv[0, 2] * z1[2]
k = ubinv[1, 0] * z1[0] + ubinv[1, 1] * z1[1] + ubinv[1, 2] * z1[2]
l = ubinv[2, 0] * z1[0] + ubinv[2, 1] * z1[1] + ubinv[2, 2] * z1[2]
print("The Miller indexes are:", h, k, l)
ch2, ph2 = eqchph(z1)
ch, ph, ga, om = fixdnu(wave, z1, ch2, ph2, nu)
print(
"Bisecting angles to put reflection into the detector center: ga=",
ga,
", om=",
om,
", ch=",
ch,
", ph=",
ph,
", nu=",
nu,
)
def ang2hkl(wave, ddist, gammad, om, ch, ph, nud, ub, x, y):
"""Calculate hkl-indices of a reflection from its position (x,y,angles) at the 2d-detector
"""
ga, nu = det2pol(ddist, gammad, nud, x, y)
z1 = z1frmd(wave, ga, om, ch, ph, nu)
ubinv = np.linalg.inv(ub)
hkl = ubinv @ z1
return hkl return hkl
def ang_proc(wave, ddist, gammad, om, ch, ph, nud, x, y): def ang_proc(wave, ddist, gammad, om, ch, ph, nud, x, y):
"""Utility function to calculate ch, ph, ga, om""" """Utility function to calculate ch, ph, ga, om
"""
ga, nu = det2pol(ddist, gammad, nud, x, y) ga, nu = det2pol(ddist, gammad, nud, x, y)
z1 = z1frmd(wave, ga, om, ch, ph, nu) z1 = z1frmd(wave, ga, om, ch, ph, nu)
ch2, ph2 = eqchph(z1) ch2, ph2 = eqchph(z1)
ch, ph, ga, om = fixdnu(wave, z1, ch2, ph2, nu) ch, ph, ga, om = fixdnu(wave, z1, ch2, ph2, nu)
return ch, ph, ga, om return ch, ph, ga, om
def gauss(x, *p):
    """Gaussian profile A * exp(-(x - mu)^2 / (2 * sigma^2)).

    Args:
        x: evaluation point(s); scalar or numpy array.
        *p: exactly three parameters (A, mu, sigma) — amplitude,
            position of the center, and width.

    Returns:
        The Gaussian evaluated at x (same shape as x).
    """
    amplitude, center, width = p
    exponent = -((x - center) ** 2) / (2.0 * width ** 2)
    return amplitude * np.exp(exponent)

4
scripts/pyzebra-start.sh Normal file
View File

@ -0,0 +1,4 @@
# Launch script for the production pyzebra web server.
# Enable conda shell integration so `conda activate` works in this non-interactive shell.
source /home/pyzebra/miniconda3/etc/profile.d/conda.sh
# Activate the "prod" environment where the pyzebra conda package is installed.
conda activate prod
# Serve on port 80; websocket origin must match the public host:port or Bokeh
# rejects connections. NOTE(review): port 80 presumably requires root — confirm
# against the systemd unit that invokes this script.
pyzebra --port=80 --allow-websocket-origin=pyzebra.psi.ch:80 --spind-path=/home/pyzebra/spind

View File

@ -0,0 +1,4 @@
# Launch script for the test pyzebra web server.
# Enable conda shell integration so `conda activate` works in this non-interactive shell.
source /home/pyzebra/miniconda3/etc/profile.d/conda.sh
# Activate the "test" environment.
conda activate test
# Runs the app via the checked-out source tree (cli.py) rather than an installed
# package; default port is used here and the websocket origin pins it to 5006.
python ~/pyzebra/pyzebra/app/cli.py --allow-websocket-origin=pyzebra.psi.ch:5006 --spind-path=/home/pyzebra/spind

View File

@ -0,0 +1,11 @@
# systemd unit for the pyzebra TEST deployment (port 5006).
[Unit]
Description=pyzebra-test web server (runs on port 5006)
[Service]
Type=simple
# Runs unprivileged — port 5006 needs no root.
User=pyzebra
ExecStart=/bin/bash /usr/local/sbin/pyzebra-test-start.sh
# Restart unconditionally on exit so the service survives crashes.
Restart=always
[Install]
WantedBy=multi-user.target

10
scripts/pyzebra.service Normal file
View File

@ -0,0 +1,10 @@
# systemd unit for the pyzebra PRODUCTION deployment.
[Unit]
Description=pyzebra web server
[Service]
Type=simple
# NOTE(review): no User= is set, so this runs as root — presumably required to
# bind port 80 (see the start script); confirm whether a capability-based
# setup (AmbientCapabilities=CAP_NET_BIND_SERVICE) would be preferable.
ExecStart=/bin/bash /usr/local/sbin/pyzebra-start.sh
# Restart unconditionally on exit so the service survives crashes.
Restart=always
[Install]
WantedBy=multi-user.target