2 Commits

Author SHA1 Message Date
fcd6a56e4e new feature: preview and import of pshell data files 2016-04-09 13:07:37 +02:00
29d4b6881f new angle-scan processing features, update documentation, bugfixes
new functions:
- voigt_fwhm_lor
- rotate_hemi_scan
- import_tpi_scan
- draw_diffraction_cone

updated functions:
- interpolate_hemi_scan
- display_hemi_scan
- duplicate_hemi_scan

updated documentation:
- installation instructions
- readme
2016-03-30 12:02:57 +02:00
11 changed files with 2731 additions and 279 deletions

README.md (new file, +34 lines)

@ -0,0 +1,34 @@
Introduction
============
PEARL Procedures is a suite of Igor Pro procedures developed for data acquisition and data processing at the PEARL beamline at the Swiss Light Source.
Installation
============
PEARL Procedures should be installed according to the regular Igor Pro guidelines. Please read the Igor help `About Igor Pro User Files` for details.
- Make a `pearl-procs` directory in your private or shared `User Procedures` folder, and copy the PEARL Procedures distribution there.
- Create shortcuts to the `pearl-arpes.ipf` and `pearl-menu.ipf` files, and move them to the `Igor Procedures` folder next to your `User Procedures` folder.
- Find the `HDF5.XOP` extension in the `Igor Pro Folder` under `More Extensions/File Loaders`, create a shortcut, and move the shortcut to the `Igor Extensions` folder next to your `User Procedures` folder.
- Find the `HDF5 Help.ihf` next to `HDF5.XOP`, create a shortcut, and move the shortcut to the `Igor Help Files` folder next to your `User Procedures` folder.
License
=======
The source code of PEARL Procedures is available under the [Apache License, Version 2.0](http://www.apache.org/licenses/LICENSE-2.0) at <https://git.psi.ch/pearl-public/igor-procs>.
Users of PEARL Procedures are requested to coordinate and share the development of the code with the original author.
Please read and respect the respective license agreements.
Author
------
Matthias Muntwiler, <mailto:matthias.muntwiler@psi.ch>
Copyright
---------
Copyright 2009-2016 by [Paul Scherrer Institut](http://www.psi.ch)

Doxyfile (modified)

@ -758,7 +758,8 @@ WARN_LOGFILE =
# spaces.
# Note: If this tag is empty the current directory is searched.
INPUT = ../pearl
INPUT = ../pearl \
src
# This tag can be used to specify the character encoding of the source files
# that doxygen parses. Internally doxygen uses the UTF-8 encoding. Doxygen uses
@ -778,7 +779,8 @@ INPUT_ENCODING = CP1252
# *.md, *.mm, *.dox, *.py, *.f90, *.f, *.for, *.tcl, *.vhd, *.vhdl, *.ucf,
# *.qsf, *.as and *.js.
FILE_PATTERNS = *.ipf
FILE_PATTERNS = *.ipf \
*.dox
# The RECURSIVE tag can be used to specify whether or not subdirectories should
# be searched for input files as well.
@ -863,7 +865,7 @@ IMAGE_PATH =
# code is scanned, but not when the output code is generated. If lines are added
# or removed, the anchors will not be placed correctly.
INPUT_FILTER = "gawk -f doxygen-filter-ipf.awk"
INPUT_FILTER =
# The FILTER_PATTERNS tag can be used to specify filters on a per file pattern
# basis. Doxygen will compare the file name with each pattern and apply the
@ -872,7 +874,7 @@ INPUT_FILTER = "gawk -f doxygen-filter-ipf.awk"
# filters are used. If the FILTER_PATTERNS tag is empty or if none of the
# patterns match the file name, INPUT_FILTER is applied.
FILTER_PATTERNS =
FILTER_PATTERNS = "*.ipf=\"gawk -f doxygen-filter-ipf.awk\""
# If the FILTER_SOURCE_FILES tag is set to YES, the input filter (if set using
# INPUT_FILTER) will also be used to filter the input files that are used for

Makefile (modified)

@ -11,6 +11,9 @@ DOX=doxygen
DOXOPTS=
LATEX_DIR=latex
REVISION=$(shell git describe --always --tags --dirty --long)
export REVISION
all: docs
docs: doxygen pdf
@ -24,4 +27,3 @@ pdf: doxygen
clean:
-rm latex/*
-rm html/*

doc/src/mainpage.dox (new file, +27 lines)

@ -0,0 +1,27 @@
/*! @mainpage Introduction
\section sec_intro Introduction
PEARL Procedures is a suite of Igor Pro procedures developed for data acquisition and data processing at the PEARL beamline at the Swiss Light Source.
\section sec_install Installation
PEARL Procedures should be installed according to the regular Igor Pro guidelines. Please read the Igor help `About Igor Pro User Files` for details.
- Make a `pearl-procs` directory in your private or shared `User Procedures` folder, and copy the PEARL Procedures distribution there.
- Create shortcuts to the `pearl-arpes.ipf` and `pearl-menu.ipf` files, and move them to the `Igor Procedures` folder next to your `User Procedures` folder.
- Find the `HDF5.XOP` extension in the `Igor Pro Folder` under `More Extensions/File Loaders`, create a shortcut, and move the shortcut to the `Igor Extensions` folder next to your `User Procedures` folder.
- Find the `HDF5 Help.ihf` next to `HDF5.XOP`, create a shortcut, and move the shortcut to the `Igor Help Files` folder next to your `User Procedures` folder.
\section sec_license License Information
An open distribution of PEARL Procedures is available under the [Apache License, Version 2.0](http://www.apache.org/licenses/LICENSE-2.0) at <https://git.psi.ch/pearl-public/igor-procs>.
Users of PEARL Procedures are requested to coordinate and share the development of the code with the original author.
Please read and respect the respective license agreements.
\author Matthias Muntwiler, <mailto:matthias.muntwiler@psi.ch>
\version This documentation is compiled from revision $(REVISION).
\copyright 2009-2016 by [Paul Scherrer Institut](http://www.psi.ch)
\copyright Licensed under the [Apache License, Version 2.0](http://www.apache.org/licenses/LICENSE-2.0)
*/

mm/mm-physconst.ipf (new file, +97 lines)

@ -0,0 +1,97 @@
#pragma rtGlobals=1 // Use modern global access method.
#pragma version = 1.05
// physical constants
// original version: 03-05-23 by mm
// $Id$
// source: CODATA 2002 [Rev. Mod. Phys. 77, 1 (2005)]
// universal constants
constant kSpeedOfLight = 2.99792458e8 // m/s
constant kMagnField = 1.25663706144e-6 // V s / A / m
constant kElField = 8.854187817620e-12 // A s / V / m
constant kGravitation = 6.6742e-11 // m^3 / kg / s^2
constant kHBar = 6.58211915e-16 // eV s
constant kPlanckBar = 6.58211915e-16 // eV s
constant kPlanck = 4.13566743e-15 // eV s
constant kHBarC = 197.326968 // MeV fm
constant kHC = 1239.84190605 // eV nm
constant kHCicm = 1239.84190605e-7 // eV cm^-1
constant kPlanckBarSI = 1.05457168e-34 // J s
constant kPlanckSI = 6.6260693e-34 // J s
// electromagnetic constants
constant kElCharge = 1.60217653e-19 // A s
constant kMagnFlux = 2.06783372e-15 // Wb
constant kConductance = 7.748091733e-5 // S
constant kBohrMagneton = 5.788381804e-5 // eV T^-1
constant kBohrMagnetonSI = 9.27400949e-24 // J T^-1 = A m^2
constant kNuclearMagneton = 3.152451259e-8 // eV T^-1
constant kNuclearMagnetonSI = 5.05078343e-27 // J T^-1
// atomic and nuclear constants
constant kFineStruct = 7.297352568e-3
constant kInvFineStruct = 137.03599911
constant kRydberg = 10973731.568525 // m^-1
constant kRydbergEnergy = 13.6056923 // eV
constant kBohrRadius = 0.5291772108e-10 // m
constant kHartreeEnergy = 27.2113845 // eV
constant kHartreeEnergySI = 4.35974417 // J
constant kElectronMass = 510.998918e3 // eV c^-2
constant kMuonMass = 105.6583692e6 // eV c^-2
constant kProtonMass = 938.272029e6 // eV c^-2
constant kNeutronMass = 939.565360e6 // eV c^-2
constant kElectronMassSI = 9.1093826e-31 // kg
constant kProtonMassSI = 1.67262171e-27 // kg
constant kComptonWavelength = 2.426310238e-12 // m
constant kElectronRadius = 2.817940325e-15 // m
constant kThomsonCrossSection = 0.665245873e-28 // m^2
constant kElectronGFactor = -2.0023193043718
// physico-chemical constants
constant kAvogadro = 6.0221415e23 // 1 / mol
constant kAtomicMassUnit = 931.494043e6 // eV / c^2
constant kAtomicMassUnitSI = 1.66053886e-27 // kg
constant kMolarGasSI = 8.314472 // J / K / mol
constant kBoltzmann = 8.617343e-5 // eV / K
constant kBoltzmannSI = 1.3806505e-23 // J /K
constant kWien = 2.8977685e-3 // m K
constant kStefanBoltzmann = 5.670400e-8 // W m^-2 K^-4
constant kJoulesPerEV = 1.60217653e-19 // J / eV
constant kEVPerHartree = 27.2113845 // eV / Eh
// custom constants
constant kFreeElectronDispersion = 3.79736 // eV Angstrom^2
// = h_bar^2 * c^2 / (2 * m_e)
// for E = kFreeElectronDispersion * k^2
threadsafe function FreeElectronWavelength(ekin, [v0, meff])
// Wavelength of a quasi-free electron in meters
variable ekin // kinetic energy of the electron in eV
variable v0 // inner potential (where applicable), default = 0
variable meff // effective mass relative to free electron, default = 1
if (ParamIsDefault(v0))
v0 = 0
endif
if (ParamIsDefault(meff))
meff = 1
endif
return khc * 1e-9 / sqrt(2 * kElectronMass * meff * (ekin + v0))
end
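As a usage note (not part of the committed file): the dispersion constant and the wavelength function above are typically combined for simple photoemission kinematics. A minimal sketch, assuming the constants compile as shown; the function name demo_kpar is hypothetical.
// usage sketch (hypothetical function name)
// from E = kFreeElectronDispersion * k^2, i.e. k = sqrt(E / kFreeElectronDispersion)
function demo_kpar(ekin, theta)
	variable ekin	// kinetic energy in eV
	variable theta	// emission angle in degrees
	variable k = sqrt(ekin / kFreeElectronDispersion)	// total wavenumber in 1/Angstrom
	print FreeElectronWavelength(ekin)	// quasi-free electron wavelength in meters
	return k * sin(theta * pi / 180)	// parallel wavenumber component in 1/Angstrom
end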

pearl-anglescan-process.ipf (modified)

@ -1204,6 +1204,7 @@ function duplicate_hemi_scan(source_nickname, dest_folder, dest_nickname, [xpdpl
string s_theta = s_prefix + "th"
string s_tot = s_prefix + "tot"
string s_weight = s_prefix + "wt"
string s_matrix = s_prefix + "matrix"
wave theta1 = $s_theta
wave polar1 = $s_polar
@ -1211,6 +1212,7 @@ function duplicate_hemi_scan(source_nickname, dest_folder, dest_nickname, [xpdpl
wave tot1 = $s_tot
wave weight1 = $s_weight
wave values1 = $s_int
wave /z matrix1 = $s_matrix
variable npol = numpnts(theta1)
@ -1230,6 +1232,7 @@ function duplicate_hemi_scan(source_nickname, dest_folder, dest_nickname, [xpdpl
s_theta = s_prefix + "th"
s_tot = s_prefix + "tot"
s_weight = s_prefix + "wt"
s_matrix = s_prefix + "matrix"
wave theta2 = $s_theta
wave polar2 = $s_polar
@ -1241,6 +1244,9 @@ function duplicate_hemi_scan(source_nickname, dest_folder, dest_nickname, [xpdpl
tot2 = tot1
weight2 = weight1
values2 = values1
if (waveexists(matrix1))
duplicate /o matrix1, $s_matrix
endif
if (!(NumberByKey("version", note(azim1), "=", "\r") >= 1.6))
azim2 += 180 // changed 151030 (v1.6)
@ -1250,6 +1256,48 @@ function duplicate_hemi_scan(source_nickname, dest_folder, dest_nickname, [xpdpl
setdatafolder saveDF
end
/// azimuthally rotate a hemispherical scan dataset.
///
/// this function works only for hemi scans created by make_hemi_grid() (or compatible functions).
///
/// @param nickname name prefix for waves. source data must be in current data folder.
/// @param angle azimuthal rotation angle in degrees.
///
function rotate_hemi_scan(nickname, angle)
string nickname
variable angle
dfref savedf = getdatafolderdfr()
if (strlen(nickname))
string s_prefix = nickname + "_"
string s_int = s_prefix + "i"
else
s_prefix = ""
s_int = "values"
endif
string s_polar = s_prefix + "pol"
string s_azim = s_prefix + "az"
string s_tot = s_prefix + "tot"
string s_weight = s_prefix + "wt"
wave polar = $s_polar
wave azim = $s_azim
wave tot = $s_tot
wave weight = $s_weight
wave values = $s_int
azim += angle
azim = azim < 0 ? azim + 360 : azim
azim = azim >= 360 ? azim - 360 : azim
duplicate /free polar, neg_polar
neg_polar = -polar
sort {neg_polar, azim}, polar, azim, tot, weight, values
setdatafolder saveDF
end
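For illustration only (hypothetical nick name; the hemi1_pol, hemi1_az, hemi1_i, ... waves are assumed to sit in the current data folder), rotating a dataset is a single call:
// usage sketch (hypothetical nick name "hemi1")
rotate_hemi_scan("hemi1", 30)	// shift all azimuths by +30 degrees, wrapped into the 0..360 range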
/// display a plot of a hemispherical angle scan.
///
/// the scan data must exist in the current data folder.
@ -1264,10 +1312,29 @@ end
/// @arg 0 linear
/// @arg 1 stereographic (default)
/// @arg 2 azimuthal
/// @arg 3 gnomonic (0 <= polar < 90).
///
/// @param graphtype type of graph
/// @arg 1 Igor "New Polar" (default)
/// @arg 2 XPDplot (reserved, not implemented)
/// @arg 1 (pol, az) trace in Igor "New Polar" (default).
/// @arg 2 XPDplot (reserved, not implemented).
/// @arg 3 matrix in Igor "New Polar".
/// the matrix wave is a 2D wave with X and Y scaling corresponding to the selected projection.
/// matrix waves can be created by interpolate_hemi_scan().
/// note: the pol and az waves are required as well.
///
/// @param do_ticks select which ticks to draw.
/// value must be the arithmetic OR of all selected items.
/// default: 3
/// @arg 0 none
/// @arg 1 major azimuthal
/// @arg 2 minor azimuthal
///
/// @param do_grids select which grids to draw.
/// value must be the arithmetic OR of all selected items.
/// default: 3
/// @arg 0 none
/// @arg 1 radius at 0 and 90 degree azimuth
/// @arg 2 circle at 30 and 60 degree polar
///
/// @param graphname name of graph window. default: nickname
/// if empty, a default name is assigned.
@ -1275,10 +1342,12 @@ end
///
/// @returns the name of the graph window
///
function /s display_hemi_scan(nickname, [projection, graphtype, graphname])
function /s display_hemi_scan(nickname, [projection, graphtype, do_ticks, do_grids, graphname])
string nickname
variable projection
variable graphtype
variable do_ticks
variable do_grids
string graphname
if (ParamIsDefault(projection))
@ -1287,6 +1356,12 @@ function /s display_hemi_scan(nickname, [projection, graphtype, graphname])
if (ParamIsDefault(graphtype))
graphtype = 1
endif
if (ParamIsDefault(do_ticks))
do_ticks = 3
endif
if (ParamIsDefault(do_grids))
do_grids = 3
endif
if (ParamIsDefault(graphname))
if (strlen(nickname) > 0)
graphname = nickname
@ -1305,9 +1380,12 @@ function /s display_hemi_scan(nickname, [projection, graphtype, graphname])
endif
string s_polar = s_prefix + "pol"
string s_azim = s_prefix + "az"
string s_matrix = s_prefix + "matrix"
wave /z values = $s_int
wave /z azim = $s_azim
wave /z polar = $s_polar
wave /z matrix = $s_matrix
string s_ster_rad = s_prefix + "ster_rad"
duplicate /o polar, $s_ster_rad /wave=ster_rad
@ -1325,11 +1403,12 @@ function /s display_hemi_scan(nickname, [projection, graphtype, graphname])
azim_offset = 180 // changed 151030 (v1.6)
endif
string s_trace
switch(graphtype)
case 1:
graphname = display_polar_graph(graphname, angle_offset=azim_offset)
graphname = display_polar_graph(graphname, angle_offset=azim_offset, do_ticks=do_ticks)
string s_trace = WMPolarAppendTrace(graphname, ster_rad, azim, 360)
s_trace = WMPolarAppendTrace(graphname, ster_rad, azim, 360)
ModifyGraph /W=$graphname mode($s_trace)=2, lsize($s_trace)=2
ModifyGraph /W=$graphname zColor($s_trace)={values,*,*,BlueGreenOrange,0}
@ -1338,7 +1417,21 @@ function /s display_hemi_scan(nickname, [projection, graphtype, graphname])
ColorScale /W=$graphname /C /N=text0 nticks=2, minor=1, tickLen=4.00, tickThick=0.50
SetWindow $graphname, userdata(projection)=num2str(projection)
draw_hemi_axes(graphname)
draw_hemi_axes(graphname, do_grids=do_grids)
break
case 3:
graphname = display_polar_graph(graphname, angle_offset=azim_offset, do_ticks=do_ticks)
s_trace = WMPolarAppendTrace(graphname, ster_rad, azim, 360)
ModifyGraph /W=$graphname mode($s_trace)=0, lsize($s_trace)=0
AppendImage /L=VertCrossing /B=HorizCrossing matrix
ColorScale /W=$graphname /C /N=text0 /E=2 /F=0 /B=1 /A=RB /X=0.00 /Y=0.00 image=$s_matrix
ColorScale /W=$graphname /C /N=text0 side=2, width=5, heightPct=40, frame=0.50, lblMargin=0
ColorScale /W=$graphname /C /N=text0 nticks=2, minor=1, tickLen=4.00, tickThick=0.50
SetWindow $graphname, userdata(projection)=num2str(projection)
draw_hemi_axes(graphname, do_grids=do_grids)
break
endswitch
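To make the new optional parameters concrete, a hedged usage sketch (hypothetical nick name, waves assumed to exist in the current data folder):
// usage sketch (hypothetical nick name "hemi1")
// stereographic plot with major azimuthal ticks only and radial grid lines but no polar circles
display_hemi_scan("hemi1", projection=1, graphtype=1, do_ticks=1, do_grids=1)
// graphtype=3 additionally requires the hemi1_matrix wave created by interpolate_hemi_scan()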
@ -1373,22 +1466,33 @@ end
/// for hemi grids created with earlier versions,
/// it should be set to 180 for correct orientation.
///
/// @param do_ticks select which ticks to draw.
/// value must be the arithmetic OR of all selected items.
/// default: 3
/// @arg 0 none
/// @arg 1 major azimuthal
/// @arg 2 minor azimuthal
///
/// @returns the name of the graph window.
///
/// @version 1.7
/// interface change: the trace drawing code is moved to display_hemi_scan,
/// so that this function can be reused by other graph types, e.g. display_scanlines.
///
static function /s display_polar_graph(graphname, [angle_offset])
static function /s display_polar_graph(graphname, [angle_offset, do_ticks])
string graphname
variable angle_offset
variable do_ticks
dfref savedf = GetDataFolderDFR()
if (ParamIsDefault(angle_offset))
angle_offset = 0
endif
if (ParamIsDefault(do_ticks))
do_ticks = 3
endif
if ((strlen(graphname) == 0) || (wintype(graphname) == 0))
Display /k=1 /W=(10,45,360,345)
@ -1408,15 +1512,28 @@ static function /s display_polar_graph(graphname, [angle_offset])
WMPolarGraphSetVar(graphname, "doPolarGrids", 0)
WMPolarGraphSetVar(graphname, "doRadiusTickLabels", 0)
WMPolarGraphSetStr(graphname, "radiusAxisWhere", " Off ")
WMPolarGraphSetStr(graphname, "radiusAxesWhere", " Off") // note the leading spaces, cf. WMPolarAnglesForRadiusAxes
WMPolarGraphSetStr(graphname, "radiusTicksLocation", "Off")
WMPolarGraphSetVar(graphname, "majorTickLength", 5)
WMPolarGraphSetVar(graphname, "majorTickLength", 2)
WMPolarGraphSetVar(graphname, "majorTickThick", 0.5)
WMPolarGraphSetVar(graphname, "minorTickLength", 3)
WMPolarGraphSetVar(graphname, "minorTickLength", 1)
WMPolarGraphSetVar(graphname, "minorTickThick", 0.5)
WMPolarGraphSetVar(graphname, "tickLabelOpaque", 0)
WMPolarGraphSetVar(graphname, "tickLabelFontSize", 7)
// configure the angular ticks according to do_ticks
if (do_ticks & 1)
WMPolarGraphSetStr(graphname, "angleTicksLocation", "Outside")
else
WMPolarGraphSetStr(graphname, "angleTicksLocation", "Off")
endif
if (do_ticks & 2)
WMPolarGraphSetVar(graphname, "doMinorAngleTicks", 1)
else
WMPolarGraphSetVar(graphname, "doMinorAngleTicks", 0)
endif
DoWindow /T $graphname, graphname
// cursor info in angles
@ -1452,11 +1569,11 @@ static function /s display_polar_graph(graphname, [angle_offset])
return graphname
end
/// draw polar and azimuthal axes in an existing polar graph
/// draw polar and azimuthal grids in an existing polar graph.
///
/// the function adds the following draw objects to a polar graph:
/// * concentric circles at polar angles 0, 30, 60, and 90 degrees with labels.
/// * labels for azimuthal angles at 0, 30, 60, and 90 degrees.
/// * concentric circles at polar angles 0, 30, and 60 degrees with labels.
/// * radial axes at 0 and 90 degree azimuth.
///
/// the objects are added to the ProgFront drawing layer and will appear in front of the data trace.
/// in interactive drawing mode, you can select the active drawing layer by clicking the tree icon
@ -1467,11 +1584,23 @@ end
///
/// @param graphname name of graph window.
///
/// @param do_grids select which optional grids to draw.
/// value must be the arithmetic OR of all selected items.
/// default: 3
/// @arg 0 none
/// @arg 1 radius at 0 and 90 degree azimuth
/// @arg 2 circle at 30 and 60 degree polar
///
/// @warning EXPERIMENTAL!
/// this function is under development.
/// the interface and behaviour of this function may change significantly in future versions.
static function /s draw_hemi_axes(graphname)
static function /s draw_hemi_axes(graphname, [do_grids])
string graphname
variable do_grids
if (ParamIsDefault(do_grids))
do_grids = 3
endif
dfref savedf = GetDataFolderDFR()
@ -1490,26 +1619,96 @@ static function /s draw_hemi_axes(graphname)
//SetDrawEnv /W=$graphname linefgc=(65535,65535,65535)
SetDrawEnv /W=$graphname save
variable radi
radi = calc_graph_radius(0.5, projection=projection)
DrawOval /W=$graphname -radi, radi, radi, -radi
radi = calc_graph_radius(30, projection=projection)
DrawOval /W=$graphname -radi, radi, radi, -radi
radi = calc_graph_radius(60, projection=projection)
DrawOval /W=$graphname -radi, radi, radi, -radi
DrawLine /W=$graphname 0, -2, 0, 2
DrawLine /W=$graphname -2, 0, 2, 0
if (do_grids & 1)
DrawLine /W=$graphname 0, -2, 0, 2
DrawLine /W=$graphname -2, 0, 2, 0
endif
SetDrawEnv /W=$graphname textxjust= 1,textyjust= 2
SetDrawEnv /W=$graphname save
radi = calc_graph_radius(30, projection=projection)
DrawText /W=$graphname radi, -0.1, "30<33>"
radi = calc_graph_radius(60, projection=projection)
DrawText /W=$graphname radi, -0.1, "60<36>"
variable radi
if (do_grids & 2)
radi = calc_graph_radius(0.5, projection=projection)
DrawOval /W=$graphname -radi, radi, radi, -radi
radi = calc_graph_radius(30, projection=projection)
DrawOval /W=$graphname -radi, radi, radi, -radi
radi = calc_graph_radius(60, projection=projection)
DrawOval /W=$graphname -radi, radi, radi, -radi
SetDrawEnv /W=$graphname textxjust= 1,textyjust= 2
SetDrawEnv /W=$graphname save
radi = calc_graph_radius(30, projection=projection)
DrawText /W=$graphname radi, -0.1, "30<33>"
radi = calc_graph_radius(60, projection=projection)
DrawText /W=$graphname radi, -0.1, "60<36>"
endif
setdatafolder savedf
end
/// draw the circle of a diffraction cone in a stereographic polar graph.
///
/// the diffraction cone consists of a circle marking the diffraction ring, and a dot marking the axis.
/// the cone is drawn as a group of draw objects on the UserFront layer.
/// the objects can be edited interactively.
///
/// @param graphname name of graph window (not implemented yet).
///
/// @param groupname name of a drawing group.
/// if the group exists (from a previous cone) it is replaced.
/// if the group doesn't exist, a new one is created.
///
/// @param theta_axis polar angle of the cone axis in degrees.
///
/// @param theta_inner polar angle of the innermost point of the circle in degrees.
///
/// @param phi azimuthal angle of the cone axis in degrees.
///
/// @warning EXPERIMENTAL!
/// this function is under development.
/// the interface and behaviour of this function may change significantly in future versions.
///
function draw_diffraction_cone(graphname, groupname, theta_axis, theta_inner, phi)
string graphname
string groupname
variable theta_axis
variable theta_inner
variable phi
variable r_axis = calc_graph_radius(theta_axis)
variable r_inner = calc_graph_radius(theta_inner)
variable r_outer = calc_graph_radius(2 * theta_axis - theta_inner)
SetDrawEnv push
SetDrawLayer UserFront
DrawAction getgroup=$groupname, delete
SetDrawEnv gstart, gname=$groupname
variable xc, yc, xr, yr
// cone periphery
variable r_center = (r_outer + r_inner) / 2
variable r_radius = (r_outer - r_inner) / 2
xc = r_center * cos(phi * pi / 180)
yc = r_center * sin(phi * pi / 180)
xr = r_radius
yr = r_radius
SetDrawEnv xcoord=HorizCrossing, ycoord=VertCrossing
SetDrawEnv dash=11, fillpat=0
DrawOval xc - xr, yc - yr, xc + xr, yc + yr
// cone axis
xc = r_axis * cos(phi * pi / 180)
yc = r_axis * sin(phi * pi / 180)
r_radius = calc_graph_radius(2)
xr = r_radius
yr = r_radius
SetDrawEnv xcoord=HorizCrossing, ycoord=VertCrossing
SetDrawEnv fillfgc=(0,0,0)
DrawOval xc - xr, yc - yr, xc + xr, yc + yr
SetDrawEnv gstop
SetDrawEnv pop
end
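A hypothetical call for orientation (group name and angles are illustrative only):
// usage sketch: mark a diffraction ring whose axis is at polar 45, azimuth 90 degrees,
// with the innermost point of the ring at polar 35 degrees, on the top stereographic graph
draw_diffraction_cone("", "cone1", 45, 35, 90)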
/// display a polar graph with lines indicating the angles covered by an angle scan.
///
/// @param nickname nick name for output data.
@ -2021,7 +2220,13 @@ function hemi_add_aziscan(nickname, values, polar, azi, [weights])
w_values[d1, d1 + nd - 1] = w_totals[p] / w_weights[p]
end
/// interpolate a hemispherical scan onto a rectangular grid
///
/// @warning experimental
/// this function has been tested for one specific set of scan parameters.
/// the interface and code may change at any time.
/// the function depends on the ster_x and ster_y waves that are created by display_hemi_scan.
///
function interpolate_hemi_scan(nickname)
string nickname
@ -2044,21 +2249,26 @@ function interpolate_hemi_scan(nickname)
wave ster_x = $s_ster_x
wave ster_y = $s_ster_y
wavestats /q/m=0 ster_x
variable x0 = v_min
variable min_ster_x = wavemin(ster_x)
variable max_ster_x = wavemax(ster_x)
variable x0 = min_ster_x
variable xn = 181
variable dx = (v_max - v_min) / (xn - 1)
make /n=(v_npnts, 3) /free triplet
variable dx = (max_ster_x - min_ster_x) / (xn - 1)
make /n=(numpnts(ster_x), 3) /free triplet
triplet[][0] = ster_x[p]
triplet[][1] = ster_y[p]
triplet[][2] = values[p]
//ImageInterpolate /stw /s={x0, dx, xn, x0, dx, xn} voronoi triplet
make /n=(181, 181) /d /o $(s_prefix + "matrix") /wave=matrix
make /n=(181, 181) /free mnorm
variable size = 181
make /n=(size, size) /d /o $(s_prefix + "matrix") /wave=matrix
make /n=(size, size) /free mnorm
ImageFromXYZ /as {ster_x, ster_y, values}, matrix, mnorm
matrix /= mnorm
//matrixfilter NanZapMedian, matrix
matrixfilter NanZapMedian, matrix
matrixfilter gauss, matrix
matrix = (x^2 + y^2) < 4 ? matrix : nan
end
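The intended call sequence, sketched with a hypothetical nick name (per the warning above, interpolate_hemi_scan needs the ster_x and ster_y waves that display_hemi_scan creates):
// workflow sketch (hypothetical nick name "hemi1")
display_hemi_scan("hemi1")	// creates the hemi1_ster_x / hemi1_ster_y waves for the current projection
interpolate_hemi_scan("hemi1")	// builds the 181 x 181 hemi1_matrix wave
display_hemi_scan("hemi1", graphtype=3)	// re-display the data as an interpolated matrix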
/// map angle scan data onto a rectangular grid in stereographic projection
@ -2234,3 +2444,44 @@ function load_hemi_scan(nickname, pathname, filename)
setdatafolder saveDF
end
/// import a hemispherical scan from theta-phi-intensity waves and display it
///
/// @warning EXPERIMENTAL
/// the interface and behaviour of this function may change
///
function import_tpi_scan(nickname, theta, phi, intensity, [folding, npolar, nograph, xpdplot])
string nickname
wave theta
wave phi
wave intensity
variable folding
variable npolar
variable nograph
variable xpdplot
if (ParamIsDefault(npolar))
npolar = 91
endif
if (ParamIsDefault(nograph))
nograph = 0
endif
if (ParamIsDefault(folding))
folding = 1
endif
if (ParamIsDefault(xpdplot))
xpdplot = 0
endif
make_hemi_grid(npolar, nickname, xpdplot=xpdplot)
variable ifold
duplicate /free phi, fold_phi
for (ifold = 0; ifold < folding; ifold += 1)
hemi_add_anglescan(nickname, intensity, theta, fold_phi)
fold_phi = fold_phi >= 180 ? fold_phi + 360 / folding - fold_phi : fold_phi + 360 / folding
endfor
display_hemi_scan(nickname)
end
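A usage sketch with hypothetical input waves (theta and phi in degrees, one point per measured direction):
// usage sketch (hypothetical waves w_theta, w_phi, w_int of equal length)
import_tpi_scan("tpi1", w_theta, w_phi, w_int, folding=3, npolar=91)	// three-fold symmetrized import on a 1-degree polar grid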

pearl-area-import.ipf (modified)

@ -3,23 +3,35 @@
#pragma ModuleName = PearlAreaImport
#pragma version = 1.06
#include <HDF5 Browser>
#include "pearl-gui-tools", version >= 1.01
#include "pearl-gui-tools"
// HDF5 file import from EPICS area detectors
// such as CCD cameras, 2D electron analysers
// copyright (c) 2013-16 Paul Scherrer Institut
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
// http://www.apache.org/licenses/LICENSE-2.0
// restriction: as of version 6.3, Igor can open datasets of up to rank 4.
// i.e. the extra dimension Y of the file plugin cannot be used.
// the extra dimensions N and X are supported.
/// @file
/// @brief HDF5 file import from EPICS area detectors
/// @ingroup ArpesPackage
///
///
/// HDF5 file import from EPICS area detectors
/// such as CCD cameras, 2D electron analysers
///
/// as of Igor 6.3, Igor can open datasets of up to rank 4.
/// i.e. the extra dimension Y of the file plugin cannot be used.
/// the extra dimensions N and X are supported.
// created: matthias.muntwiler@psi.ch, 2013-05-31
// Copyright (c) 2013 Paul Scherrer Institut
// $Id$
/// @namespace PearlAreaImport
/// @brief HDF5 file import from EPICS area detectors
///
/// PearlAreaImport is declared in @ref pearl-area-import.ipf.
//------------------------------------------------------------------------------
/// callback function for drag&drop of HDF5 files into Igor.
///
static function BeforeFileOpenHook(refNum,fileName,path,type,creator,kind)
// allows drag&drop of data files into an open igor window
// this works only with igor 5.02 or later
variable refNum, kind
string fileName, path, type, creator
@ -57,20 +69,37 @@ static function BeforeFileOpenHook(refNum,fileName,path,type,creator,kind)
return handledOpen // 1 tells Igor not to open the file
End
/// generate the name of a data folder based on a file name.
///
/// if the file name follows the naming convention source-date-index.extension,
/// the function tries to generate the nick name as source_date_index.
/// otherwise it's just a cleaned up version of the file name.
///
/// date must be in yyyymmdd or yymmdd format and is clipped to the short yymmdd format.
/// index should be a running numeric index of up to 6 digits, or the time encoded as hhmmss.
/// however, in the current version index can be any string that can be a valid Igor folder name.
///
/// @param filename file name, including extension. can also include a folder path (which is ignored).
/// the extension is currently ignored, but may be used to select the parent folder in a later version.
/// @param ignoredate if non-zero, the nick name will not include the date part.
/// defaults to zero.
/// @param sourcename nick name of the data source.
/// by default, the function tries to detect the source from the file name.
/// this option can be used to override auto-detection.
/// the automatic source names are:
/// sci (scienta by area detector),
/// psh (pshell),
/// sl (optics slit camera by area detector),
/// es (end station camera by area detector),
/// xy (unidentified).
/// @param unique if non-zero, the resulting name is made a unique data folder name in the current data folder
/// defaults to zero.
///
function /s ad_suggest_foldername(filename, [ignoredate,sourcename,unique])
// suggests the name of a data folder based on a file name
// if the file name follows the naming convention source-date-index.extension,
// the function tries to generate the nick name as source_date_index.
// otherwise it's just a cleaned up version of the file name.
string filename // file name, including extension. can also include a folder path (which is ignored)
// the extension is currently ignored, but may be used later to select the parent folder
variable ignoredate // if non-zero, the nick name will not include the date part
// defaults to zero
string sourcename // nick name of the data source
// the function tries to detect the source from the file name
// this option can be used to override auto-detection
variable unique // if non-zero, the resulting name is made a unique data folder name in the current data folder
// defaults to zero
string filename
variable ignoredate
string sourcename
variable unique
if (ParamIsDefault(ignoredate))
ignoredate = 0
@ -86,6 +115,8 @@ function /s ad_suggest_foldername(filename, [ignoredate,sourcename,unique])
string autosource
if (strsearch(basename, "scienta", 0, 2) >= 0)
autosource = "sci"
elseif (strsearch(basename, "pshell", 0, 2) >= 0)
autosource = "psh"
elseif (strsearch(basename, "OP-SL", 0, 2) >= 0)
autosource = "sl"
elseif (strsearch(basename, "ES-PS", 0, 2) >= 0)
@ -99,8 +130,12 @@ function /s ad_suggest_foldername(filename, [ignoredate,sourcename,unique])
variable nparts = ItemsInList(basename, "-")
if (nparts >= 3)
string datepart = StringFromList(nparts - 2, basename, "-")
string indexpart = StringFromList(nparts - 1, basename, "-")
string datepart = StringFromList(1, basename, "-")
variable l_datepart = strlen(datepart)
if (l_datepart == 8)
datepart = datepart[l_datepart-6, l_datepart-1]
endif
string indexpart = StringFromList(2, basename, "-")
if (ignoredate)
sprintf nickname, "%s_%s", sourcename, indexpart
else
@ -117,10 +152,12 @@ function /s ad_suggest_foldername(filename, [ignoredate,sourcename,unique])
return nickname
end
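For orientation, a hypothetical file name following the source-date-index convention (the exact output depends on the sprintf branch not shown in this hunk):
// example (hypothetical file name)
print ad_suggest_foldername("pshell-20160309-123456.h5")
// expected to print something like "psh_160309_123456"
// (source detected as "psh", date clipped to yymmdd, running index kept)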
/// load area detector data files selected in a file dialog window
///
/// @param APathName Igor symbolic path name.
/// if empty, Igor will choose a folder on its own
function ad_load_dialog(APathName)
// loads data files selected in a file dialog window
string APathName // igor symbolic path name
// if empty, Igor will choose a folder on its own
string APathName
variable refNum
string message = "Select data files"
@ -148,17 +185,23 @@ function ad_load_dialog(APathName)
setdatafolder saveDF
end
//------------------------------------------------------------------------------
/// import everything from a HDF5 file created by the Area Detector software.
///
/// if the data is from the electron analyser driver and some special attributes are included,
/// the function will set the scales of the image dimensions.
///
/// @param ANickName destination folder name (top level under root)
/// @param APathName igor symbolic path name. can be empty if the path is specified in FileName or a dialog box should be displayed
/// @param AFileName if empty a dialog box shows up
/// @param load_data 1 (default): load data; 0: do not load data
/// @param load_attr 1 (default): load attributes; 0: do not load attributes
/// for proper wave scaling, the attributes must be loaded
function /s adh5_load_complete(ANickName, APathName, AFileName, [load_data, load_attr])
// this function loads everything from a HDF5 file created by the Area Detector software.
// if the data is from the electron analyser driver and some special attributes are included,
// the function will set the scales of the image dimensions.
string ANickName // destination folder name (top level under root)
string APathName // igor symbolic path name. can be empty if the path is specified in FileName or a dialog box should be displayed
string AFileName // if empty a dialog box shows up
variable load_data // 1 (default): load data; 0: do not load data
variable load_attr // 1 (default): load attributes; 0: do not load attributes
// for proper wave scaling, the attributes must be loaded
string ANickName
string APathName
string AFileName
variable load_data
variable load_attr
if (ParamIsDefault(load_data))
load_data = 1
@ -222,30 +265,43 @@ function /s adh5_load_complete(ANickName, APathName, AFileName, [load_data, load
return AFileName
end
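A minimal call sketch (hypothetical nick name, symbolic path and file name; empty arguments fall back to a file dialog as documented above):
// usage sketch (hypothetical names)
adh5_load_complete("sci_160309_123456", "pearl_explorer_filepath", "scienta-20160309-123456.h5")
adh5_load_complete("preview", "", "")	// empty path and file name bring up a file dialog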
/// load and reduce a dataset from a HDF5 file created by the Area Detector software.
///
/// the resulting dataset is reduced in one image dimension by a user-defined reduction function,
/// e.g. by region-of-interest integration, curve fitting, etc.
///
/// the function loads the dataset image by image using the hyperslab option
/// and applies a custom reduction function to each image.
/// the results from the reduction function are composed into one result wave.
/// the raw data are discarded.
///
/// if the data is from the electron analyser driver and some special attributes are included,
/// the function will set the scales of the image dimensions.
///
/// @param ANickName destination folder name (top level under root)
/// @param APathName igor symbolic path name. can be empty if the path is specified in FileName or a dialog box should be displayed
/// @param AFileName if empty a dialog box shows up
///
/// @param reduction_func custom reduction function
/// (any user-defined function which has the same parameters as adh5_default_reduction())
/// @param reduction_param parameter string for the reduction function
///
/// @param load_data 1 (default): load data; 0: do not load data
/// @param load_attr 1 (default): load attributes; 0: do not load attributes
/// for proper wave scaling, the attributes must be loaded
/// @param progress 1 (default): show progress window; 0: do not show progress window
///
function /s adh5_load_reduced(ANickName, APathName, AFileName, reduction_func, reduction_param, [load_data, load_attr, progress])
// this function loads a reduced dataset from a HDF5 file created by the Area Detector software.
// the resulting dataset is reduced in one image dimension by a user-defined reduction function,
// e.g. by region-of-interest integration, curve fitting, etc.
string ANickName
string APathName
string AFileName
// the function loads the dataset image by image using the hyperslab option
// and applies a custom reduction function to each image.
// the results from the reduction function are composed into one result wave.
// the raw data are discarded.
funcref adh5_default_reduction reduction_func
string reduction_param
// if the data is from the electron analyser driver and some special attributes are included,
// the function will set the scales of the image dimensions.
string ANickName // destination folder name (top level under root)
string APathName // igor symbolic path name. can be empty if the path is specified in FileName or a dialog box should be displayed
string AFileName // if empty a dialog box shows up
funcref adh5_default_reduction reduction_func // custom reduction function
// (any user-defined function which has the same parameters as adh5_default_reduction())
string reduction_param // parameter string for the reduction function
variable load_data // 1 (default): load data; 0: do not load data
variable load_attr // 1 (default): load attributes; 0: do not load attributes
// for proper wave scaling, the attributes must be loaded
variable progress // 1 (default): show progress window; 0: do not show progress window
variable load_data
variable load_attr
variable progress
if (ParamIsDefault(load_data))
load_data = 1
@ -306,18 +362,26 @@ function /s adh5_load_reduced(ANickName, APathName, AFileName, reduction_func, r
return AFileName
end
/// load a single image from a HDF5 file created by the Area Detector software.
///
/// the data wave is loaded into the current data folder.
/// attributes are loaded into the attr subfolder. existing waves in attr are deleted.
///
/// @warning EXPERIMENTAL
/// this function uses the root:pearl_area:preview data folder. existing data there may be deleted!
///
/// @param ANickName destination wave name. the wave is created in the current data folder.
/// @param APathName igor symbolic path name. can be empty if the path is specified in FileName or a dialog box should be displayed
/// @param AFileName if empty a dialog box shows up
/// @param load_data 1 (default): load data; 0: do not load data
/// @param load_attr 1 (default): load attributes; 0: do not load attributes
/// note: for correct scaling of the image, the attributes need to be loaded
function /s adh5_load_preview(ANickName, APathName, AFileName, [load_data, load_attr])
// this function loads one image from a HDF5 file created by the Area Detector software.
// the data wave is loaded into the current data folder.
// attributes are loaded into the attr subfolder. existing waves in attr are deleted.
// EXPERIMENTAL
// this function uses the root:pearl_area:preview data folder. existing data there may be deleted!
string ANickName // destination wave name. the wave is created in the current data folder.
string APathName // igor symbolic path name. can be empty if the path is specified in FileName or a dialog box should be displayed
string AFileName // if empty a dialog box shows up
variable load_data // 1 (default): load data; 0: do not load data
variable load_attr // 1 (default): load attributes; 0: do not load attributes
// note: for correct scaling of the image, the attributes need to be loaded
string ANickName
string APathName
string AFileName
variable load_data
variable load_attr
if (ParamIsDefault(load_data))
load_data = 1
@ -412,13 +476,19 @@ function /s adh5_load_preview(ANickName, APathName, AFileName, [load_data, load_
return AFileName
end
/// load descriptive info from a HDF5 file created by the Area Detector software.
///
/// the information returned is the array size and active scans
///
/// @attention EXPERIMENTAL
/// this function should be merged with adh5_load_preview
///
/// @param APathName igor symbolic path name. can be empty if the path is specified in FileName or a dialog box should be displayed
/// @param AFileName if empty a dialog box shows up
///
function /s adh5_load_info(APathName, AFileName)
// this function loads descriptive info from a HDF5 file created by the Area Detector software.
// the information returned is the array size and active scans
// EXPERIMENTAL
// this function should be merged with adh5_load_preview
string APathName // igor symbolic path name. can be empty if the path is specified in FileName or a dialog box should be displayed
string AFileName // if empty a dialog box shows up
string APathName
string AFileName
dfref saveDF = GetDataFolderDFR()
@ -492,12 +562,17 @@ function /s adh5_load_info(APathName, AFileName)
return s_info
end
/// load the detector dataset from the open HDF5 file.
///
/// the function loads the whole dataset at once
/// and redimensions it so that the image dimensions are X and Y
///
/// @param fileID ID of open HDF5 file from HDF5OpenFile
/// @param detectorpath path to detector group in the HDF5 file
///
function adh5_load_detector(fileID, detectorpath)
// loads the detector dataset from the open HDF5 file
// the function loads the whole dataset at once
// and redimensions it so that the image dimensions are X and Y
variable fileID // ID of open HDF5 file from HDF5OpenFile
string detectorpath // path to detector group in the HDF5 file
variable fileID
string detectorpath
// avoid compilation error if HDF5 XOP has not been loaded
#if Exists("HDF5LoadData")
@ -526,19 +601,22 @@ function adh5_load_detector(fileID, detectorpath)
#endif
end
/// redimension a multi-dimensional area detector array loaded from HDF5
/// so that the image dimensions are X and Y.
///
/// singleton dimensions are removed (most common cases only).
///
/// in the redimensioned array, the original dimension type is noted in the dimension label:
/// AD_Dim0 = first image dimension
/// AD_Dim1 = second image dimension
/// AD_DimN = frame sequence
/// AD_DimX = extra dimension X
/// AD_DimY = extra dimension Y (cannot be loaded in Igor)
///
/// @param data area detector data loaded from HDF5 to be redimensioned
///
function adh5_redim(data)
// redimensions a multi-dimensional area detector array loaded from HDF5
// so that the image dimensions are X and Y
// singleton dimensions are removed (most common cases only)
// in the redimensioned array, the original dimension type is noted in the dimension label:
// AD_Dim0 = first image dimension
// AD_Dim1 = second image dimension
// AD_DimN = frame sequence
// AD_DimX = extra dimension X
// AD_DimY = extra dimension Y (cannot be loaded in Igor)
wave data // area detector data loaded from HDF5 to be redimensioned
wave data
duplicate /free data, tempdata
variable nd = wavedims(tempdata)
@ -608,29 +686,36 @@ function adh5_redim(data)
endswitch
end
/// find the attributes data folder of an area detector dataset.
///
/// since version 1.04 attributes should be stored in a subfolder named attr.
/// earlier versions had the attributes in the same data folder as the actual dataset.
///
/// @param data wave containing the main dataset.
///
/// @return data folder reference of the attributes folder.
/// the reference may be invalid (and default to root) if the folder cannot be found,
/// cf. built-in DataFolderRefStatus function.
static function /DF GetAttrDataFolderDFR(data)
// returns a data folder reference to the ND attributes
// since version 1.04 attributes should be written in a subfolder named attr
// earlier versions had the attributes in the same data folder as the actual dataset
wave data
dfref saveDF = GetDataFolderDFR()
dfref dataDF = GetWavesDataFolderDFR(data)
setdatafolder dataDF
if (DataFolderExists(":attr"))
setdatafolder :attr
dfref attrDF = dataDF:attr
if (DataFolderRefStatus(attrDF) == 0)
attrDF = dataDF
endif
dfref attrDF = GetDataFolderDFR()
setdatafolder saveDF
return attrDF
end
/// set the dimension scales of an area detector dataset.
///
/// the intrinsic dimensions 0 and 1 are scaled according to the data source
/// (currently supported: Prosilica cameras, Scienta electron analyser).
/// the extra dimensions are scaled according to the scan.
/// the latter requires that the positioner names and position values are available.
///
function adh5_scale(data,[source])
// tries to set the dimension scales of an area detector dataset.
// the intrinsic dimensions 0 and 1 are scaled according to the data source
// (currently supported: Prosilica cameras, Scienta electron analyser).
// the extra dimensions are scaled according to the scan.
// the latter requires that the positioner names and position values are available.
wave data
string source
@ -671,14 +756,23 @@ function adh5_scale(data,[source])
setdatafolder saveDF
end
/// load the detector dataset from the open HDF5 file.
///
/// the function loads the dataset image by image using the hyperslab option.
/// this function gives the same result as adh5_load_detector.
/// it is about 5% slower, and it depends on HDF5 Browser code.
/// but it does not choke on large datasets (as long as the final wave fits into memory).
///
/// @param fileID ID of open HDF5 file from HDF5OpenFile.
/// @param detectorpath path to detector group in the HDF5 file.
/// @param progress 1 (default): show progress window; 0: do not show progress window.
///
/// @return 0 if successful, non-zero if an error occurred.
///
function adh5_load_detector_slabs(fileID, detectorpath, [progress])
// loads the detector dataset from the open HDF5 file
// the function loads the dataset image by image using the hyperslab option
// this function gives the same result as adh5_load_detector
// it is about 5% slower, and it depends on HDF5 Browser code.
variable fileID // ID of open HDF5 file from HDF5OpenFile
string detectorpath // path to detector group in the HDF5 file
variable progress // 1 (default): show progress window; 0: do not show progress window
variable fileID
string detectorpath
variable progress
if (ParamIsDefault(progress))
progress = 1
@ -821,23 +915,32 @@ function adh5_load_detector_slabs(fileID, detectorpath, [progress])
return result
end
/// load a single image from the detector dataset of the open HDF5 file
///
/// the function can average over a region in the extra dimensions.
///
/// @param fileID ID of open HDF5 file from HDF5OpenFile
/// @param detectorpath path to detector group in the HDF5 file
/// @param dim2start 2nd dimension coordinate of the first image
/// note that the order of dimensions is reversed in the file
/// 2nd dimension = N dimension in area detector = dimension 0 of the three-dimensional HDF dataset
/// set to 0 if dimension may not be present
/// @param dim2count number of subsequent images to average
/// set to 1 if dimension may not be present
/// @param dim3start 3rd dimension coordinate of the first image
/// note that the order of dimensions is reversed in the file
/// 3rd dimension = extra X dimension in area detector = dimension 0 of the four-dimensional HDF dataset
/// set to 0 if dimension may not be present
/// @param dim3count number of subsequent images to average
/// set to 1 if dimension may not be present
///
function adh5_load_detector_image(fileID, detectorpath, dim2start, dim2count, dim3start, dim3count)
// loads a single image from the detector dataset of the open HDF5 file
// the function can average over a region in the extra dimensions
variable fileID // ID of open HDF5 file from HDF5OpenFile
string detectorpath // path to detector group in the HDF5 file
variable dim2start // 2nd dimension coordinate of the first image
// note that the order of dimensions is reversed in the file
// 2nd dimension = N dimension in area detector = dimension 0 of the three-dimensional HDF dataset
// set to 0 if dimension may not be present
variable dim2count // number of subsequent images to average
// set to 1 if dimension may not be present
variable dim3start // 3rd dimension coordinate of the first image
// note that the order of dimensions is reversed in the file
// 3rd dimension = extra X dimension in area detector = dimension 0 of the four-dimensional HDF dataset
// set to 0 if dimension may not be present
variable dim3count // number of subsequent images to average
// set to 1 if dimension may not be present
variable fileID
string detectorpath
variable dim2start
variable dim2count
variable dim3start
variable dim3count
// avoid compilation error if HDF5 XOP has not been loaded
#if Exists("HDF5LoadData")
@ -922,10 +1025,12 @@ function adh5_load_detector_image(fileID, detectorpath, dim2start, dim2count, di
#endif
end
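To make the dimension arguments concrete (hypothetical fileID and detector path; a three-dimensional dataset where dimension 2 counts the frames is assumed):
// usage sketch (hypothetical fileID and detector path)
// average frames 10..19 of a frame series; the extra X dimension is absent, so dim3start = 0 and dim3count = 1
adh5_load_detector_image(fileID, "/entry/instrument/detector", 10, 10, 0, 1)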
/// get a list of functions which can be used as reduction functions.
///
/// the function evaluates only the function arguments,
/// it may thus include functions which are not suitable as reduction functions.
///
function /s adh5_list_reduction_funcs()
// returns a list of functions which can be used as reduction functions
// the function evaluates only the function arguments,
// it may thus include functions which are not suitable as reduction functions.
string all_funcs = FunctionList("*", ";", "KIND:6,NPARAMS:4,VALTYPE:1")
string result = ""
@ -960,26 +1065,35 @@ function /s adh5_list_reduction_funcs()
return result
end
/// function prototype for adh5_load_reduced_detector
///
/// derived functions reduce a two-dimensional dataset to a one-dimensional dataset,
/// e.g. by ROI-integration, curve fitting, etc.
/// the resulting wave must have the same size as either dimension of the source image.
///
/// each destination wave is a one-dimensional intensity distribution.
/// the function must redimension each of these waves to one of the image dimensions
/// by calling the adh5_setup_profile() function.
/// this function will also copy the scale information and dimension labels,
/// which is important for the proper scaling of the result.
///
/// the meaning of the data in dest1 and dest2 is up to the particular function,
/// e.g. dest1 could hold the mean value and dest2 the one-sigma error,
/// or dest1 could hold the X-profile, and dest2 the Y-profile.
///
/// @param source source wave
/// two-dimensional intensity distribution (image)
/// @param dest1, dest2 destination waves
/// @param param string with optional parameters, shared between calls.
/// this is a pass-by-reference argument,
/// the function may modify the string
///
/// @return zero if successful, non-zero if an error occurs.
///
threadsafe function adh5_default_reduction(source, dest1, dest2, param)
// function prototype for adh5_load_reduced_detector
// derived functions reduce a two-dimensional dataset to a one-dimensional dataset,
// e.g. by ROI-integration, curve fitting, etc.
// the resulting wave must have the same size as either dimension of the source image.
wave source // source wave
// two-dimensional intensity distribution (image)
wave dest1, dest2 // destination waves
// each wave is a one-dimensional intensity distribution
// the function must redimension each of these waves to one of the image dimensions
// by calling the adh5_setup_profile() function.
// this function will also copy the scale information and dimension labels,
// which is important for the proper scaling of the result.
// the meaning of the data in dest1 and dest2 is up to the particular function,
// e.g. dest1 could hold the mean value and dest2 the one-sigma error,
// or dest1 could hold the X-profile, and dest2 the Y-profile.
string &param // string with optional parameters, shared between calls
// this is a pass-by-reference argument,
// the function may modify the string
wave source
wave dest1, dest2
string &param
// demo code
// integrate along the dimensions
@ -988,13 +1102,15 @@ threadsafe function adh5_default_reduction(source, dest1, dest2, param)
adh5_setup_profile(source, dest2, 1)
ad_profile_y_w(source, 0, -1, dest2)
return 0 // return zero if successful, non-zero if an error occurs
return 0
end
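As an illustration of this prototype (hypothetical function name and region of interest, assuming the energy axis is dimension 0 and using the adh5_setup_profile helper documented below), a custom reduction function that integrates over the central half of the angle axis could look like the following sketch; it can be checked with adh5_test_reduction_func and passed to adh5_load_reduced.
// sketch of a custom reduction function (hypothetical, for illustration only)
threadsafe function my_roi_reduction(source, dest1, dest2, param)
	wave source	// two-dimensional intensity distribution (image)
	wave dest1, dest2	// destination waves, set up along image dimension 0
	string &param	// optional parameter string, unused in this sketch

	adh5_setup_profile(source, dest1, 0)	// redimension and scale dest1 like dimension 0
	adh5_setup_profile(source, dest2, 0)
	variable ny = dimsize(source, 1)
	variable i
	dest1 = 0
	for (i = round(ny / 4); i < round(3 * ny / 4); i += 1)
		dest1 += source[p][i]	// integrate the central half of dimension 1
	endfor
	dest2 = dest1	// this sketch does not use the second destination
	return 0	// zero signals success
end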
/// set up a one-dimensional wave for a line profile based on a 2D original wave.
///
/// redimensions the profile wave to the given dimension.
/// copies the scale and dimension label of the given dimension.
///
threadsafe function adh5_setup_profile(image, profile, dim)
// sets up a one-dimensional wave for a line profile based on a 2D original wave
// redimensions the profile wave to the given dimension
// copies the scale and dimension label of the given dimension
wave image // prototype
wave profile // destination wave
variable dim // which dimension to keep: 0 = X, 1 = Y
@ -1005,10 +1121,11 @@ threadsafe function adh5_setup_profile(image, profile, dim)
setdimlabel 0, -1, $getdimlabel(image, dim, -1), profile
end
/// wrapper function for testing reduction functions from the command line.
///
/// Igor does not allow global variables as pass-by-reference parameter for reduction_param.
///
function /s adh5_test_reduction_func(source, dest1, dest2, reduction_func, reduction_param)
// wrapper function for testing reduction functions from the command line.
// Igor does not allow global variables as pass-by-reference parameter for reduction_param.
wave source
wave dest1
wave dest2
@ -1020,28 +1137,35 @@ function /s adh5_test_reduction_func(source, dest1, dest2, reduction_func, reduc
return reduction_param
end
/// load a reduced detector dataset from the open HDF5 file.
///
/// the function loads the dataset image by image using the hyperslab option
/// and applies a custom reduction function to each image.
/// the results from the reduction function are composed into one result wave.
/// the raw data are discarded.
///
/// by default, the reduction function is called in separate threads to reduce the total loading time.
/// (see the global variable adh5_perf_secs which reports the total run time of the function.)
/// the effect varies depending on the balance between file loading (image size)
/// and data processing (complexity of the reduction function).
/// for debugging the reduction function, multi-threading can be disabled.
///
/// @param fileID ID of open HDF5 file from HDF5OpenFile
/// @param detectorpath path to detector group in the HDF5 file
/// @param reduction_func custom reduction function
/// (any user-defined function which has the same parameters as adh5_default_reduction())
/// @param reduction_param parameter string for the reduction function
/// @param progress 1 (default): show progress window; 0: do not show progress window
/// @param nthreads -1 (default): use as many threads as there are processor cores (in addition to main thread)
/// 0: use main thread only (e.g. for debugging the reduction function)
/// >= 1: use a fixed number of (additional) threads
function adh5_load_reduced_detector(fileID, detectorpath, reduction_func, reduction_param, [progress, nthreads])
// loads a reduced detector dataset from the open HDF5 file
// the function loads the dataset image by image using the hyperslab option
// and applies a custom reduction function to each image.
// the results from the reduction function are composed into one result wave.
// the raw data are discarded.
// by default, the reduction function is called in separate threads to reduce the total loading time.
// (see the global variable adh5_perf_secs which reports the total run time of the function.)
// the effect varies depending on the balance between file loading (image size)
// and data processing (complexity of the reduction function).
// for debugging the reduction function, multi-threading can be disabled.
variable fileID // ID of open HDF5 file from HDF5OpenFile
string detectorpath // path to detector group in the HDF5 file
funcref adh5_default_reduction reduction_func // custom reduction function
// (any user-defined function which has the same parameters as adh5_default_reduction())
string reduction_param // parameter string for the reduction function
variable progress // 1 (default): show progress window; 0: do not show progress window
variable nthreads // -1 (default): use as many threads as there are processor cores (in addition to main thread)
// 0: use main thread only (e.g. for debugging the reduction function)
// >= 1: use a fixed number of (additional) threads
variable fileID
string detectorpath
funcref adh5_default_reduction reduction_func
string reduction_param
variable progress
variable nthreads
if (ParamIsDefault(progress))
progress = 1
@ -1340,17 +1464,21 @@ threadsafe static function reduce_slab_image(slabdata, image, profile1, profile2
return reduction_func(image, profile1, profile2, reduction_param)
end
/// load an NDAttributes group from an open HDF5 file into the current data folder.
///
/// datasets contained in the group are loaded as waves.
/// if a dataset contains only one data point, it is added to the IN, ID, IV, IU waves,
/// where IN = EPICS channel name, ID = attribute name, IV = value, IU = unit
/// (units are left empty as they are not saved in HDF5).
/// attributes of the NDAttributes group are added to the IN, ID, IV, IU waves,
/// however, IN and IU are left empty as this information is not saved in the HDF5 file.
///
/// @param fileID ID of open HDF5 file from HDF5OpenFile
/// @param attributespath path to NDAttributes group in the HDF5 file
///
function adh5_loadattr_all(fileID, attributespath)
// loads an NDAttributes group from an open HDF5 file into the current data folder.
// datasets contained in the group are loaded as waves.
// if a dataset contains only one data point, it is added to the IN, ID, IV, IU waves,
// where IN = EPICS channel name, ID = attribute name, IV = value, IU = unit
// (units are left empty as they are not saved in HDF5).
// attributes of the NDAttributes group are added to the IN, ID, IV, IU waves,
// however, IN and IU are left empty as this information is not saved in the HDF5 file.
variable fileID // ID of open HDF5 file from HDF5OpenFile
string attributespath // path to NDAttributes group in the HDF5 file
variable fileID
string attributespath
string datasetname
string datawavename
@ -1417,11 +1545,20 @@ function adh5_loadattr_all(fileID, attributespath)
end
/// sub-function of adh5_loadattr_all.
///
/// reads one attribute from a wave which was loaded from an HDF5 file into the info waves IN, ID, IV, IU.
/// the attribute is read only if the input wave contains exactly one item,
/// i.e. either the measurement is a single image, or the attribute has string type.
///
/// @param datawavename name of the attribute wave in the current folder.
/// can be text or numeric.
/// @param source source identifier (EPICS name) of the attribute.
/// @param idest destination index in IN, ID, IV, IU where the results are written.
/// the variable is incremented if data was written, otherwise it is left unchanged.
/// make sure IN, ID, IV, IU have at least idest + 1 elements.
///
static function read_attribute_info(datawavename, source, idest)
// sub-function of adh5_loadattr_all.
// reads one attribute from a wave which was loaded from an HDF5 file into the info waves IN, ID, IV, IU.
// the attribute is read only if the input wave contains exactly one item,
// i.e. either the measurement is a single image, or the attribute has string type.
string datawavename // name of the attribute wave in the current folder.
// can be text or numeric.
string source
@ -1467,12 +1604,14 @@ static function read_attribute_info(datawavename, source, idest)
endif
end
/// set the energy and angle scales of an area detector dataset from the Scienta analyser.
///
/// the dimension labels of the energy and angle scales must be set correctly:
/// AD_Dim0 = energy dimension; AD_Dim1 = angle dimension.
/// these dimensions must be the first two dimensions of a multi-dimensional dataset.
/// normally, AD_Dim0 is the X dimension, and AD_Dim1 the Y dimension.
///
function adh5_scale_scienta(data)
// sets the energy and angle scales of an area detector dataset from the Scienta analyser
// the dimension labels of the energy and angle scales must be set correctly:
// AD_Dim0 = energy dimension; AD_Dim1 = angle dimension
// these dimensions must be the first two dimensions of a multi-dimensional dataset.
// normally, AD_Dim0 is the X dimension, and AD_Dim1 the Y dimension.
wave data
dfref saveDF = GetDataFolderDFR()
@ -1571,11 +1710,13 @@ function adh5_scale_scienta(data)
setdatafolder saveDF
end
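// a minimal sketch of the dimension labelling that adh5_scale_scienta() checks, assuming the labels
// are attached to the data wave itself (the adh5 loaders normally set them, together with the
// analyser attributes from which the scales are calculated). the wave path is hypothetical.
function example_scale_scienta()
	wave data = root:pearl_area:data
	SetDimLabel 0, -1, AD_Dim0, data		// dimension 0 = energy (X)
	SetDimLabel 1, -1, AD_Dim1, data		// dimension 1 = angle (Y)
	adh5_scale_scienta(data)
end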
/// scales the extra dimensions of an area detector dataset according to the EPICS scan
///
/// the scan positioner name and its values must be available
///
/// @todo incomplete
///
function adh5_scale_scan(data)
// scales the extra dimensions of an area detector dataset according to the EPICS scan
// the scan positioner name and its values must be available
// TODO: incomplete
wave data
dfref saveDF = GetDataFolderDFR()


@ -5,6 +5,7 @@
#include "pearl-area-display" // 2D and 3D data visualization
#include "pearl-area-profiles" // data processing for multi-dimensional datasets
#include "pearl-area-import" // import data files generated by area detector software
#include "pearl-pshell-import"
#include "pearl-data-explorer" // preview and import panel for PEARL data
#include "pearl-anglescan-process"
#include "pearl-anglescan-tracker" // live preview of hemispherical angle scan


@ -1,36 +1,51 @@
#pragma rtGlobals=3 // Use modern global access method and strict wave access.
#pragma IgorVersion = 6.1
#pragma ModuleName = PearlDataExplorer
#pragma version = 1.41
#include "pearl-area-import", version >= 1.06
#include "pearl-area-profiles", version >= 1.04
#include "pearl-area-display", version >= 1.04
// preview and import panel for PEARL data:
// scienta analyser, prosilica cameras, s-scans, otf-scans
// $Id$
// author: matthias.muntwiler@psi.ch
// Copyright (c) 2013-14 Paul Scherrer Institut
#pragma version = 1.43
#include "pearl-area-import"
#include "pearl-area-profiles"
#include "pearl-area-display"
#include "pearl-pshell-import"
// copyright (c) 2013-16 Paul Scherrer Institut
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
// http://www.apache.org/licenses/LICENSE-2.0
/// @file
/// @brief preview and import panel for PEARL data
/// @ingroup ArpesPackage
///
///
/// preview and import panel for PEARL data:
/// scienta analyser, prosilica cameras, s-scans, otf-scans
/// @namespace PearlDataExplorer
/// @brief preview and import panel for PEARL data
///
/// PearlDataExplorer is declared in @ref pearl-data-explorer.ipf.
static strconstant package_name = "pearl_explorer"
static strconstant package_path = "root:packages:pearl_explorer:"
static strconstant ks_filematch_adh5 = "*.h5"
static strconstant ks_filematch_pshell = "psh*.h5"
static strconstant ks_filematch_itx = "*.itx"
function pearl_data_explorer()
init_package()
load_prefs()
execute /q/z "PearlDataExplorer()"
end
/// initialize the global variables of the data explorer.
///
/// initializes the global variables and data folder for this procedure file
/// must be called once before the panel is created
/// warning: this function overwrites previous values
static function init_package()
// initializes the global variables and data folder for this procedure file
// must be called once before the panel is created
// warning: this function overwrites previous values
dfref savefolder = GetDataFolderDFR()
SetDataFolder root:
@ -64,6 +79,7 @@ static function init_package()
string /g s_preview_source = "" // data source, e.g. EPICS channel name, of the current preview
string /g s_profiles_graph = "" // window name of the current preview if the data is two-dimensional
string /g s_preview_trace_graph = "" // window name of the current preview if the data is one-dimensional
string /g s_file_info = "" // description of selected file
variable/g v_InitPanelDone = 1
@ -201,9 +217,11 @@ static function preview_file(filename)
dfref saveDF = GetDataFolderDFR()
if (StringMatch(filename, "*.h5"))
if (StringMatch(filename, ks_filematch_pshell))
wave /z image = preview_pshell_file(filename)
elseif (StringMatch(filename, ks_filematch_adh5))
wave /z image = preview_hdf_file(filename)
elseif (StringMatch(filename, "*.itx"))
elseif (StringMatch(filename, ks_filematch_itx))
wave /z image = preview_itx_file(filename)
endif
@ -224,6 +242,49 @@ static function preview_file(filename)
setdatafolder saveDF
end
/// load the preview of a PShell HDF5 file.
///
/// the preview is an arbitrary detector image extracted from the file, see psh5_load_preview().
/// the preview is loaded to the preview_image wave in the pearl_explorer data folder.
///
/// the s_file_info string is updated with information about the scan dimensions.
///
/// @param filename name of a file in the directory specified by the pearl_explorer_filepath path object.
///
/// @return wave reference of the preview image
///
static function /wave preview_pshell_file(filename)
string filename
dfref saveDF = GetDataFolderDFR()
setdatafolder $package_path
svar s_preview_file
svar s_preview_source
psh5_load_preview("preview_image", "pearl_explorer_filepath", filename)
s_preview_file = filename
s_preview_source = ""
wave /z preview_image
svar /z s_file_info
if (! svar_exists(s_file_info))
string /g s_file_info
endif
if (strlen(s_preview_file) > 0)
s_file_info = psh5_load_info("pearl_explorer_filepath", filename)
else
s_file_info = ""
endif
if (DataFolderExists("attr"))
setdatafolder attr
preview_attributes(GetDataFolderDFR())
setdatafolder ::
endif
setdatafolder saveDF
return preview_image
end
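// a hypothetical stand-alone use of the pshell loaders called above (outside the data explorer panel).
// the folder and file names are examples only.
function example_pshell_preview()
	newpath /o pearl_explorer_filepath, "C:data:pearl:"
	psh5_load_preview("preview_image", "pearl_explorer_filepath", "psh_2016_04_09_001.h5")
	print psh5_load_info("pearl_explorer_filepath", "psh_2016_04_09_001.h5")
end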
/// load the preview of a PEARL HDF5 file.
///
/// the preview is an arbitrary detector image extracted from the file, see adh5_load_preview().
@ -777,13 +838,19 @@ static function load_file(filename, [options])
dfref saveDF = GetDataFolderDFR()
if (StringMatch(filename, "*.h5"))
if (StringMatch(filename, ks_filematch_pshell))
if (ParamIsDefault(options))
load_pshell_file(filename)
else
load_pshell_file(filename, options=options)
endif
elseif (StringMatch(filename, ks_filematch_adh5))
if (ParamIsDefault(options))
load_hdf_file(filename)
else
load_hdf_file(filename, options=options)
endif
elseif (StringMatch(filename, "*.itx"))
elseif (StringMatch(filename, ks_filematch_itx))
load_itx_file(filename)
endif
@ -796,7 +863,7 @@ static function prompt_hdf_options(options)
string mode = StringByKey("mode", options, ":", ";")
string reduction_func = StringByKey("reduction_func", options, ":", ";")
string modes = "adh5_load_reduced"
string modes = "load_reduced"
string reduction_functions = adh5_list_reduction_funcs()
if (strlen(mode) == 0)
@ -817,17 +884,19 @@ static function prompt_hdf_options(options)
return v_flag // 0 = OK, 1 = cancel
end
/// prototype for prompting for processing function parameters.
///
/// the function should prompt the user for function parameters,
/// and update the param argument if the user clicked OK.
/// returns 0 if the user clicked OK, 1 if the user cancelled.
///
/// prompt functions must have the same name as the corresponding reduction function
/// with the prefix "prompt_".
/// be aware of the limited length of function names in Igor.
///
/// this function is a prototype. it does nothing but returns OK.
///
function prompt_default_process(param)
// prototype for prompting for processing function parameters.
// the function should prompt the user for function parameters,
// and update the param argument if the user clicked OK.
// returns 0 if the user clicked OK, 1 if the user cancelled.
// prompt functions must have the same name as the corresponding reduction function
// with the prefix "prompt_".
// be aware of the limited length of function names in Igor.
// this function is a prototype. it does nothing but returns OK.
string &param
return 0
@ -847,6 +916,57 @@ function prompt_func_params(func_name, func_param)
endif
end
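// a hypothetical example of a prompt function following the naming convention described above,
// written for a reduction function that would be named "my_roi_sum". the parameter keys are made up.
function prompt_my_roi_sum(param)
	string &param

	variable ylo = NumberByKey("ylo", param, ":", ";")
	variable yhi = NumberByKey("yhi", param, ":", ";")
	prompt ylo, "lower angle bound (detector pixels)"
	prompt yhi, "upper angle bound (detector pixels)"
	doprompt "ROI parameters", ylo, yhi
	if (v_flag == 0)
		// user clicked OK - write the values back into the key:value; parameter string
		param = ReplaceNumberByKey("ylo", param, ylo, ":", ";")
		param = ReplaceNumberByKey("yhi", param, yhi, ":", ";")
	endif
	return v_flag	// 0 = OK, 1 = cancel, as required by the prototype
end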
static function /df load_pshell_file(filename, [options])
string filename
string options
dfref saveDF = GetDataFolderDFR()
string nickname = ad_suggest_foldername(filename)
string loaded_filename = ""
if (ParamIsDefault(options))
loaded_filename = psh5_load_complete(nickname, "pearl_explorer_filepath", filename)
else
if (strlen(options) == 0)
svar pref_options = $(package_path + "s_hdf_options")
options = pref_options
if (prompt_hdf_options(options) == 0)
// OK
pref_options = options
else
// cancel
options = ""
endif
endif
string mode = StringByKey("mode", options, ":", ";")
strswitch(mode)
case "load_reduced":
string reduction_func = StringByKey("reduction_func", options, ":", ";")
svar pref_params = $(package_path + "s_reduction_params")
string reduction_params = pref_params
if (prompt_func_params(reduction_func, reduction_params) == 0)
pref_params = reduction_params
psh5_load_reduced(nickname, "pearl_explorer_filepath", filename, $reduction_func, reduction_params)
svar s_filepath
loaded_filename = s_filepath
endif
break
endswitch
endif
dfref dataDF
if (strlen(loaded_filename) > 0)
setdatafolder $("root:" + nickname)
dataDF = GetDataFolderDFR()
string /g pearl_explorer_import = "load_pshell_file"
endif
setdatafolder saveDF
return dataDF
end
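// an illustrative way to call load_pshell_file() with the options mechanism parsed above.
// the reduction function name is hypothetical, the file name is an example, and the call uses the
// module prefix because load_pshell_file() is static; the data explorer package must be initialized.
function example_load_reduced()
	string options = "mode:load_reduced;reduction_func:my_roi_sum;"
	PearlDataExplorer#load_pshell_file("psh_2016_04_09_001.h5", options=options)
end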
static function /df load_hdf_file(filename, [options])
string filename
string options
@ -873,7 +993,7 @@ static function /df load_hdf_file(filename, [options])
string mode = StringByKey("mode", options, ":", ";")
strswitch(mode)
case "adh5_load_reduced":
case "load_reduced":
string reduction_func = StringByKey("reduction_func", options, ":", ";")
svar pref_params = $(package_path + "s_reduction_params")
string reduction_params = pref_params


@ -2,6 +2,7 @@
#pragma IgorVersion = 6.2
#pragma ModuleName = PearlFitFuncs
#pragma version = 1.01
#include "mm-physconst", version >= 1.05
// various fit functions for photoelectron spectroscopy
@ -682,7 +683,7 @@ function FermiGaussConv(pw, yw, xw) : FitFunc
variable xd = wavemin(xdw) / oversampling
// calculate gausswave size based on pw[5] and precision variable
variable x0g = pw[5] * precision_g
variable x0g = abs(pw[5]) * precision_g
variable ng = 2 * floor(x0g / xd) + 1
// calculate fermiwave size based on desired range for yw

File diff suppressed because it is too large.