updates: pshell import, angle-scans, elog

- pshell import: fix units and data scaling.
- pshell import: support new multi-region scans.
- angle scans: add trim function.
- angle scans: update import_tpi_scan function.
- angle scans: fix scales of check waves in normalization.
- area display: new cursor mode for background selection.
- elog: bugfixes (attachment list, check existing logbook).
This commit is contained in:
2017-07-04 11:06:49 +02:00
parent 9a65d26984
commit 80a01f2bdb
8 changed files with 426 additions and 94 deletions

View File

@ -1,7 +1,7 @@
#pragma rtGlobals=3 // Use modern global access method and strict wave access.
#pragma IgorVersion = 6.36
#pragma ModuleName = PearlPShellImport
#pragma version = 1.02
#pragma version = 1.03
#include <HDF5 Browser>
#include "pearl-gui-tools"
#include "pearl-area-import"
@ -309,6 +309,15 @@ function /s psh5_load_preview(ANickName, APathName, AFileName, [load_data, load_
ig = 0
endif
sg = StringFromList(ig, scanpaths)
if (load_attr)
setdatafolder fileDF
newdatafolder /o/s attr
killwaves /a/z
psh5_load_scan_attrs(fileID, sg)
endif
setdatafolder fileDF
dataname = psh5_load_scan_preview(fileID, sg, set_scale=load_attr, pref_datasets=pref_datasets)
wave /z data = $dataname
@ -320,13 +329,6 @@ function /s psh5_load_preview(ANickName, APathName, AFileName, [load_data, load_
print "no data found in file " + AFileName
endif
if (load_attr)
setdatafolder saveDF
newdatafolder /o/s attr
killwaves /a/z
psh5_load_scan_attrs(fileID, sg)
setdatafolder ::
endif
else
print "no scans found in file " + AFileName
endif
@ -346,6 +348,7 @@ end
///
/// data is loaded into the current data folder.
/// attribute datasets are loaded into sub-folder `attr`.
/// region datasets are loaded into region sub-folders.
/// existing data, if present, is overwritten.
///
/// @param fileID ID of open HDF5 file from psh5_open_file().
@ -382,13 +385,14 @@ function /s psh5_load_scan_complete(fileID, scanpath, [load_data, load_attr])
string wavenames
string attrnames
psh5_load_scan_meta(fileID, scanpath)
if (load_data)
wavenames = psh5_load_scan_data(fileID, scanpath)
endif
if (load_attr)
newdatafolder /s /o attr
attrnames = psh5_load_scan_attrs(fileID, scanpath)
endif
if (load_data)
setdatafolder dataDF
wavenames = psh5_load_scan_data(fileID, scanpath)
endif
if (load_data && load_attr)
setdatafolder dataDF
ps_scale_datasets()
@ -439,18 +443,72 @@ end
///
/// @return semicolon-separated list of dataset paths.
///
/// @version since version 1.03 this function returns paths relative to scanpath.
///
function /s psh5_list_scan_datasets(fileID, scanpath, [include_regions])
	variable fileID
	string scanpath
	variable include_regions

	if (ParamIsDefault(include_regions))
		include_regions = 0
	endif

	string result

	// /TYPE=2 lists datasets (not groups) directly below the scan group.
	// the returned items are relative to scanpath.
	HDF5ListGroup /TYPE=2 /Z fileID, scanpath
	result = S_HDF5ListGroup

	if (include_regions)
		// /R recurses into sub-groups; keep only items inside "region*" groups
		// because the top-level datasets are already in result.
		HDF5ListGroup /R /TYPE=2 /Z fileID, scanpath
		variable n = ItemsInList(S_HDF5ListGroup)
		variable i
		string ds
		for (i = 0; i < n; i += 1)
			ds = StringFromList(i, S_HDF5ListGroup)
			if (StringMatch(ds, "region*/*"))
				result = AddListItem(ds, result, ";", inf)
			endif
		endfor
	endif

	return result
end
/// list regions of a PShell scan group.
///
/// the function returns a list of all region groups of the selected scan.
///
/// @param fileID ID of open HDF5 file from psh5_open_file().
///
/// @param scanpath path to the scan group in the HDF5 file, e.g. "/scan 1".
///
/// @return semicolon-separated list of datagroup paths.
///
function /s psh5_list_scan_regions(fileID, scanpath)
	variable fileID
	string scanpath

	// /TYPE=1 lists groups (not datasets) directly below the scan group
	HDF5ListGroup /TYPE=1 /Z fileID, scanpath

	variable n = ItemsInList(S_HDF5ListGroup)
	variable i
	string result = ""
	string s
	// keep only groups whose name starts with "region"
	for (i = 0; i < n; i += 1)
		s = StringFromList(i, S_HDF5ListGroup)
		if (StringMatch(s, "region*"))
			result = AddListItem(s, result, ";", inf)
		endif
	endfor

	return result
end
/// load all datasets of a PShell scan group.
///
/// data is loaded into the current data folder.
/// region datasets are loaded into the respective region sub-folders.
///
/// this function does not scale the datasets.
/// call ps_scale_datasets() separately.
@ -464,22 +522,18 @@ end
function /s psh5_load_scan_data(fileID, scanpath)
	variable fileID
	string scanpath

	// dataset paths are relative to scanpath and may include a region
	// sub-group prefix, e.g. "region1/ScientaSpectrum".
	string datasets = psh5_list_scan_datasets(fileID, scanpath, include_regions=1)
	variable nds = ItemsInList(datasets)
	variable ids
	string sds
	string sw
	string wavenames = ""

	for (ids = 0; ids < nds; ids += 1)
		sds = StringFromList(ids, datasets)
		// psh5_load_dataset creates region sub-folders as needed.
		// scaling is deferred - call ps_scale_datasets() separately.
		sw = psh5_load_dataset(fileID, scanpath, sds, set_scale=0)
		wavenames = AddListItem(sw, wavenames, ";", inf)
	endfor

	return wavenames
end
@ -617,12 +671,18 @@ end
/// - wave scaling is set if the necessary scan attributes have been loaded and the `set_scale` option is selected (default).
/// the attributes must be loaded by psh5_load_scan_meta() and psh5_load_scan_attrs() (attr_sets=2).
///
/// the dataset is loaded into the current data folder unless datasetname contains a region specifier.
/// in the latter case, the dataset is loaded into sub-folder with the name of the region.
/// the function returns from the original data folder.
///
/// @param fileID ID of open HDF5 file from psh5_open_file().
///
/// @param scanpath path to the scan group in the HDF5 file, e.g. "/scan 1".
///
/// @param dataset name of the dataset.
/// @param datasetname name of the dataset.
/// the name of the loaded wave is a cleaned up version of the dataset name.
/// the name can include the region name as a relative path, e.g. "region1/ScientaSpectrum".
/// in this case, the dataset is loaded into a sub-folder named "region1".
///
/// @param set_scale by default, the function tries to set the wave scaling if the attributes have been loaded.
/// if multiple datasets are loaded from a file,
@ -632,6 +692,8 @@ end
///
/// @return name of loaded wave if successful. empty string otherwise.
///
/// @version this function supports regions as of version 1.03.
///
function /s psh5_load_dataset(fileID, scanpath, datasetname, [set_scale])
variable fileID
string scanpath
@ -642,10 +704,24 @@ function /s psh5_load_dataset(fileID, scanpath, datasetname, [set_scale])
set_scale = 1
endif
dfref base_df = GetDataFolderDFR()
string datasetpath
datasetpath = scanpath + "/" + datasetname
datasetpath = ReplaceString("//", datasetpath, "/")
string regionname
string regionpath
if (ItemsInList(datasetname, "/") >= 2)
regionname = StringFromList(0, datasetname, "/")
regionpath = ReplaceString("//", scanpath + "/" + regionname, "/")
datasetname = RemoveListItem(0, datasetname, "/")
NewDataFolder /o/s $regionname
else
regionname = ""
regionpath = scanpath
endif
STRUCT HDF5DataInfo di // Defined in HDF5 Browser.ipf.
InitHDF5DataInfo(di)
variable err = HDF5DatasetInfo(fileID, datasetpath, 0, di)
@ -659,12 +735,12 @@ function /s psh5_load_dataset(fileID, scanpath, datasetname, [set_scale])
HDF5LoadData /O /Q /Z fileID, datasetpath
dataname = StringFromList(0, S_waveNames)
else
dataname = psh5_load_dataset_slabs(fileID, scanpath, datasetname)
dataname = psh5_load_dataset_slabs(fileID, regionpath, datasetname)
endif
wave /z data = $dataname
if (waveexists(data))
psh5_load_dataset_meta(fileID, scanpath, datasetname, data)
psh5_load_dataset_meta(fileID, regionpath, datasetname, data)
ps_set_dimlabels(data)
if (set_scale)
ps_scale_dataset(data)
@ -673,19 +749,22 @@ function /s psh5_load_dataset(fileID, scanpath, datasetname, [set_scale])
dataname = ""
endif
setdatafolder base_df
return dataname
end
/// select the preferred dataset from a list of available datasets.
///
/// @param file_datasets semicolon-separated list of datasets that are available in the file.
/// the items may include a path separated by slashes "/".
/// only the last component of the path is checked.
///
/// @param pref_datasets semicolon-separated list of preferred datasets.
/// the items of the list are match strings for the Igor StringMatch function.
/// the first matching dataset is loaded from the file.
/// if no match is found, the first file dataset is selected.
///
/// @return name of selected dataset.
/// @return selected dataset.
///
static function /s select_dataset(file_datasets, pref_datasets)
string file_datasets
@ -695,6 +774,7 @@ static function /s select_dataset(file_datasets, pref_datasets)
variable nds = ItemsInList(file_datasets)
variable ids
string sds = ""
string mds = ""
variable np = ItemsInList(pref_datasets)
variable ip
string sp
@ -704,9 +784,9 @@ static function /s select_dataset(file_datasets, pref_datasets)
for (ids = 0; ids < nds; ids += 1)
sds = StringFromList(ids, file_datasets)
index = ItemsInList(sds, "/") - 1
sds = StringFromList(index, sds, "/")
mds = StringFromList(index, sds, "/")
sp = StringFromList(ip, pref_datasets)
if (StringMatch(sds, sp))
if (StringMatch(mds, sp))
found = 1
break
endif
@ -718,8 +798,6 @@ static function /s select_dataset(file_datasets, pref_datasets)
if (!found)
ids = 0
sds = StringFromList(ids, file_datasets)
index = ItemsInList(sds, "/") - 1
sds = StringFromList(index, sds, "/")
endif
endif
@ -766,7 +844,7 @@ function /s psh5_load_scan_preview(fileID, scanpath, [set_scale, pref_datasets])
dfref saveDF = GetDataFolderDFR()
dfref dataDF = saveDF
string datasets = psh5_list_scan_datasets(fileID, scanpath)
string datasets = psh5_list_scan_datasets(fileID, scanpath, include_regions=1)
string datasetname = select_dataset(datasets, pref_datasets)
string datasetpath
datasetpath = scanpath + "/" + datasetname
@ -995,7 +1073,8 @@ end
/// @param datapath path to the containing group in the HDF5 file.
/// path separator is the slash "/".
///
/// @param dataset name of the dataset.
/// @param datasetname name of the dataset.
/// may include relative path.
///
/// @param datawave metadata is added to the wave note of this wave.
///
@ -1364,42 +1443,92 @@ function ps_set_dimlabels2(data, name)
return 0
end
/// find the scan folder
///
/// the scan folder is the one that contains the :attr folder
/// the data and scan folders may refer to the same folder.
///
static function /df find_scan_folder(dataDF)
dfref dataDF
// the scan folder is identified by its :attr sub-folder
dfref attrDF = dataDF:attr
if (!DataFolderRefStatus(attrDF))
// no :attr child here, so dataDF is presumably a region sub-folder.
// appending ":" to the full path (which already ends in ":") yields "::",
// which Igor resolves to the parent folder.
string df = GetDataFolder(1, dataDF) + ":"
dfref scanDF = $df
else
// dataDF is itself the scan folder
dfref scanDF = dataDF
endif
return scanDF
end
/// find the attributes data folder
///
/// this is the :attr folder.
///
static function /df find_attr_folder(dataDF)
dfref dataDF
// first try: :attr directly below dataDF (dataDF is the scan folder)
dfref attrDF = dataDF:attr
if (!DataFolderRefStatus(attrDF))
// fall back to the parent folder's :attr (dataDF is a region sub-folder).
// appending ":" to the full path yields "::", i.e. the parent folder.
string df = GetDataFolder(1, dataDF) + ":"
dfref scanDF = $df
dfref attrDF = scanDF:attr
endif
// NOTE: may be an invalid reference if no :attr folder exists; callers
// should check with DataFolderRefStatus if necessary.
return attrDF
end
/// set the dimension scales of loaded PShell Scienta datasets according to attributes.
///
/// the datasets must be in the current data folder.
/// all datasets listed in the ScanReadables waves are scaled
/// according to the attribute waves in the :attr folder.
/// datasets listed in the ScanReadables waves are scaled
/// according to the attribute waves in the data, scan, and attributes folders,
/// whichever is found first.
///
/// the dimension labels of the dataset waves must be set correctly, e.g. by ps_set_dimlabels().
/// the current datafolder must contain the ScanReadables wave and the :attr folder.
/// the ScanReadables text wave contains names of the waves to scale.
/// wave names can include a relative path to a sub-folder. the path separator is "/".
///
/// the dimension labels of the dataset waves must have been set correctly, e.g. by ps_set_dimlabels().
/// this is implicitly done by the high-level load functions.
///
/// @version this function supports regions from version 1.03.
/// check that you're in the correct data folder!
///
function ps_scale_datasets()
	// current folder is the scan folder (or the data folder next to it)
	dfref scanDF = GetDataFolderDFR()
	dfref attrDF = find_attr_folder(scanDF)

	make /n=3 /free lo, hi
	make /n=3 /t /free ax, un

	wave /t /z /SDFR=scanDF ScanReadables
	if (WaveExists(ScanReadables))
		variable isr
		variable nsr = numpnts(ScanReadables)
		string ssr
		string sdf
		for (isr = 0; isr < nsr; isr += 1)
			// start each iteration from the scan folder; a readable name may
			// include a region sub-folder as a relative path, e.g. "region1/ScientaSpectrum"
			setdatafolder scanDF
			ssr = ScanReadables[isr]
			if (ItemsInList(ssr, "/") >= 2)
				sdf = StringFromList(0, ssr, "/")
				ssr = RemoveListItem(0, ssr, "/")
				setdatafolder $sdf
			endif
			wave /z wsr = $ssr
			if (WaveExists(wsr))
				// detect the scale per dataset because regions may differ
				ps_detect_scale(ax, lo, hi, un)
				ps_scale_dataset_2(wsr, ax, lo, hi, un)
			endif
		endfor
	endif
	setdatafolder scanDF
end
/// set the dimension scales of a loaded PShell Scienta dataset according to attributes.
///
/// the attributes must be in the child folder `:attr` next to the dataset.
/// the current datafolder must contain the :attr folder.
/// the data wave can be in the current folder or a sub-folder.
///
/// the dimension labels of the dataset waves must be set correctly, cf. ps_set_dimlabels().
/// the dimension labels of the dataset waves must have been set correctly, e.g. by ps_set_dimlabels().
/// this is implicitly done by the high-level load functions.
///
/// the function is useful if a single dataset is loaded and scaled.
@ -1408,27 +1537,48 @@ end
/// @param data data wave to be scaled.
/// dimension labels (index -1) must be set correctly, cf. ps_set_dimlabels().
///
/// @version this function supports regions from version 1.03.
///
function ps_scale_dataset(data)
	wave data

	dfref saveDF = GetDataFolderDFR()
	// scale attributes are looked up relative to the wave's own folder,
	// so switch there before detecting the scale
	dfref dataDF = GetWavesDataFolderDFR(data)
	setdatafolder dataDF

	make /n=3 /free lo, hi
	make /n=3 /t /free ax, un

	ps_detect_scale(ax, lo, hi, un)
	ps_scale_dataset_2(data, ax, lo, hi, un)
	setdatafolder saveDF
end
// look up a scale parameter wave by name.
// the data, scan, and attributes folders are searched in that order;
// the first folder that contains a wave of the given name wins.
// returns an invalid wave reference if the wave is not found anywhere.
static function /wave find_scale_wave(name, dataDF, scanDF, attrDF)
	string name
	dfref dataDF
	dfref scanDF
	dfref attrDF

	// guard-clause style: return as soon as a match is found
	wave /SDFR=dataDF /Z w = $name
	if (WaveExists(w))
		return w
	endif
	wave /SDFR=scanDF /Z w = $name
	if (WaveExists(w))
		return w
	endif
	wave /SDFR=attrDF /Z w = $name
	return w
end
/// detect the dimension scales from attributes.
///
/// the function checks the current data folder and the sub-folder `:attr` for scan parameters.
/// the function checks the data, scan, and attributes folders for scan parameters.
/// the results are written to the provided waves.
/// the function is normally called by ps_scale_datasets() but can also be used independently.
///
/// the current datafolder must be the data or the scan folder.
/// the data folder contains the waves that are to be scaled.
/// the scan folder contains the scan positions and the :attr folder.
///
/// the provided waves are redimensioned by the function, and dimension labels are set.
/// the scale parameters can then be extracted by keyword, e.g.,
/// @arg `lo[%%energy]` analyser energy dimension.
@ -1436,13 +1586,14 @@ end
/// @arg `lo[%%scan]` scan dimension.
/// @arg `lo[%%data]` data dimension.
///
/// the function tries to read the following waves,
/// and may fall back to more or less reasonable default values if they are not found.
/// @arg `:attr:LensMode`
/// @arg `:attr:ScientaChannelBegin`
/// @arg `:attr:ScientaChannelEnd`
/// @arg `:attr:ScientaSliceBegin`
/// @arg `:attr:ScientaSliceEnd`
/// the function tries to read the following waves, in the data, scan, and attributes folders,
/// where the first folder in the list takes precedence.
/// it may fall back to more or less reasonable default values if no data is found.
/// @arg `LensMode`
/// @arg `ScientaChannelBegin`
/// @arg `ScientaChannelEnd`
/// @arg `ScientaSliceBegin`
/// @arg `ScientaSliceEnd`
/// @arg `ScanWritables`
/// @arg wave referenced by `ScanWritables[0]`
///
@ -1456,6 +1607,9 @@ end
///
/// @return the function results are written to the lo, hi, un, and ax waves.
///
/// @version this function supports regions from version 1.03.
/// check that you're in the correct data folder!
///
function ps_detect_scale(ax, lo, hi, un)
wave /t ax
wave lo
@ -1463,7 +1617,8 @@ function ps_detect_scale(ax, lo, hi, un)
wave /t un
dfref dataDF = GetDataFolderDFR()
dfref attrDF = :attr
dfref scanDF = find_scan_folder(dataDF)
dfref attrDF = find_attr_folder(dataDF)
redimension /n=4 lo, hi, un, ax
setdimlabel 0, 0, $kEnergyDimLabel, lo, hi, un, ax
@ -1493,10 +1648,10 @@ function ps_detect_scale(ax, lo, hi, un)
ax[%$kDataDimLabel] = "value"
wave /SDFR=attrDF /T /Z LensMode
wave /SDFR=attrDF /Z ChannelBegin = ScientaChannelBegin
wave /SDFR=attrDF /Z ChannelEnd = ScientaChannelEnd
wave /SDFR=attrDF /Z SliceBegin = ScientaSliceBegin
wave /SDFR=attrDF /Z SliceEnd = ScientaSliceEnd
wave /Z ChannelBegin = find_scale_wave("ScientaChannelBegin", dataDF, scanDF, attrDF)
wave /Z ChannelEnd = find_scale_wave("ScientaChannelEnd", dataDF, scanDF, attrDF)
wave /Z SliceBegin = find_scale_wave("ScientaSliceBegin", dataDF, scanDF, attrDF)
wave /Z SliceEnd = find_scale_wave("ScientaSliceEnd", dataDF, scanDF, attrDF)
// lens mode can give more detail
if (waveexists(LensMode) && (numpnts(LensMode) >= 1))
@ -1504,13 +1659,13 @@ function ps_detect_scale(ax, lo, hi, un)
case "Angular45":
lo[%$kAngleDimLabel] = -45/2
hi[%$kAngleDimLabel] = +45/2
un[%$kAngleDimLabel] = "deg"
un[%$kAngleDimLabel] = "°"
ax[%$kAngleDimLabel] = "angle"
break
case "Angular60":
lo[%$kAngleDimLabel] = -60/2
hi[%$kAngleDimLabel] = +60/2
un[%$kAngleDimLabel] = "deg"
un[%$kAngleDimLabel] = "°"
ax[%$kAngleDimLabel] = "angle"
break
case "Transmission":
@ -1529,9 +1684,10 @@ function ps_detect_scale(ax, lo, hi, un)
lo[%$kAngleDimLabel] = SliceBegin[0]
hi[%$kAngleDimLabel] = SliceEnd[0]
endif
wave /z /t /SDFR=dataDF ScanWritables
wave /z /t /SDFR=scanDF ScanWritables
if (WaveExists(ScanWritables))
wave /z /SDFR=dataDF scanner = $ScanWritables[0]
wave /z /SDFR=scanDF scanner = $ScanWritables[0]
if (!WaveExists(scanner))
wave /z /SDFR=attrDF scanner = $ScanWritables[0]
endif
@ -1553,6 +1709,7 @@ function ps_detect_scale(ax, lo, hi, un)
case "RefocusZTrans":
case "ExitSlitY":
un[%$kScanDimLabel] = "mm"
break
case "ExitSlit":
un[%$kScanDimLabel] = "µm"
break
@ -1560,6 +1717,7 @@ function ps_detect_scale(ax, lo, hi, un)
case "ManipulatorTilt":
case "ManipulatorPhi":
un[%$kScanDimLabel] = "°"
break
case "FocusXRot":
case "FocusYRot":
case "FocusZRot":
@ -1567,6 +1725,7 @@ function ps_detect_scale(ax, lo, hi, un)
case "RefocusYRot":
case "RefocusZRot":
un[%$kScanDimLabel] = "mrad"
break
endswitch
endif
endif
@ -1603,6 +1762,8 @@ end
/// @param un unit labels.
/// the unit labels are applied using the SetScale operation.
///
/// @version this function supports regions from version 1.03.
///
function ps_scale_dataset_2(data, ax, lo, hi, un)
wave data
wave /t ax
@ -1746,8 +1907,11 @@ function /s psh5_load_reduced(ANickName, APathName, AFileName, reduction_func, r
HDF5LoadData /O /Q /Z fileID, positionerpath
endif
endif
setdatafolder dataDF
wavenames = psh5_load_dataset_reduced(fileID, scanpath, "ScientaImage", reduction_func, reduction_param, progress=progress)
string datasets = psh5_list_scan_datasets(fileID, scanpath, include_regions=1)
string dataset = select_dataset(datasets, "ScientaImage")
wavenames = psh5_load_dataset_reduced(fileID, scanpath, dataset, reduction_func, reduction_param, progress=progress)
psh5_close_file(fileID)
endif
@ -1783,7 +1947,10 @@ end
///
/// @param scanpath path to scan group in the HDF5 file.
///
/// @param dataset name of the dataset.
/// @param datasetname name of the dataset.
/// the name of the loaded wave is a cleaned up version of the dataset name.
/// the name can include the region name as a relative path, e.g. "region1/ScientaSpectrum".
/// in this case, the dataset is loaded into a sub-folder named "region1".
///
/// @param reduction_func custom reduction function
/// (any user-defined function which has the same parameters as adh5_default_reduction()).
@ -1804,6 +1971,8 @@ end
/// empty string if an error occurred.
/// error messages are printed to the history.
///
/// @version this function supports regions as of version 1.03.
///
function /s psh5_load_dataset_reduced(fileID, scanpath, datasetname, reduction_func, reduction_param, [progress, nthreads])
variable fileID
string scanpath
@ -1820,6 +1989,7 @@ function /s psh5_load_dataset_reduced(fileID, scanpath, datasetname, reduction_f
nthreads = -1
endif
dfref base_df = GetDataFolderDFR()
variable result = 0
string datasetpath
string datawavename
@ -1829,6 +1999,18 @@ function /s psh5_load_dataset_reduced(fileID, scanpath, datasetname, reduction_f
datasetpath = ReplaceString("//", datasetpath, "/")
datawavename = StringFromList(ItemsInList(datasetpath, "/") - 1, datasetpath, "/")
string regionname
string regionpath
if (ItemsInList(datasetname, "/") >= 2)
regionname = StringFromList(0, datasetname, "/")
regionpath = ReplaceString("//", scanpath + "/" + regionname, "/")
datasetname = RemoveListItem(0, datasetname, "/")
NewDataFolder /o/s $regionname
else
regionname = ""
regionpath = scanpath
endif
STRUCT HDF5DataInfo di // Defined in HDF5 Browser.ipf.
InitHDF5DataInfo(di)
variable err = HDF5DatasetInfo(fileID, datasetpath, 0, di)
@ -2035,6 +2217,7 @@ function /s psh5_load_dataset_reduced(fileID, scanpath, datasetname, reduction_f
kill_progress_panel()
endif
setdatafolder base_df
return wavenames
end