#pragma TextEncoding = "UTF-8"
#pragma rtGlobals=3    // Use modern global access method and strict wave access.
#pragma IgorVersion = 6.36
#pragma ModuleName = PearlPShellImport
#pragma version = 1.11
#include <HDF5 Browser>
#include "pearl-compat"
#include "pearl-gui-tools"
#include "pearl-area-import"
|
|
// copyright (c) 2013-21 Paul Scherrer Institut
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
// http://www.apache.org/licenses/LICENSE-2.0
|
|
/// @file
/// @brief import data from PShell
/// @ingroup ArpesPackage
///
///
/// HDF5 file import from the PShell data acquisition program.
/// the main import functions are:
///
/// - psh5_load_complete()
///   load all scans and datasets from a file.
///
/// - psh5_load_reduced()
///   load the ScientaImage dataset of the first scan and reduce its dimensionality.
///
/// - psh5_load_scan_complete()
///   load all datasets of a selected scan.
///
/// - psh5_load_scan_preview()
///   load a preview of a selected scan.
///
/// - psh5_load_dataset()
///   load a selected dataset.
///
/// - psh5_load_dataset_reduced()
///   load a selected dataset and reduce its dimensionality.
///
/// the following helper functions are also needed:
///
/// - psh5_open_file()
/// - psh5_close_file()
/// - psh5_list_scans()
/// - psh5_list_scan_datasets()
/// - psh5_load_scan_meta()
/// - psh5_load_scan_attrs()
///
/// @author matthias muntwiler, matthias.muntwiler@psi.ch
///
/// @copyright 2013-21 Paul Scherrer Institut @n
/// Licensed under the Apache License, Version 2.0 (the "License"); @n
/// you may not use this file except in compliance with the License. @n
/// You may obtain a copy of the License at
/// http://www.apache.org/licenses/LICENSE-2.0

/// @namespace PearlPShellImport
/// @brief import data from PShell
///
/// PearlPShellImport is declared in @ref pearl-pshell-import.ipf.
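// usage sketch (illustration only, not part of the API): a minimal manual
// import session using the helper functions listed above. the nickname "psh1"
// and the file name "pshell-001.h5" are placeholder values - adjust them to
// your experiment.
static function example_import_session()
    variable fileID = psh5_open_file("psh1", "", "pshell-001.h5")
    if (fileID)
        // psh5_open_file has changed to the new data folder and created s_scanpaths there
        svar s_scanpaths
        string scanpath = StringFromList(0, s_scanpaths)
        psh5_load_scan_complete(fileID, scanpath)
        psh5_close_file(fileID)
    endif
end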
|
|
/// Dimension label for the energy dispersive dimension of multi-dimensional datasets
strconstant kEnergyDimLabel = "energy"

/// Dimension label for the angle dispersive dimension of multi-dimensional datasets
strconstant kAngleDimLabel = "angle"

/// Dimension label for the scan dimension of multi-dimensional datasets
strconstant kScanDimLabel = "scan"

/// Dimension label for the data dimension.
/// This label may be used to store the parameters for the `setscale d` operation.
strconstant kDataDimLabel = "data"

/// List of preferred datasets to load for preview
strconstant kPreviewDatasets = "ImageEnergyDistribution;ScientaSpectrum;ScientaImage;Counts;SampleCurrent;"

/// List of datasets that must be loaded to determine the axis scaling of a Scienta image
strconstant kScientaScalingDatasets = "LensMode;ScientaChannelBegin;ScientaChannelEnd;ScientaSliceBegin;ScientaSliceEnd;"

/// List of datasets that should be transposed upon loading
strconstant kTransposedDatasets = "ScientaImage;"

/// multiply scienta detector intensity by this value to get actual counts.
constant kDetectorSensitivity = 1
|
|
/// open a HDF5 file created by the PShell data acquisition program and prepare the data folder.
///
/// the function opens a specified or interactively selected HDF5 file,
/// creates a data folder `$ANickName` under root,
/// and changes to the new data folder.
///
/// the file must be closed by psh5_close_file() after use.
///
/// @param ANickName destination folder name (top level under root).
///
/// @param APathName igor symbolic path name. can be empty if the path is specified in FileName or a dialog box should be displayed
///
/// @param AFileName if empty a dialog box shows up
///
/// @return ID of open HDF5 file from HDF5OpenFile.
/// zero if an error occurred.
///
/// @return global string s_filepath in new data folder contains the full file path on disk.
///
/// @return global string s_scanpaths in new data folder contains a list of scan groups inside the file.
///
function psh5_open_file(ANickName, APathName, AFileName)
    string ANickName
    string APathName
    string AFileName

    setdatafolder root:
    newdatafolder /s /o $("root:" + ANickName)
    dfref fileDF = GetDataFolderDFR()

    variable fileID
    HDF5OpenFile /P=$APathName /R fileID as AFileName
    if (v_flag == 0)
        string /g s_filepath
        string /g s_scanpaths
        s_filepath = s_path + s_filename
        s_scanpaths = psh5_list_scans(fileID)
    else
        fileID = 0
    endif

    return fileID
end
|
|
/// close a HDF5 file opened by psh5_open_file.
///
/// this function just closes the HDF5 file.
/// no change is made to the loaded data.
///
/// @param fileID ID of open HDF5 file from psh5_open_file().
///
function psh5_close_file(fileID)
    variable fileID

    HDF5CloseFile fileID
end
|
|
/// load everything from a PShell data file.
///
/// @param ANickName destination folder name (top level under root)
///
/// @param APathName igor symbolic path name. can be empty if the path is specified in FileName or a dialog box should be displayed
///
/// @param AFileName if empty a dialog box shows up
///
/// @param load_data select whether datasets (positioners and detectors) are loaded.
/// @arg 1 (default) load data.
/// @arg 0 do not load data.
///
/// @param load_attr select whether attributes (auxiliary device readbacks) are loaded.
/// for proper wave scaling, the attributes must be loaded.
/// @arg 1 (default) load attributes.
/// @arg 0 do not load attributes.
///
/// @return complete path of the loaded file if successful.
/// empty string otherwise.
///
/// @return global string s_filepath in new data folder contains the full file path on disk.
///
/// @return global string s_scanpaths in new data folder contains a list of scan groups inside the file.
///
function /s psh5_load_complete(ANickName, APathName, AFileName, [load_data, load_attr])
    string ANickName
    string APathName
    string AFileName
    variable load_data
    variable load_attr

    if (ParamIsDefault(load_data))
        load_data = 1
    endif
    if (ParamIsDefault(load_attr))
        load_attr = 1
    endif

    dfref saveDF = GetDataFolderDFR()

    // performance monitoring
    variable timerRefNum
    variable /g psh5_perf_secs
    timerRefNum = startMSTimer

    variable fileID = psh5_open_file(ANickName, APathName, AFileName)
    if (fileID)
        dfref fileDF = GetDataFolderDFR()
        svar s_filepath
        svar s_scanpaths
        AFileName = s_filepath
        print "loading " + s_filepath + "\r"

        variable ig
        variable ng = ItemsInList(s_scanpaths, ";")
        string sg
        string folder

        for (ig = 0; ig < ng; ig += 1)
            sg = StringFromList(ig, s_scanpaths, ";")
            folder = ReplaceString("/", sg, "")
            folder = ReplaceString(" ", folder, "")
            folder = PearlCleanupName(folder)
            setdatafolder fileDF
            newdatafolder /s /o $folder
            psh5_load_scan_complete(fileID, sg, load_data=load_data, load_attr=load_attr)
        endfor

        psh5_close_file(fileID)
    else
        AFileName = ""
    endif

    psh5_perf_secs = stopMSTimer(timerRefNum) / 1e6

    setdatafolder saveDF
    return AFileName
end
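// usage sketch (illustration only, not part of the API): one-call import of a
// whole file. the nickname "pearl" is a placeholder; empty path and file name
// arguments bring up an open-file dialog.
static function example_load_complete()
    string loaded = psh5_load_complete("pearl", "", "")
    if (strlen(loaded) > 0)
        print "loaded file:", loaded
    endif
end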
|
|
/// load a preview image from a PShell data file.
///
/// the data wave is loaded into the current data folder.
/// attributes are loaded into the attr subfolder. existing waves in attr are deleted.
///
/// @param APathName igor symbolic path name. can be empty if the path is specified in FileName or a dialog box should be displayed
///
/// @param AFileName if empty a dialog box shows up
///
/// @param load_data 1 (default): load data; 0: do not load data
///
/// @param load_attr 1 (default): load attributes; 0: do not load attributes
/// note: for correct scaling of the image, the attributes need to be loaded
///
/// @param pref_scans semicolon-separated list of preferred scans.
/// the items of the list are match strings for the Igor StringMatch function.
/// the first matching scan (i.e. top-level HDF5 group with a matching name) is loaded from the file.
/// if no match is found, the first scan is loaded.
///
/// @param pref_datasets semicolon-separated list of preferred datasets.
/// the items of the list are match strings for the Igor StringMatch function.
/// the first matching dataset is loaded from the file.
/// if no match is found, the first dataset listed in the file is loaded.
///
/// @return name of loaded preview wave.
///
function /s psh5_load_preview(APathName, AFileName, [load_data, load_attr, pref_scans, pref_datasets])
    string APathName
    string AFileName
    variable load_data
    variable load_attr
    string pref_scans
    string pref_datasets

    if (ParamIsDefault(load_data))
        load_data = 1
    endif
    if (ParamIsDefault(load_attr))
        load_attr = 1
    endif
    if (ParamIsDefault(pref_scans))
        pref_scans = "*scan1*;"
    endif
    if (ParamIsDefault(pref_datasets))
        pref_datasets = ""
    endif

    dfref saveDF = GetDataFolderDFR()

    variable fileID
    string scanpaths = ""
    string dataname = ""

    // performance monitoring
    variable timerRefNum
    variable /g adh5_perf_secs
    timerRefNum = startMSTimer

    HDF5OpenFile /P=$APathName /R /Z fileID as AFileName
    if (v_flag == 0)
        AFileName = s_path + s_filename
        dfref fileDF = GetDataFolderDFR()

        scanpaths = psh5_list_scans(fileID)
        variable ng = ItemsInList(scanpaths)
        variable ig
        string sg
        variable np = ItemsInList(pref_scans)
        variable ip
        string sp
        variable found = 0
        if (ng > 0)
            for (ip = 0; ip < np; ip += 1)
                for (ig = 0; ig < ng; ig += 1)
                    sg = StringFromList(ig, scanpaths)
                    sp = StringFromList(ip, pref_scans)
                    if (StringMatch(sg, sp))
                        found = 1
                        break
                    endif
                endfor
                if (found)
                    break
                endif
            endfor
            if (!found)
                ig = 0
            endif
            sg = StringFromList(ig, scanpaths)

            if (load_attr)
                setdatafolder fileDF
                newdatafolder /o/s attr
                killwaves /a/z
                psh5_load_scan_attrs(fileID, sg)
            endif

            setdatafolder fileDF
            dataname = psh5_load_scan_preview(fileID, sg, set_scale=load_attr, pref_datasets=pref_datasets)
        else
            print "no scans found in file " + AFileName
        endif

        HDF5CloseFile fileID
    endif

    if (timerRefNum >= 0)
        adh5_perf_secs = stopMSTimer(timerRefNum) / 1e6
    endif

    setdatafolder saveDF
    return dataname
end
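// usage sketch (illustration only, not part of the API): preview the Scienta
// image of the first scan and display it. empty path and file name arguments
// bring up a dialog. assumes the preferred dataset resolves to a 2D wave.
static function example_preview()
    string dataname = psh5_load_preview("", "", pref_datasets="ScientaImage;")
    if (strlen(dataname) > 0)
        display
        appendimage $dataname
    endif
end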
|
|
/// load organizational metadata from the general group.
///
/// the general group contains the following datasets:
/// authors, pgroup, proposal, proposer, sample.
///
/// data is loaded into the current data folder.
/// all items are loaded into strings, authors is a comma-separated list.
/// missing items default to empty strings.
///
/// @param APathName igor symbolic path name. can be empty if the path is specified in FileName or a dialog box should be displayed
///
/// @param AFileName if empty a dialog box shows up
///
/// @return semicolon-separated list of the objects.
///
function /s psh5_load_general_group(APathName, AFileName)
    string APathName
    string AFileName

    variable fileID

    HDF5OpenFile /P=$APathName /R /Z fileID as AFileName
    if (v_flag == 0)
        string obj_names = "authors;pgroup;proposal;proposer;sample;"
        variable nn = ItemsInList(obj_names, ";")
        variable ii
        string name

        for (ii = 0; ii < nn; ii += 1)
            name = StringFromList(ii, obj_names, ";")
            psh_load_general_string(fileID, name)
        endfor

        // close the file handle that was opened above
        HDF5CloseFile fileID
        return obj_names
    else
        return ""
    endif
end
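// usage sketch (illustration only, not part of the API): read the proposal
// metadata of a file into global strings in the current data folder.
// the file name "pshell-001.h5" is a placeholder.
static function example_general_group()
    psh5_load_general_group("", "pshell-001.h5")
    svar /z authors
    if (SVAR_Exists(authors))
        print "authors:", authors
    endif
end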
|
|
/// load a string from the general group.
///
/// the general group contains the following datasets:
/// authors, pgroup, proposal, proposer, sample.
///
/// data is loaded into a global string in the current data folder.
/// arrays with multiple items are loaded into a comma-separated list.
/// a missing item defaults to the empty string.
///
/// @param fileID ID of open HDF5 file from psh5_open_file().
///
/// @param name name of the dataset in the general group.
/// also the name of the destination global string.
///
/// @return comma-separated list of values.
///
function /s psh_load_general_string(fileID, name)
    variable fileID
    string name

    string path = "/general/" + name
    HDF5LoadData /O /Q /Z /N=wt_load_general /TYPE=1 fileID, path
    string values = ""
    if (!v_flag)
        wave /t wt_load_general
        variable nn = numpnts(wt_load_general)
        variable ii
        for (ii = 0; ii < nn; ii += 1)
            values = AddListItem(wt_load_general[ii], values, ",", inf)
        endfor
        killwaves /z wt_load_general
        // drop the trailing list separator
        if (strlen(values) >= 1)
            values = values[0,strlen(values)-2]
        endif
    endif
    string /g $name = values
    return values
end
|
|
/// load all data of a selected scan from a PShell data file.
///
/// data is loaded into the current data folder.
/// attribute datasets are loaded into sub-folder `attr`.
/// region datasets are loaded into region sub-folders.
/// existing data, if present, is overwritten.
///
/// @param fileID ID of open HDF5 file from psh5_open_file().
///
/// @param scanpath path to the scan group in the HDF5 file, e.g. "/scan 1".
///
/// @param load_data select whether datasets (positioners and detectors) are loaded.
/// @arg 1 (default) load data.
/// @arg 0 do not load data.
///
/// @param load_attr select whether attributes (auxiliary device readbacks) are loaded.
/// for proper wave scaling, the attributes must be loaded.
/// @arg 1 (default) load attributes.
/// @arg 0 do not load attributes.
///
/// @return semicolon-separated list of the loaded data waves (excluding attributes).
///
function /s psh5_load_scan_complete(fileID, scanpath, [load_data, load_attr])
    variable fileID
    string scanpath
    variable load_data
    variable load_attr

    if (ParamIsDefault(load_data))
        load_data = 1
    endif
    if (ParamIsDefault(load_attr))
        load_attr = 1
    endif

    dfref saveDF = GetDataFolderDFR()

    dfref dataDF = GetDataFolderDFR()
    string wavenames
    string attrnames
    psh5_load_scan_meta(fileID, scanpath)
    if (load_attr)
        newdatafolder /s /o attr
        attrnames = psh5_load_scan_attrs(fileID, scanpath)
    endif
    if (load_data)
        setdatafolder dataDF
        wavenames = psh5_load_scan_data(fileID, scanpath)
    endif
    if (load_data && load_attr)
        setdatafolder dataDF
        ps_scale_datasets()
    endif

    setdatafolder saveDF
    return wavenames
end
|
|
/// list scan groups of a PShell data file.
///
/// the function returns a list of all top-level groups whose name starts with "scan".
///
/// @param fileID ID of open HDF5 file from psh5_open_file().
///
/// @return semicolon-separated list of group paths.
///
function /s psh5_list_scans(fileID)
    variable fileID

    HDF5ListGroup /F /TYPE=1 fileID, "/"

    variable ig
    variable ng = ItemsInList(S_HDF5ListGroup, ";")
    string sg
    string scans = ""

    for (ig = 0; ig < ng; ig += 1)
        sg = StringFromList(ig, S_HDF5ListGroup, ";")
        // with the /F option, group paths start with a slash, e.g. "/scan 1"
        if (cmpstr(sg[1,4], "scan") == 0)
            scans = AddListItem(sg, scans, ";", inf)
        endif
    endfor

    return scans
end
|
|
/// list datasets of a PShell scan group.
///
/// the function returns a list of all datasets of the selected scan.
/// this does not include datasets from the attributes sub-group.
///
/// @note in a future version, an option may be introduced to filter datasets by function (_Readable_ and/or _Writable_).
///
/// @param fileID ID of open HDF5 file from psh5_open_file().
///
/// @param scanpath path to the scan group in the HDF5 file, e.g. "/scan 1".
///
/// @param include_regions select whether datasets from region sub-groups are included.
/// @arg 0 (default) list only datasets directly under the scan group.
/// @arg 1 also list datasets inside region groups, e.g. "region1/ScientaSpectrum".
///
/// @return semicolon-separated list of dataset paths.
///
/// @version since version 1.03 this function returns paths relative to scanpath.
///
function /s psh5_list_scan_datasets(fileID, scanpath, [include_regions])
    variable fileID
    string scanpath
    variable include_regions

    if (ParamIsDefault(include_regions))
        include_regions = 0
    endif
    string result

    HDF5ListGroup /TYPE=2 /Z fileID, scanpath
    result = S_HDF5ListGroup

    if (include_regions)
        HDF5ListGroup /R /TYPE=2 /Z fileID, scanpath
        variable n = ItemsInList(S_HDF5ListGroup)
        variable i
        string ds
        string region_datasets
        for (i = 0; i < n; i += 1)
            ds = StringFromList(i, S_HDF5ListGroup)
            if (StringMatch(ds, "region*/*"))
                //region_datasets = psh5_list_scan_datasets(fileID, ReplaceString("//", scanpath + "/" + region, "/"), include_regions=0)
                result = AddListItem(ds, result, ";", inf)
            endif
        endfor
    endif

    return result
end
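// usage sketch (illustration only, not part of the API): print the datasets
// of the first scan, including those in region sub-groups.
static function example_list_datasets(fileID)
    variable fileID

    string scans = psh5_list_scans(fileID)
    string scanpath = StringFromList(0, scans)
    print psh5_list_scan_datasets(fileID, scanpath, include_regions=1)
end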
|
|
/// list regions of a PShell scan group.
///
/// the function returns a list of all region groups of the selected scan.
///
/// @param fileID ID of open HDF5 file from psh5_open_file().
///
/// @param scanpath path to the scan group in the HDF5 file, e.g. "/scan 1".
///
/// @return semicolon-separated list of datagroup paths.
///
function /s psh5_list_scan_regions(fileID, scanpath)
    variable fileID
    string scanpath

    HDF5ListGroup /TYPE=1 /Z fileID, scanpath
    variable n = ItemsInList(S_HDF5ListGroup)
    variable i
    string result = ""
    string s
    for (i = 0; i < n; i += 1)
        s = StringFromList(i, S_HDF5ListGroup)
        if (StringMatch(s, "region*"))
            result = AddListItem(s, result, ";", inf)
        endif
    endfor

    return result
end
|
|
/// load all datasets of a PShell scan group.
///
/// data is loaded into the current data folder.
/// region datasets are loaded into the respective region sub-folders.
///
/// this function does not scale the datasets.
/// call ps_scale_datasets() separately.
///
/// @param fileID ID of open HDF5 file from psh5_open_file().
///
/// @param scanpath path to the scan group in the HDF5 file, e.g. "/scan 1".
///
/// @return semicolon-separated list of the loaded waves.
///
function /s psh5_load_scan_data(fileID, scanpath)
    variable fileID
    string scanpath

    string datasets = psh5_list_scan_datasets(fileID, scanpath, include_regions=1)
    variable nds = ItemsInList(datasets)
    variable ids
    string sds
    string sw
    string wavenames = ""
    for (ids = 0; ids < nds; ids += 1)
        sds = StringFromList(ids, datasets)
        sw = psh5_load_dataset(fileID, scanpath, sds, set_scale=0)
        wavenames = AddListItem(sw, wavenames, ";", inf)
    endfor

    return wavenames
end
|
|
/// load attributes of a PShell scan group.
///
/// "attributes" are the auxiliary data inside the attrs group.
/// do not confuse with HDF5 attributes!
/// HDF5 attributes are loaded by the psh5_load_scan_meta() function.
///
/// data is loaded into the current data folder.
/// this should normally be the `:attr` folder inside the respective scan folder.
///
/// @param fileID ID of open HDF5 file from psh5_open_file().
///
/// @param scanpath path to the scan group in the HDF5 file, e.g. "/scan 1".
///
/// @param attr_sets specify the attribute sets to be loaded.
/// this value can be an arithmetic OR of the following constants.
/// by default, all attributes are loaded.
/// @arg 1 all datasets that are present in the file.
/// @arg 2 datasets relevant for wave scaling of Scienta data.
///
/// @return semicolon-separated list of the loaded waves.
///
function /s psh5_load_scan_attrs(fileID, scanpath, [attr_sets])
    variable fileID
    string scanpath
    variable attr_sets

    if (ParamIsDefault(attr_sets))
        attr_sets = 1
    endif

    string attr_path = ReplaceString("//", scanpath + "/attrs", "/")
    string attr_list = ""
    if (attr_sets & 1)
        HDF5ListGroup /TYPE=2 /Z fileID, attr_path
        if (!v_flag)
            attr_list = S_HDF5ListGroup
        endif
    endif

    variable ids
    variable nds
    string sds

    if (attr_sets & 2)
        nds = ItemsInList(kScientaScalingDatasets, ";")
        for (ids = 0; ids < nds; ids += 1)
            sds = StringFromList(ids, kScientaScalingDatasets)
            if (WhichListItem(sds, attr_list) < 0)
                attr_list = AddListItem(sds, attr_list, ";", inf)
            endif
        endfor
    endif

    nds = ItemsInList(attr_list, ";")
    string wavenames = ""
    for (ids = 0; ids < nds; ids += 1)
        sds = StringFromList(ids, attr_list, ";")
        HDF5LoadData /O /Q /Z fileID, attr_path + "/" + sds
        if (!v_flag)
            wavenames = AddListItem(s_wavenames, wavenames, ";", inf)
        endif
    endfor
    wavenames = ReplaceString(";;", wavenames, ";")

    return wavenames
end
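// usage sketch (illustration only, not part of the API): load only the
// attributes that are needed for wave scaling (attr_sets bit 2) into the
// attr sub-folder of the current data folder.
static function example_load_scaling_attrs(fileID, scanpath)
    variable fileID
    string scanpath

    newdatafolder /o/s attr
    psh5_load_scan_attrs(fileID, scanpath, attr_sets=2)
    setdatafolder ::
end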
|
|
/// load metadata of a PShell scan group.
///
/// _metadata_ are the HDF5 attributes attached to the scan group.
/// the following attributes are loaded.
/// the respective wave names under Igor are given in parentheses.
///
/// - Dimensions (ScanDimensions)
/// - Writables (ScanWritables)
/// - Readables (ScanReadables)
/// - Steps (ScanSteps)
/// - Iterations (ScanIterations) - if present (XPSSpectrum script)
/// - Step Size (ScanStepSize) - if present (XPSSpectrum script)
/// - Step Time (ScanStepTime) - if present (XPSSpectrum script)
///
/// if they are missing in the file, `ScanDimensions` and `ScanReadables` are set to default values
/// assuming the file contains a single spectrum.
///
/// data is loaded into the current data folder.
///
/// @param fileID ID of open HDF5 file from psh5_open_file().
///
/// @param scanpath path to the scan group in the HDF5 file, e.g. "/scan 1".
///
/// @return semicolon-separated list of the loaded waves.
///
function /s psh5_load_scan_meta(fileID, scanpath)
    variable fileID
    string scanpath
    string wavenames = ""

    HDF5LoadData /O /Q /Z /A="Dimensions" /N=ScanDimensions /TYPE=1 fileID, scanpath
    if (!v_flag)
        wavenames = AddListItem(s_wavenames, wavenames, ";", inf)
    else
        make /n=1 /o ScanDimensions
        ScanDimensions = 0
        wavenames = AddListItem("ScanDimensions", wavenames, ";", inf)
    endif
    HDF5LoadData /O /Q /Z /A="Readables" /N=ScanReadables /TYPE=1 fileID, scanpath
    if (!v_flag)
        wavenames = AddListItem(s_wavenames, wavenames, ";", inf)
    else
        make /n=1 /o /t ScanReadables
        ScanReadables[0] = "ScientaSpectrum"
        wavenames = AddListItem("ScanReadables", wavenames, ";", inf)
    endif
    HDF5LoadData /O /Q /Z /A="Writables" /N=ScanWritables /TYPE=1 fileID, scanpath
    if (!v_flag)
        wavenames = AddListItem(s_wavenames, wavenames, ";", inf)
    endif
    HDF5LoadData /O /Q /Z /A="Steps" /N=ScanSteps /TYPE=1 fileID, scanpath
    if (!v_flag)
        wavenames = AddListItem(s_wavenames, wavenames, ";", inf)
    endif
    wavenames = ReplaceString(";;", wavenames, ";")

    // additional attributes from XPSSpectrum.py
    HDF5LoadData /O /Q /Z /A="Iterations" /N=ScanIterations /TYPE=1 fileID, scanpath
    if (!v_flag)
        wavenames = AddListItem(s_wavenames, wavenames, ";", inf)
    endif
    HDF5LoadData /O /Q /Z /A="Step Size" /N=ScanStepSize /TYPE=1 fileID, scanpath
    if (!v_flag)
        wavenames = AddListItem(s_wavenames, wavenames, ";", inf)
    endif
    HDF5LoadData /O /Q /Z /A="Step Time" /N=ScanStepTime /TYPE=1 fileID, scanpath
    if (!v_flag)
        wavenames = AddListItem(s_wavenames, wavenames, ";", inf)
    endif

    return wavenames
end
|
|
/// load a dataset from an open PShell HDF5 file.
///
/// if the dataset has a maximum of two dimensions, the function loads it at once.
/// if it has more than two dimensions, the function calls psh5_load_dataset_slabs() to load the data slab by slab.
///
/// - the metadata (HDF5 attributes) are loaded into the wave note, cf. psh5_load_dataset_meta().
/// - dimension labels are set according to the dataset name, cf. ps_set_dimlabels().
/// - wave scaling is set if the necessary scan attributes have been loaded and the `set_scale` option is selected (default).
///   the attributes must be loaded by psh5_load_scan_meta() and psh5_load_scan_attrs() (attr_sets=2).
///
/// the dataset is loaded into the current data folder unless datasetname contains a region specifier.
/// in the latter case, the dataset is loaded into a sub-folder with the name of the region.
/// the function returns from the original data folder.
///
/// @param fileID ID of open HDF5 file from psh5_open_file().
///
/// @param scanpath path to the scan group in the HDF5 file, e.g. "/scan 1".
///
/// @param datasetname name of the dataset.
/// the name of the loaded wave is a cleaned up version of the dataset name.
/// the name can include the region name as a relative path, e.g. "region1/ScientaSpectrum".
/// in this case, the dataset is loaded into a sub-folder named "region1".
///
/// @param set_scale by default, the function tries to set the wave scaling if the attributes have been loaded.
/// if multiple datasets are loaded from a file,
/// it is more efficient to set the scaling of all loaded datasets at the end by calling ps_scale_datasets().
/// @arg 1 (default) set the wave scaling.
/// @arg 0 do not set the wave scaling.
///
/// @return name of loaded wave if successful. empty string otherwise.
///
/// @version this function supports regions as of version 1.03.
///
function /s psh5_load_dataset(fileID, scanpath, datasetname, [set_scale])
    variable fileID
    string scanpath
    string datasetname
    variable set_scale

    if (ParamIsDefault(set_scale))
        set_scale = 1
    endif

    dfref base_df = GetDataFolderDFR()

    string datasetpath
    datasetpath = scanpath + "/" + datasetname
    datasetpath = ReplaceString("//", datasetpath, "/")

    string regionname
    string regionpath
    if (ItemsInList(datasetname, "/") >= 2)
        regionname = StringFromList(0, datasetname, "/")
        regionpath = ReplaceString("//", scanpath + "/" + regionname, "/")
        datasetname = RemoveListItem(0, datasetname, "/")
        NewDataFolder /o/s $regionname
    else
        regionname = ""
        regionpath = scanpath
    endif

    STRUCT HDF5DataInfo di // Defined in HDF5 Browser.ipf.
    InitHDF5DataInfo(di)
    variable err = HDF5DatasetInfo(fileID, datasetpath, 0, di)
    if (err != 0)
        print "error accessing detector/data"
        return ""
    endif

    string dataname
    if (di.ndims < 2)
        HDF5LoadData /O /Q /Z fileID, datasetpath
        dataname = StringFromList(0, S_waveNames)
    else
        dataname = psh5_load_dataset_slabs(fileID, regionpath, datasetname)
    endif

    wave /z data = $dataname
    if (waveexists(data))
        psh5_load_dataset_meta(fileID, regionpath, datasetname, data)
        ps_set_dimlabels(data)
        if (set_scale)
            ps_scale_dataset(data)
        endif
    else
        dataname = ""
    endif

    setdatafolder base_df
    return dataname
end
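// usage sketch (illustration only, not part of the API): load several datasets
// with deferred scaling, then scale them all at once with ps_scale_datasets().
// assumes the current data folder is the destination scan folder and that the
// named datasets exist in the file.
static function example_load_two_datasets(fileID, scanpath)
    variable fileID
    string scanpath

    psh5_load_scan_meta(fileID, scanpath)
    newdatafolder /o/s attr
    psh5_load_scan_attrs(fileID, scanpath, attr_sets=2)
    setdatafolder ::
    psh5_load_dataset(fileID, scanpath, "ScientaImage", set_scale=0)
    psh5_load_dataset(fileID, scanpath, "SampleCurrent", set_scale=0)
    ps_scale_datasets()
end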
|
|
/// select the preferred dataset from a list of available datasets.
///
/// @param file_datasets semicolon-separated list of datasets that are available in the file.
/// the items may include a path separated by slashes "/".
/// only the last component of the path is checked.
///
/// @param pref_datasets semicolon-separated list of preferred datasets.
/// the items of the list are match strings for the Igor StringMatch function.
/// the first matching dataset is loaded from the file.
/// if no match is found, the first file dataset is selected.
///
/// @return selected dataset.
///
static function /s select_dataset(file_datasets, pref_datasets)
    string file_datasets
    string pref_datasets

    variable index
    variable nds = ItemsInList(file_datasets)
    variable ids
    string sds = ""
    string mds = ""
    variable np = ItemsInList(pref_datasets)
    variable ip
    string sp
    variable found = 0
    if (nds > 0)
        for (ip = 0; ip < np; ip += 1)
            for (ids = 0; ids < nds; ids += 1)
                sds = StringFromList(ids, file_datasets)
                index = ItemsInList(sds, "/") - 1
                mds = StringFromList(index, sds, "/")
                sp = StringFromList(ip, pref_datasets)
                if (StringMatch(mds, sp))
                    found = 1
                    break
                endif
            endfor
            if (found)
                break
            endif
        endfor
        if (!found)
            ids = 0
            sds = StringFromList(ids, file_datasets)
        endif
    endif

    return sds
end
|
|
/// load a preview dataset from an open PShell HDF5 file.
///
/// if the dataset has a maximum of two dimensions, the function loads it at once.
/// if it has more than two dimensions, the function selects and loads one two-dimensional slab.
///
///
/// @param fileID ID of open HDF5 file from psh5_open_file().
///
/// @param scanpath path to the scan group in the HDF5 file, e.g. "/scan 1".
///
/// @param set_scale by default, the function tries to set the wave scaling if the attributes have been loaded.
/// if multiple datasets are loaded from a file,
/// it is more efficient to set the scaling of all loaded datasets at the end by calling ps_scale_datasets().
/// @arg 1 (default) set the wave scaling.
/// @arg 0 do not set the wave scaling.
///
/// @param pref_datasets semicolon-separated list of preferred datasets.
/// the items of the list are match strings for the Igor StringMatch function.
/// the first matching dataset is loaded from the file.
/// if no match is found, the first dataset listed in the file is loaded.
/// if empty, a hard-coded default preference list is used.
///
/// @return name of loaded wave if successful. empty string otherwise.
///
function /s psh5_load_scan_preview(fileID, scanpath, [set_scale, pref_datasets])
    variable fileID
    string scanpath
    variable set_scale
    string pref_datasets

    if (ParamIsDefault(set_scale))
        set_scale = 1
    endif
    if (ParamIsDefault(pref_datasets) || (strlen(pref_datasets) == 0))
        pref_datasets = kPreviewDatasets
    endif

    dfref saveDF = GetDataFolderDFR()
    dfref dataDF = saveDF

    string datasets = psh5_list_scan_datasets(fileID, scanpath, include_regions=1)
    string datasetname = select_dataset(datasets, pref_datasets)
    string datasetpath
    datasetpath = scanpath + "/" + datasetname
    datasetpath = ReplaceString("//", datasetpath, "/")

    STRUCT HDF5DataInfo di // Defined in HDF5 Browser.ipf.
    InitHDF5DataInfo(di)
    variable err = HDF5DatasetInfo(fileID, datasetpath, 0, di)
    if (err != 0)
        print "error accessing detector/data"
        return ""
    endif

    string dataname
    if (di.ndims < 2)
        HDF5LoadData /O /Q /Z fileID, datasetpath
        dataname = StringFromList(0, S_waveNames)
        wave /z data = $dataname
        if (waveexists(data))
            ps_set_dimlabels(data)
        endif
    else
        // select the middle slab of the extra dimensions
        variable dim2start = 0
        variable dim2count = 1
        variable dim3start = 0
        variable dim3count = 1
        if (di.ndims >= 3)
            dim2start = floor(di.dims[2] / 2)
            dim2count = 1
        endif
        if (di.ndims >= 4)
            dim3start = floor(di.dims[3] / 2)
            dim3count = 1
        endif

        dataname = psh5_load_dataset_slab(fileID, scanpath, datasetname, dim2start, dim2count, dim3start, dim3count)
    endif

    wave /z data = $dataname
    if (waveexists(data))
        if (set_scale)
            setdatafolder dataDF
            string positioners
            string positioner
            string positionerpath
            positioners = psh5_load_scan_meta(fileID, scanpath)
            wave /t /z ScanWritables
            if (waveexists(ScanWritables) && (numpnts(ScanWritables) >= 1))
                positioner = ScanWritables[0]
                if (strlen(positioner) > 0)
                    positionerpath = scanpath + "/" + positioner
                    positionerpath = ReplaceString("//", positionerpath, "/")
                    HDF5LoadData /O /Q /Z fileID, positionerpath
                endif
            endif

            setdatafolder dataDF
            newdatafolder /o/s attr
            psh5_load_scan_attrs(fileID, scanpath, attr_sets=2)
            setdatafolder dataDF
            ps_scale_dataset(data)
        endif
    else
        dataname = ""
    endif

    return dataname
end
|
|
/// load a longitudinal section of a scan from an open PShell HDF5 file.
///
/// the dataset must have three dimensions.
///
///
/// @param fileID ID of open HDF5 file from psh5_open_file().
///
/// @param scanpath path to the scan group in the HDF5 file, e.g. "/scan 1".
///
/// @param dim reserved, must be 0.
///
/// @param set_scale by default, the function tries to set the wave scaling if the attributes have been loaded.
/// if multiple datasets are loaded from a file,
/// it is more efficient to set the scaling of all loaded datasets at the end by calling ps_scale_datasets().
/// @arg 1 (default) set the wave scaling.
/// @arg 0 do not set the wave scaling.
///
/// @param pref_datasets semicolon-separated list of preferred datasets.
/// the items of the list are match strings for the Igor StringMatch function.
/// the first matching dataset is loaded from the file.
/// if no match is found, the first dataset listed in the file is loaded.
/// if empty, a hard-coded default preference list is used.
///
/// @return name of loaded wave if successful. empty string otherwise.
///
/// @warning EXPERIMENTAL: this function is under development.
///
function /s psh5_load_scan_section(fileID, scanpath, dim, [set_scale, pref_datasets])
    variable fileID
    string scanpath
    variable dim
    variable set_scale
    string pref_datasets

    // select first dimension (future argument)
    // 0 = first dimension is x axis (energy of scienta image)
    dim = 0

    if (ParamIsDefault(set_scale))
        set_scale = 1
    endif
    if (ParamIsDefault(pref_datasets) || (strlen(pref_datasets) == 0))
        pref_datasets = kPreviewDatasets
    endif

    dfref saveDF = GetDataFolderDFR()
    dfref dataDF = saveDF

    string datasets = psh5_list_scan_datasets(fileID, scanpath)
    string datasetname = select_dataset(datasets, pref_datasets)
    string datasetpath
    datasetpath = scanpath + "/" + datasetname
    datasetpath = ReplaceString("//", datasetpath, "/")
    string dataname = StringFromList(ItemsInList(datasetpath, "/") - 1, datasetpath, "/")
    // truncate the name so that name plus digit stays within igor's 31-character wave name limit
    string destname = dataname[0,29] + num2str(dim)

    STRUCT HDF5DataInfo di // Defined in HDF5 Browser.ipf.
    InitHDF5DataInfo(di)
    variable err = HDF5DatasetInfo(fileID, datasetpath, 0, di)
    if (err != 0)
        print "error accessing detector/data"
        return ""
    elseif (di.ndims != 3)
        print "error: rank of dataset != 3"
        return ""
    endif

    variable idx, idy, idz, idt
    variable transpose = WhichListItem(dataname, kTransposedDatasets) >= 0
    if (transpose)
        idx = 1
        idy = 0
    else
        idx = 0
        idy = 1
    endif
    idz = 2
    idt = 3

    variable nx, ny, nz
    nx = di.dims[idx]
    ny = di.dims[idy]
    nz = di.dims[idz]

    HDF5MakeHyperslabWave("slab", max(di.ndims, 4))
    wave slab
    slab[][%Start] = 0
    slab[][%Stride] = 1
    slab[][%Count] = 1
    slab[][%Block] = 1

    if (dim == 0)
        slab[idy][%Start] = floor(ny / 2)
        slab[idx][%Block] = nx
        make /n=(nx,nz) /o $destname
    else
        slab[idx][%Start] = floor(nx / 2)
        slab[idy][%Block] = ny
        make /n=(ny,nz) /o $destname
    endif
    slab[idz][%Block] = nz
    wave data = $destname
    data = 0

    HDF5LoadData /O /Q /Z /SLAB=slab /N=slabdata fileID, datasetpath
    if (!v_flag)
        wave slabdata
        if (transpose)
            data += slabdata[0][p][q][0]
        else
            data += slabdata[p][0][q][0]
        endif
    endif
    killwaves /z slab, slabdata

    if (set_scale)
        make /n=(1,1,1) /free dummy
        ps_set_dimlabels2(dummy, dataname)
        setdimlabel 0, -1, $GetDimLabel(dummy, dim, -1), data
        setdimlabel 1, -1, $kScanDimLabel, data

        setdatafolder dataDF
        string positioners
        string positioner
        string positionerpath
        positioners = psh5_load_scan_meta(fileID, scanpath)
        wave /t /z ScanWritables
        if (waveexists(ScanWritables) && (numpnts(ScanWritables) >= 1))
            positioner = ScanWritables[0]
            if (strlen(positioner) > 0)
                positionerpath = scanpath + "/" + positioner
                positionerpath = ReplaceString("//", positionerpath, "/")
                HDF5LoadData /O /Q /Z fileID, positionerpath
            endif
        endif

        setdatafolder dataDF
        newdatafolder /o/s attr
        killwaves /a/z
        psh5_load_scan_attrs(fileID, scanpath, attr_sets=2)
        setdatafolder dataDF
        ps_scale_dataset(data)
    endif

    return destname
end
|
|
/// load metadata of a PShell dataset.
///
/// "metadata" are the HDF5 attributes attached to the scan dataset.
///
/// data is added to the wave note.
///
/// @param fileID ID of open HDF5 file from psh5_open_file().
///
/// @param datapath path to the containing group in the HDF5 file.
/// path separator is the slash "/".
///
/// @param datasetname name of the dataset.
/// may include relative path.
///
/// @param datawave metadata is added to the wave note of this wave.
///
/// @return 0 if successful, non-zero if an error occurred.
///
function psh5_load_dataset_meta(fileID, datapath, datasetname, datawave)
    variable fileID
    string datapath
    string datasetname
    wave datawave

    dfref saveDF = GetDataFolderDFR()
    SetDataFolder NewFreeDataFolder()

    string datasetpath = datapath + "/" + datasetname
    datasetpath = ReplaceString("//", datasetpath, "/")
    string wnote

    HDF5LoadData /O /Q /Z /A="Writable Dimension" /N=WriteDim fileID, datasetpath
    if (!v_flag)
        wave WriteDim
        // scan dimension starts at 1
        sprintf wnote, "ScanDimension=%u", WriteDim[0]
        Note datawave, wnote
    endif

    HDF5LoadData /O /Q /Z /A="Writable Index" /N=WriteIndex fileID, datasetpath
    if (!v_flag)
        wave WriteIndex
        sprintf wnote, "WriteableIndex=%u", WriteIndex[0]
        Note datawave, wnote
    endif

    HDF5LoadData /O /Q /Z /A="Readable Index" /N=ReadIndex fileID, datasetpath
    if (!v_flag)
        wave ReadIndex
        sprintf wnote, "ReadableIndex=%u", ReadIndex[0]
        Note datawave, wnote
    endif

    setdatafolder saveDF
    return 0
end
|
|
/// load a dataset slab-wise from the open PShell HDF5 file.
///
/// the function loads the dataset image by image using the hyperslab option.
///
/// @param fileID ID of open HDF5 file from psh5_open_file().
///
/// @param datapath path to the containing group in the HDF5 file.
/// path separator is the slash "/".
///
/// @param datasetname name of the dataset.
/// also defines the name of the loaded wave.
///
/// @param progress select whether a progress window is displayed during the process.
/// @arg 1 (default) show progress window.
/// @arg 0 do not show progress window.
///
/// @return name of loaded wave if successful. empty string otherwise.
///
function /s psh5_load_dataset_slabs(fileID, datapath, datasetname, [progress])
    variable fileID
    string datapath
    string datasetname
    variable progress

    if (ParamIsDefault(progress))
        progress = 1
    endif

    variable result = 0
    string datasetpath
    string datawavename
    datasetpath = datapath + "/" + datasetname
    datasetpath = ReplaceString("//", datasetpath, "/")
    datawavename = StringFromList(ItemsInList(datasetpath, "/") - 1, datasetpath, "/")

    STRUCT HDF5DataInfo di // Defined in HDF5 Browser.ipf.
    InitHDF5DataInfo(di)
    variable err = HDF5DatasetInfo(fileID, datasetpath, 0, di)
    if (err != 0)
        print "error accessing detector/data"
        return ""
    endif
    if (di.ndims < 2)
        print "error: rank of dataset < 2"
        return ""
    elseif (di.ndims < 3)
        progress = 0
    endif

    variable idx, idy, idz, idt, izt
    variable transpose = WhichListItem(datawavename, kTransposedDatasets) >= 0
    if (transpose)
        idx = 1
        idy = 0
    else
        idx = 0
        idy = 1
    endif
    idz = 2
    idt = 3

    variable nx, ny, nz, nt, nzt
    nx = di.dims[idx]
    ny = di.dims[idy]
    nz = di.dims[idz]
    nt = di.dims[idt]
    make /n=(nx,ny,nz,nt) /o $datawavename
    wave data = $datawavename

    nz = max(nz, 1)
    nt = max(nt, 1)
    nzt = nz * nt
    izt = 0
    if (progress)
        display_progress_panel("HDF5 Import", "Loading data...", nzt)
    endif

    // load data image by image
    HDF5MakeHyperslabWave("slab", max(di.ndims, 4))
    wave slab
    slab[][%Start] = 0
    slab[][%Stride] = 1
    slab[][%Count] = 1
    slab[][%Block] = 1
    slab[idx][%Block] = nx
    slab[idy][%Block] = ny

    variable iz, it
    for (iz = 0; iz < nz; iz += 1)
        for (it = 0; it < nt; it += 1)
            slab[idz][%Start] = iz
            slab[idt][%Start] = it
            HDF5LoadData /O /Q /Z /SLAB=slab /N=slabdata fileID, datasetpath
            wave slabdata // 2D, 3D, or 4D with singletons
            if (transpose)
                data[][][iz][it] = slabdata[q][p][0][0]
            else
                data[][][iz][it] = slabdata[p][q][0][0]
            endif

            // progress window
            izt += 1
            if (progress)
                if (update_progress_panel(izt))
                    result = -4 // user abort
                    break
                endif
            endif
        endfor
        if (result < 0)
            break
        endif
    endfor

    if (progress)
        kill_progress_panel()
    endif

    killwaves /z slab, slabdata
    if (!result)
        ps_set_dimlabels(data)
        return datawavename
    else
        killwaves /z data
        return ""
    endif
end
|
|
/// load a single image from the open PShell data file.
///
/// the function can average over a region in the extra dimensions.
///
/// @param fileID ID of open HDF5 file from psh5_open_file().
///
/// @param datapath path to the containing group in the HDF5 file.
/// path separator is the slash "/".
///
/// @param datasetname name of the dataset.
/// also defines the name of the loaded wave.
///
/// @param dim2start 2nd dimension coordinate of the first image.
/// set to 0 if the dimension may not be present.
///
/// @param dim2count number of subsequent images to average.
/// set to 1 if the dimension may not be present.
///
/// @param dim3start 3rd dimension coordinate of the first image.
/// set to 0 if the dimension may not be present.
///
/// @param dim3count number of subsequent images to average.
/// set to 1 if the dimension may not be present.
///
/// @return name of loaded wave if successful. empty string otherwise.
///
function /s psh5_load_dataset_slab(fileID, datapath, datasetname, dim2start, dim2count, dim3start, dim3count)
    variable fileID
    string datapath
    string datasetname
    variable dim2start
    variable dim2count
    variable dim3start
    variable dim3count

    string datasetpath
    string datawavename
    datasetpath = datapath + "/" + datasetname
    datasetpath = ReplaceString("//", datasetpath, "/")
    datawavename = StringFromList(ItemsInList(datasetpath, "/") - 1, datasetpath, "/")

    STRUCT HDF5DataInfo di
    InitHDF5DataInfo(di)
    variable err = HDF5DatasetInfo(fileID, datasetpath, 0, di)
    if (err != 0)
        print "error accessing detector/data"
        return ""
    endif
    if (di.ndims < 2)
        print "error: rank of dataset < 2"
        return ""
    endif

    variable idx, idy, idz, idt
    variable transpose = WhichListItem(datawavename, kTransposedDatasets) >= 0
    if (transpose)
        idx = 1
        idy = 0
    else
        idx = 0
        idy = 1
    endif
    idz = 2
    idt = 3

    variable nx, ny
    nx = di.dims[idx]
    ny = di.dims[idy]
    make /n=(nx,ny) /o $datawavename
    wave data = $datawavename
    data = 0

    HDF5MakeHyperslabWave("slab", max(di.ndims, 4))
    wave slab
    slab[][%Start] = 0
    slab[][%Stride] = 1
    slab[][%Count] = 1
    slab[][%Block] = 1
    slab[idx][%Block] = nx
    slab[idy][%Block] = ny

    variable iz, it
    variable navg = 0
    variable dim2end = dim2start + dim2count - 1
    variable dim3end = dim3start + dim3count - 1
    for (iz = dim2start; iz <= dim2end; iz += 1)
        for (it = dim3start; it <= dim3end; it += 1)
            slab[idz][%Start] = iz
            slab[idt][%Start] = it
            HDF5LoadData /O /Q /Z /SLAB=slab /N=slabdata fileID, datasetpath
            if (!v_flag)
                wave slabdata
                if (transpose)
                    data += slabdata[q][p][0][0]
                else
                    data += slabdata[p][q][0][0]
                endif
                navg += 1
            endif
        endfor
    endfor
    if (navg)
        data /= navg
    endif

    killwaves /z slab, slabdata
    ps_set_dimlabels(data)
    return datawavename
end
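// usage sketch (illustration only, not part of the API): average five images
// around the middle of the scan dimension of a three-dimensional ScientaImage
// dataset. nz is the size of the scan dimension, e.g. from HDF5DatasetInfo.
static function /s example_load_middle_images(fileID, scanpath, nz)
    variable fileID
    string scanpath
    variable nz

    variable zstart = max(floor(nz / 2) - 2, 0)
    return psh5_load_dataset_slab(fileID, scanpath, "ScientaImage", zstart, 5, 0, 1)
end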
|
|
/// set dimension labels according to the axis type
///
/// this function asserts a particular ordering of dimension types
/// based on the name of the wave for
/// ScientaImage, ScientaSpectrum, ImageAngleDistribution, ImageEnergyDistribution.
/// all other waves must be one-dimensional, and the dimension must be the scan dimension.
///
/// dimension labels are required by scaling functions.
///
/// @param data data wave as loaded from PShell file
///
/// @return @arg 0 all labels set successfully.
/// @arg 1 unidentified data source.
/// @arg 2 wave does not contain data.
///
function ps_set_dimlabels(data)
    wave data

    // pass the result code through so that the documented return values apply
    return ps_set_dimlabels2(data, NameOfWave(data))
end
|
|
/// set dimension labels according to the axis type
///
/// same as ps_set_dimlabels() except that the dimension labels are set
/// according to a separate name argument instead of the wave name.
///
/// @param data data wave as loaded from PShell file.
///
/// @param name original name of the dataset in the PShell file.
///
/// @return @arg 0 all labels set successfully.
/// @arg 1 unidentified data source.
/// @arg 2 wave does not contain data.
///
function ps_set_dimlabels2(data, name)
    wave data
    string name

    variable dummy
    try
        // intrinsic dimensions
        strswitch(name)
            case "ScientaImage":
                setdimlabel 0, -1, $kEnergyDimLabel, data
                setdimlabel 1, -1, $kAngleDimLabel, data
                if (WaveDims(data) >= 3)
                    setdimlabel 2, -1, $kScanDimLabel, data
                endif
                AbortOnRTE
                break
            case "ImageAngleDistribution":
            case "ScientaAngleDistribution":
                if (WaveDims(data) >= 2)
                    setdimlabel 0, -1, $kScanDimLabel, data
                    setdimlabel 1, -1, $kAngleDimLabel, data
                else
                    setdimlabel 0, -1, $kAngleDimLabel, data
                endif
                AbortOnRTE
                break
            case "ScientaSpectrum":
            case "ImageEnergyDistribution":
            case "ScientaEnergyDistribution":
                if (WaveDims(data) >= 2)
                    setdimlabel 0, -1, $kScanDimLabel, data
                    setdimlabel 1, -1, $kEnergyDimLabel, data
                else
                    setdimlabel 0, -1, $kEnergyDimLabel, data
                endif
                AbortOnRTE
                break
            default:
                if (WaveDims(data) == 1)
                    setdimlabel 0, -1, $kScanDimLabel, data
                    AbortOnRTE
                else
                    return 1
                endif
        endswitch
    catch
        dummy = GetRTError(1)
        return 2
    endtry
    return 0
end
|
|
/// find the scan folder
///
/// the scan folder is the one that contains the :attr folder.
/// the data and scan folders may refer to the same folder.
///
static function /df find_scan_folder(dataDF)
    dfref dataDF

    dfref attrDF = dataDF:attr
    if (!DataFolderRefStatus(attrDF))
        // no :attr here - the scan folder is the parent of the data folder
        string df = GetDataFolder(1, dataDF) + ":"
        dfref scanDF = $df
    else
        dfref scanDF = dataDF
    endif
    return scanDF
end
|
|
/// find the attributes data folder
///
/// this is the :attr folder.
///
static function /df find_attr_folder(dataDF)
    dfref dataDF

    dfref attrDF = dataDF:attr
    if (!DataFolderRefStatus(attrDF))
        string df = GetDataFolder(1, dataDF) + ":"
        dfref scanDF = $df
        dfref attrDF = scanDF:attr
    endif
    return attrDF
end
|
|
/// set the dimension scales of loaded PShell Scienta datasets according to attributes.
///
/// datasets listed in the ScanReadables waves are scaled
/// according to the attribute waves in the data, scan, and attributes folders,
/// whichever is found first.
///
/// the current datafolder must contain the ScanReadables wave and the :attr folder.
/// the ScanReadables text wave contains names of the waves to scale.
/// wave names can include a relative path to a sub-folder. the path separator is "/".
///
/// the dimension labels of the dataset waves must have been set correctly, e.g. by ps_set_dimlabels().
/// this is implicitly done by the high-level load functions.
///
/// @version this function supports regions from version 1.03.
/// check that you're in the correct data folder!
///
function ps_scale_datasets()
    dfref scanDF = GetDataFolderDFR()
    dfref attrDF = find_attr_folder(scanDF)

    make /n=3 /free lo, hi
    make /n=3 /t /free ax, un
    wave /t /z /SDFR=scanDF ScanReadables
    if (WaveExists(ScanReadables))
        variable isr
        variable nsr = numpnts(ScanReadables)
        string ssr
        string sdf
        for (isr = 0; isr < nsr; isr += 1)
            setdatafolder scanDF
            ssr = ScanReadables[isr]
            if (ItemsInList(ssr, "/") >= 2)
                sdf = StringFromList(0, ssr, "/")
                ssr = RemoveListItem(0, ssr, "/")
                setdatafolder $sdf
            endif
            wave /z wsr=$ssr
            if (WaveExists(wsr))
                ps_detect_scale(ax, lo, hi, un)
                ps_scale_dataset_2(wsr, ax, lo, hi, un)
            endif
        endfor
    endif
    setdatafolder scanDF
end
|
|
/// set the dimension scales of a loaded PShell Scienta dataset according to attributes.
///
/// the current datafolder must contain the :attr folder.
/// the data wave can be in the current folder or a sub-folder.
///
/// the dimension labels of the dataset waves must have been set correctly, e.g. by ps_set_dimlabels().
/// this is implicitly done by the high-level load functions.
///
/// the function is useful if a single dataset is loaded and scaled.
/// if multiple datasets are loaded, ps_scale_datasets() is slightly more efficient.
///
/// @param data data wave to be scaled.
/// dimension labels (index -1) must be set correctly, cf. ps_set_dimlabels().
///
/// @version this function supports regions from version 1.03.
///
function ps_scale_dataset(data)
    wave data

    dfref saveDF = GetDataFolderDFR()
    dfref dataDF = GetWavesDataFolderDFR(data)

    setdatafolder dataDF
    make /n=3 /free lo, hi
    make /n=3 /t /free ax, un
    ps_detect_scale(ax, lo, hi, un)
    ps_scale_dataset_2(data, ax, lo, hi, un)
    setdatafolder saveDF
end
|
|
/// find an attribute wave by name in the data, scan, or attributes folder,
/// in this order of precedence.
static function /wave find_scale_wave(name, dataDF, scanDF, attrDF)
    string name
    dfref dataDF
    dfref scanDF
    dfref attrDF

    wave /SDFR=dataDF /Z w = $name
    if (!WaveExists(w))
        wave /SDFR=scanDF /Z w = $name
        if (!WaveExists(w))
            wave /SDFR=attrDF /Z w = $name
        endif
    endif
    return w
end
|
|
/// detect the dimension scales from attributes.
|
|
///
|
|
/// the function checks the data , scan and attributes folders for scan parameters.
|
|
/// the results are written to the provided waves.
|
|
/// the function is normally called by ps_scale_datasets() but can also be used independently.
|
|
///
|
|
/// the current datafolder must be the data or the scan folder.
|
|
/// the data folder contains the waves that are to be scaled.
|
|
/// the scan folder contains the scan positions and the :attr folder.
|
|
///
|
|
/// the provided waves are redimensioned by the function, and dimension labels are set.
|
|
/// the scale parameters can then be extracted by keyword, e.g.,
|
|
/// @arg `lo[%%energy]` analyser energy dimension.
|
|
/// @arg `lo[%%angle]` analyser angle dimension.
|
|
/// @arg `lo[%%scan]` scan dimension.
|
|
/// @arg `lo[%%data]` data dimension.
|
|
///
|
|
/// the function tries to read the following waves, in the data, scan, and attributes folders,
|
|
/// where the first folder in the list takes precedence.
|
|
/// it may fall back to more or less reasonable default values if no data is not found.
|
|
/// @arg `LensMode`
|
|
/// @arg `ScientaChannelBegin`
|
|
/// @arg `ScientaChannelEnd`
|
|
/// @arg `ScientaSliceBegin`
|
|
/// @arg `ScientaSliceEnd`
|
|
/// @arg `ScanWritables`
|
|
/// @arg wave referenced by `ScanWritables[0]`
|
|
///
|
|
/// @param ax text wave to receive the axis labels.
|
|
///
|
|
/// @param lo wave to receive the lower limits.
|
|
///
|
|
/// @param hi wave to receive the upper limits.
|
|
///
|
|
/// @param un text wave to receive the unit labels.
|
|
///
|
|
/// @return the function results are written to the lo, hi, un, and ax waves.
|
|
///
|
|
/// @version this function supports regions from version 1.03.
|
|
/// check that you're in the correct data folder!
|
|
///
|
|
function ps_detect_scale(ax, lo, hi, un)
	wave /t ax
	wave lo
	wave hi
	wave /t un

	dfref dataDF = GetDataFolderDFR()
	dfref scanDF = find_scan_folder(dataDF)
	dfref attrDF = find_attr_folder(dataDF)

	redimension /n=4 lo, hi, un, ax
	setdimlabel 0, 0, $kEnergyDimLabel, lo, hi, un, ax
	setdimlabel 0, 1, $kAngleDimLabel, lo, hi, un, ax
	setdimlabel 0, 2, $kScanDimLabel, lo, hi, un, ax
	setdimlabel 0, 3, $kDataDimLabel, lo, hi, un, ax

	// default values
	lo[%$kEnergyDimLabel] = 0
	hi[%$kEnergyDimLabel] = 1
	un[%$kEnergyDimLabel] = "eV"
	ax[%$kEnergyDimLabel] = "Ekin"

	lo[%$kAngleDimLabel] = -1
	hi[%$kAngleDimLabel] = 1
	un[%$kAngleDimLabel] = "arb."
	ax[%$kAngleDimLabel] = "slice"

	lo[%$kScanDimLabel] = 0
	hi[%$kScanDimLabel] = 1
	un[%$kScanDimLabel] = "arb."
	ax[%$kScanDimLabel] = "scan"

	lo[%$kDataDimLabel] = 0
	hi[%$kDataDimLabel] = 0
	un[%$kDataDimLabel] = "arb."
	ax[%$kDataDimLabel] = "value"

	wave /SDFR=attrDF /T /Z LensMode
	wave /Z ChannelBegin = find_scale_wave("ScientaChannelBegin", dataDF, scanDF, attrDF)
	wave /Z ChannelEnd = find_scale_wave("ScientaChannelEnd", dataDF, scanDF, attrDF)
	wave /Z SliceBegin = find_scale_wave("ScientaSliceBegin", dataDF, scanDF, attrDF)
	wave /Z SliceEnd = find_scale_wave("ScientaSliceEnd", dataDF, scanDF, attrDF)

	// lens mode can give more detail
	if (waveexists(LensMode) && (numpnts(LensMode) >= 1))
		strswitch(LensMode[0])
			case "Angular45":
				lo[%$kAngleDimLabel] = -45/2
				hi[%$kAngleDimLabel] = +45/2
				un[%$kAngleDimLabel] = "°"
				ax[%$kAngleDimLabel] = "angle"
				break
			case "Angular60":
				lo[%$kAngleDimLabel] = -60/2
				hi[%$kAngleDimLabel] = +60/2
				un[%$kAngleDimLabel] = "°"
				ax[%$kAngleDimLabel] = "angle"
				break
			case "Transmission":
				un[%$kAngleDimLabel] = "arb."
				ax[%$kAngleDimLabel] = "offset"
				break
		endswitch
	endif

	// best option if scales are explicit in separate waves
	if (waveexists(ChannelBegin) && waveexists(ChannelEnd) && (numpnts(ChannelBegin) >= 1) && (numpnts(ChannelEnd) >= 1))
		lo[%$kEnergyDimLabel] = ChannelBegin[0]
		hi[%$kEnergyDimLabel] = ChannelEnd[0]
	endif
	if (waveexists(SliceBegin) && waveexists(SliceEnd) && (numpnts(SliceBegin) >= 1) && (numpnts(SliceEnd) >= 1))
		lo[%$kAngleDimLabel] = SliceBegin[0]
		hi[%$kAngleDimLabel] = SliceEnd[0]
	endif

	wave /z /t /SDFR=scanDF ScanWritables
	if (WaveExists(ScanWritables))
		wave /z /SDFR=scanDF scanner = $ScanWritables[0]
		if (!WaveExists(scanner))
			wave /z /SDFR=attrDF scanner = $ScanWritables[0]
		endif
		if (WaveExists(scanner) && (numpnts(scanner) >= 1))
			lo[%$kScanDimLabel] = scanner[0]
			hi[%$kScanDimLabel] = scanner[numpnts(scanner)-1]
			ax[%$kScanDimLabel] = NameOfWave(scanner)
			strswitch(NameOfWave(scanner))
				case "Eph":
					ax[%$kScanDimLabel] = "photon energy"
					un[%$kScanDimLabel] = "eV"
					break
				case "ManipulatorX":
				case "ManipulatorY":
				case "ManipulatorZ":
				case "FocusYTrans":
				case "FocusZTrans":
				case "RefocusYTrans":
				case "RefocusZTrans":
				case "ExitSlitY":
					un[%$kScanDimLabel] = "mm"
					break
				case "ExitSlit":
					un[%$kScanDimLabel] = "µm"
					break
				case "ManipulatorTheta":
				case "ManipulatorTilt":
				case "ManipulatorPhi":
					un[%$kScanDimLabel] = "°"
					break
				case "FocusXRot":
				case "FocusYRot":
				case "FocusZRot":
				case "RefocusXRot":
				case "RefocusYRot":
				case "RefocusZRot":
					un[%$kScanDimLabel] = "mrad"
					break
			endswitch
		endif
	endif
end
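
// usage sketch (added for illustration, not part of the original API):
// let ps_detect_scale() fill the four scale waves from the datasets in the
// current data folder. the waves can be free waves; the function
// redimensions them and sets the dimension labels itself.
static function example_detect_scale()
	make /n=4 /free lo, hi
	make /n=4 /t /free ax, un
	ps_detect_scale(ax, lo, hi, un)
	print ax[%energy], lo[%energy], hi[%energy], un[%energy]
end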

/// set the dimension scales of a dataset.
///
/// the function is normally called by ps_scale_datasets() but can also be used independently.
/// the limits and units must be given as function arguments with proper dimension labels.
///
/// the provided limit and unit waves must have dimension labels
/// matching the -1 index dimension labels of the data wave,
/// such as set by the ps_detect_scale() function.
/// the scale parameters are extracted by keyword, e.g.,
/// @arg `lo[%%energy]` analyser energy dimension.
/// @arg `lo[%%angle]` analyser angle dimension.
/// @arg `lo[%%scan]` scan dimension.
/// @arg `lo[%%data]` data dimension.
///
/// if the data dimension label and unit are at their defaults ("value" and "arb.", respectively),
/// the function tries to read them from the existing wave note ("AxisLabelD" and "AxisUnitD"),
/// or sets them based on the wave name if the name is one of the known measurement variables:
/// "ScientaImage", "ImageAngleDistribution", "ScientaAngleDistribution", "ScientaSpectrum", "ImageEnergyDistribution", "ScientaEnergyDistribution",
/// "SampleCurrent", "RefCurrent", "AuxCurrent", "MachineCurrent".
///
/// @param data data wave to be scaled.
/// dimension labels (index -1) must be set to match the limit waves.
///
/// @param ax axis labels.
/// the axis labels are written to the wave note in the format `AxisLabel%%s=%%s`,
/// where `X`, `Y`, `Z`, or `D` is substituted for the first placeholder
/// and the label for the second one.
///
/// @param lo lower limits.
/// the lower limits are applied using the SetScale operation.
///
/// @param hi upper limits.
/// the upper limits are applied using the SetScale operation.
///
/// @param un unit labels.
/// the unit labels are applied using the SetScale operation.
///
/// @version this function supports regions as of version 1.03.
///
function ps_scale_dataset_2(data, ax, lo, hi, un)
	wave data
	wave /t ax
	wave lo
	wave hi
	wave /t un

	string snote = note(data)
	string sdim

	sdim = GetDimLabel(data, 0, -1)
	if (strlen(sdim))
		setscale /i x lo[%$sdim], hi[%$sdim], un[%$sdim], data
		snote = ReplaceStringByKey("AxisLabelX", snote, ax[%$sdim], "=", "\r")
	endif

	sdim = GetDimLabel(data, 1, -1)
	if (strlen(sdim))
		setscale /i y lo[%$sdim], hi[%$sdim], un[%$sdim], data
		snote = ReplaceStringByKey("AxisLabelY", snote, ax[%$sdim], "=", "\r")
	endif

	sdim = GetDimLabel(data, 2, -1)
	if (strlen(sdim))
		setscale /i z lo[%$sdim], hi[%$sdim], un[%$sdim], data
		snote = ReplaceStringByKey("AxisLabelZ", snote, ax[%$sdim], "=", "\r")
	endif

	string data_unit = un[%$kDataDimLabel]
	string data_label = ax[%$kDataDimLabel]
	string s
	variable def = (cmpstr(data_unit, "arb.") == 0) && (cmpstr(data_label, "value") == 0)

	if (def)
		s = StringByKey("AxisLabelD", snote, "=", "\r")
		if (strlen(s) > 0)
			data_label = s
			def = 0
		endif
		s = StringByKey("AxisUnitD", snote, "=", "\r")
		if (strlen(s) > 0)
			data_unit = s
			def = 0
		endif
	endif

	if (def)
		strswitch(NameOfWave(data))
			case "ScientaImage":
			case "ImageAngleDistribution":
			case "ScientaAngleDistribution":
			case "ScientaSpectrum":
			case "ImageEnergyDistribution":
			case "ScientaEnergyDistribution":
				data *= kDetectorSensitivity
				data_unit = "counts"
				data_label = "intensity"
				def = 0
				break
			case "SampleCurrent":
			case "RefCurrent":
			case "AuxCurrent":
				data_unit = "A"
				data_label = "current"
				def = 0
				break
			case "MachineCurrent":
				data_unit = "mA"
				data_label = "current"
				def = 0
				break
		endswitch
	endif

	setscale d 0, 0, data_unit, data
	snote = ReplaceStringByKey("AxisLabelD", snote, data_label, "=", "\r")
	snote = ReplaceStringByKey("AxisUnitD", snote, data_unit, "=", "\r")
	snote = ReplaceStringByKey("Dataset", snote, NameOfWave(data), "=", "\r")
	note /k data, snote
end
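
// usage sketch (illustrative): detect the scales in the current data folder
// and apply them to a data wave whose -1 index dimension labels have been
// set, e.g. by ps_set_dimlabels2().
static function example_scale_dataset(data)
	wave data

	make /n=4 /free lo, hi
	make /n=4 /t /free ax, un
	ps_detect_scale(ax, lo, hi, un)
	ps_scale_dataset_2(data, ax, lo, hi, un)
end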

/// load and reduce the ScientaImage dataset of the first scan of a PShell data file.
///
/// the resulting dataset is reduced in one image dimension by a user-defined reduction function,
/// e.g. by region-of-interest integration, curve fitting, etc.
/// cf. @ref adh5_default_reduction for further details.
///
/// the function loads the dataset image by image using the hyperslab option
/// and applies a custom reduction function to each image.
/// the results from the reduction function are composed into one result wave.
/// the raw data are discarded.
///
/// if the data is from the electron analyser driver and some special attributes are included,
/// the function will set the scales of the image dimensions.
///
/// by default, the reduction function is called in separate threads to reduce the total loading time.
/// (see the global variable psh5_perf_secs which reports the total run time of the function.)
/// the effect varies depending on the balance between file loading (image size)
/// and data processing (complexity of the reduction function).
/// for debugging the reduction function, multi-threading can be disabled.
///
/// @param ANickName destination folder name (top level under root).
///
/// @param APathName igor symbolic path name. can be empty if the path is specified in AFileName or a dialog box should be displayed.
///
/// @param AFileName if empty a dialog box shows up.
///
/// @param reduction_func custom data reduction function.
/// this can be any user-defined function which has the same parameters as @ref adh5_default_reduction.
/// some reduction functions are predefined in the @ref PearlScientaPreprocess module.
///
/// @param reduction_param parameter string for the reduction function.
///
/// @param dataset name of dataset to load, optionally including group path relative to scan (scan 1).
/// by default, the function looks for a ScientaImage dataset.
/// in a multi-region scan, this will be region 1.
/// to select region 2, e.g., use `dataset="region2/ScientaImage"`.
///
/// @param progress progress window.
/// @arg 1 (default) show progress window
/// @arg 0 do not show progress window
///
/// @param nthreads
/// @arg -1 (default) use as many threads as there are processor cores (in addition to the main thread).
/// @arg 0 use main thread only (for debugging and profiling).
/// @arg >= 1 use a fixed number of (additional) threads.
///
/// @return semicolon-separated list of the loaded datasets `ReducedData1`, `ReducedData2`, etc. if successful.
/// auxiliary waves, scan positions, attributes are loaded but not listed in the string.
/// empty string if an error occurred.
/// error messages are printed to the history.
///
/// @return global string s_filepath in new data folder contains the full file path on disk.
///
/// @return global string s_scanpaths in new data folder contains a list of scan groups inside the file.
///
function /s psh5_load_reduced(ANickName, APathName, AFileName, reduction_func, reduction_param, [dataset, progress, nthreads])
	string ANickName
	string APathName
	string AFileName
	funcref adh5_default_reduction reduction_func
	string reduction_param
	string dataset
	variable progress
	variable nthreads

	if (ParamIsDefault(progress))
		progress = 1
	endif
	if (ParamIsDefault(nthreads))
		nthreads = -1
	endif

	dfref saveDF = GetDataFolderDFR()

	// performance monitoring
	variable timerRefNum
	variable /g psh5_perf_secs
	timerRefNum = startMSTimer

	variable fileID = psh5_open_file(ANickName, APathName, AFileName)
	string wavenames = ""
	if (fileID)
		dfref fileDF = GetDataFolderDFR()
		svar s_filepath
		svar s_scanpaths
		AFileName = s_filepath
		print "loading " + s_filepath + "\r"

		variable ig = 0
		variable ng = ItemsInList(s_scanpaths)
		string scanpath
		string folder
		string positioners
		string positioner
		string positionerpath

		scanpath = StringFromList(ig, s_scanpaths)
		folder = ReplaceString("/", scanpath, "")
		folder = ReplaceString(" ", folder, "")
		folder = PearlCleanupName(folder)
		setdatafolder fileDF
		newdatafolder /s /o $folder
		dfref dataDF = GetDataFolderDFR()
		positioners = psh5_load_scan_meta(fileID, scanpath)
		newdatafolder /s /o attr
		killwaves /a/z
		psh5_load_scan_attrs(fileID, scanpath)
		setdatafolder dataDF
		wave /t /z ScanWritables
		if (waveexists(ScanWritables) && (numpnts(ScanWritables) >= 1))
			positioner = ScanWritables[0]
			if (strlen(positioner) > 0)
				positionerpath = scanpath + "/" + positioner
				positionerpath = ReplaceString("//", positionerpath, "/")
				HDF5LoadData /O /Q /Z fileID, positionerpath
			endif
		endif

		setdatafolder dataDF
		string datasets = psh5_list_scan_datasets(fileID, scanpath, include_regions=1)
		if (ParamIsDefault(dataset))
			dataset = select_dataset(datasets, "ScientaImage")
		endif
		wavenames = psh5_load_dataset_reduced(fileID, scanpath, dataset, reduction_func, reduction_param, progress=progress, nthreads=nthreads)

		psh5_close_file(fileID)
	endif

	if (timerRefNum >= 0)
		psh5_perf_secs = stopMSTimer(timerRefNum) / 1e6
	endif

	setdatafolder saveDF
	return wavenames
end
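
// usage sketch (illustrative): load and reduce the ScientaImage of the first
// scan. the empty path and file name pop up an open-file dialog, and the
// nickname "pshell1" is a placeholder. the prototype function
// adh5_default_reduction stands in for a real reduction function here - an
// actual analysis would pass one of the functions from the
// PearlScientaPreprocess module (or a user-defined one) plus its parameters.
static function example_load_reduced()
	string wavenames
	wavenames = psh5_load_reduced("pshell1", "", "", adh5_default_reduction, "")
	print wavenames
end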

/// load a reduced dataset from the open PShell HDF5 file.
///
/// the function loads the dataset image by image using the hyperslab option
/// and applies a custom reduction function to each image.
/// the results from the reduction function are written to the `ReducedData1`, `ReducedData2`, etc. waves.
/// the raw data are discarded.
///
/// by default, the reduction function is called in separate threads to reduce the total loading time.
/// (see the global variable psh5_perf_secs which reports the total run time of the function.)
/// the effect varies depending on the balance between file loading (image size)
/// and data processing (complexity of the reduction function).
/// for debugging the reduction function, multi-threading can be disabled.
///
/// if the reduction function requires the image waves to be scaled properly,
/// the attributes must have been loaded by psh5_load_scan_attrs() before.
/// in this case, the scales of the result waves are also set by the function.
/// otherwise, the results can also be scaled by ps_scale_dataset() later.
///
/// @param fileID ID of open HDF5 file from psh5_open_file().
///
/// @param scanpath path to scan group in the HDF5 file.
///
/// @param datasetname name of the dataset.
/// this must currently be "ScientaImage", other data is not supported.
/// the name of the loaded wave is a cleaned up version of the dataset name.
/// the name can include the region name as a relative path, e.g. "region1/ScientaImage".
/// in this case, the dataset is loaded into a sub-folder named "region1".
///
/// @param reduction_func custom data reduction function.
/// this can be any user-defined function which has the same parameters as @ref adh5_default_reduction.
/// some reduction functions are predefined in the @ref PearlScientaPreprocess module.
///
/// @param reduction_param parameter string for the reduction function.
///
/// @param progress progress window.
/// @arg 1 (default) show progress window
/// @arg 0 do not show progress window
///
/// @param nthreads
/// @arg -1 (default) use as many threads as there are processor cores (in addition to the main thread).
/// @arg 0 use main thread only (for debugging and profiling).
/// @arg >= 1 use a fixed number of (additional) threads.
///
/// @return semicolon-separated list of the loaded datasets `ReducedData1`, `ReducedData2`, etc. if successful.
/// auxiliary waves, scan positions, attributes are loaded but not listed in the string.
/// empty string if an error occurred.
/// error messages are printed to the history.
///
/// @version this function supports regions as of version 1.03.
///
function /s psh5_load_dataset_reduced(fileID, scanpath, datasetname, reduction_func, reduction_param, [progress, nthreads])
	variable fileID
	string scanpath
	string datasetname
	funcref adh5_default_reduction reduction_func
	string reduction_param
	variable progress
	variable nthreads

	if (ParamIsDefault(progress))
		progress = 1
	endif
	if (ParamIsDefault(nthreads))
		nthreads = -1
	endif

	dfref base_df = GetDataFolderDFR()
	variable result = 0
	string datasetpath
	string datawavename
	string wavenames = ""

	datasetpath = scanpath + "/" + datasetname
	datasetpath = ReplaceString("//", datasetpath, "/")
	datawavename = StringFromList(ItemsInList(datasetpath, "/") - 1, datasetpath, "/")

	string regionname
	string regionpath
	if (ItemsInList(datasetname, "/") >= 2)
		regionname = StringFromList(0, datasetname, "/")
		regionpath = ReplaceString("//", scanpath + "/" + regionname, "/")
		datasetname = RemoveListItem(0, datasetname, "/")
		NewDataFolder /o/s $regionname
	else
		regionname = ""
		regionpath = scanpath
	endif

	STRUCT HDF5DataInfo di	// Defined in HDF5 Browser.ipf.
	InitHDF5DataInfo(di)
	variable err = HDF5DatasetInfo(fileID, datasetpath, 0, di)
	if (err != 0)
		print "error accessing " + datasetpath
		result = -1
		return wavenames
	endif
	if (di.ndims < 2)
		print "error: rank of dataset < 2"
		result = -2
		return wavenames
	elseif (di.ndims < 3)
		progress = 0
	endif

	variable idx, idy, idz, idt
	variable transpose = WhichListItem(datawavename, kTransposedDatasets) >= 0
	if (transpose)
		idx = 1
		idy = 0
	else
		idx = 0
		idy = 1
	endif
	idz = 2
	idt = 3

	variable nx, ny, nz, nt, nzt
	nx = di.dims[idx]
	ny = di.dims[idy]
	nz = di.dims[idz]
	nt = di.dims[idt]
	// adjust singleton dimensions
	nz = max(nz, 1)
	nt = max(nt, 1)
	nzt = nz * nt

	// load data image by image
	HDF5MakeHyperslabWave("slab", max(di.ndims, 4))
	wave slab
	slab[][%Start] = 0
	slab[][%Stride] = 1
	slab[][%Count] = 1
	slab[][%Block] = 1
	slab[idx][%Block] = nx
	slab[idy][%Block] = ny

	// set up multi threading
	if (nthreads < 0)
		nthreads = ThreadProcessorCount
	endif
	if (nthreads > 0)
		variable threadGroupID = ThreadGroupCreate(nthreads)
		variable ithread
		for (ithread = 0; ithread < nthreads; ithread += 1)
			ThreadStart threadGroupID, ithread, reduce_slab_worker(reduction_func)
		endfor
	else
		make /n=(nzt) /df /free processing_folders
	endif

	if (progress)
		display_progress_panel("HDF5 Import", "Loading data (step 1 of 2)...", nzt)
	endif

	// create a template wave with the correct scales and labels
	make /n=(nx,ny) /d /o $datawavename
	wave template = $datawavename
	ps_set_dimlabels2(template, datawavename)
	ps_scale_dataset(template)

	variable iz, it, izt
	string dfname
	variable iw, nw
	string sw
	make /n=0 /free /wave result_waves

	izt = 0
	for (iz = 0; iz < nz; iz += 1)
		for (it = 0; it < nt; it += 1)
			// load hyperslab
			slab[idz][%Start] = iz
			slab[idt][%Start] = it
			dfname = "processing_" + num2str(izt)
			newdatafolder /s $dfname
			HDF5LoadData /O /Q /Z /SLAB=slab /N=slabdata fileID, datasetpath

			// send to processing queue
			duplicate template, image
			variable /g r_index = iz
			variable /g s_index = it
			string /g func_param = reduction_param

			if (nthreads > 0)
				WaveClear image
				ThreadGroupPutDF threadGroupID, :
			else
				processing_folders[izt] = GetDataFolderDFR()
				make /n=1/d profile1, profile2
				wave slabdata
				wave /wave reduced_waves = reduce_slab_image(slabdata, image, reduction_func, func_param)
				variable /g func_result = numpnts(reduced_waves)
				adh5_get_result_waves(reduced_waves, "redw_", 0)
				WaveClear slabdata, image, reduced_waves
				setdatafolder ::
			endif

			izt += 1
			// progress window
			if (progress)
				if (update_progress_panel(izt))
					print "user abort"
					result = -4
					break
				endif
			endif
		endfor
	endfor

	killwaves /z slab, slabdata, template
	if (progress)
		update_progress_panel(0, message="Processing data (step 2 of 2)...")
	endif

	dfref dfr
	for (izt = 0; (izt < nzt) && (result == 0); izt += 1)
		if (nthreads > 0)
			do
				if (progress)
					if (update_progress_panel(izt))
						print "user abort"
						result = -4
						break
					endif
				endif
				dfr = ThreadGroupGetDFR(threadGroupID, 1000)
				if (DatafolderRefStatus(dfr) != 0)
					break
				endif
			while (1)
		else
			if (progress)
				if (update_progress_panel(izt))
					print "user abort"
					result = -4
					break
				endif
			endif
			dfr = processing_folders[izt]
		endif

		if (result != 0)
			break
		endif

		nvar rr = dfr:r_index
		nvar ss = dfr:s_index
		nvar func_result = dfr:func_result

		if (func_result < 1)
			print "error during data reduction."
			result = -3
			break
		endif

		if (numpnts(result_waves) == 0)
			redimension /n=(func_result) result_waves
			for (iw = 0; iw < func_result; iw += 1)
				sw = "redw_" + num2str(iw)
				wave profile = dfr:$sw
				sw = "ReducedData" + num2str(iw+1)
				make /n=(dimsize(profile, 0), nz, nt) /d /o $sw
				wave data = $sw
				setdimlabel 0, -1, $getdimlabel(profile, 0, -1), data
				setdimlabel 1, -1, $kScanDimLabel, data
				note data, note(profile)
				ps_scale_dataset(data)
				setscale /p x dimoffset(profile, 0), dimdelta(profile, 0), waveunits(profile, 0), data
				setscale d 0, 0, waveunits(profile, -1), data
				result_waves[iw] = data
			endfor
		endif
		for (iw = 0; iw < func_result; iw += 1)
			sw = "redw_" + num2str(iw)
			wave profile = dfr:$sw
			wave data = result_waves[iw]
			data[][rr][ss] = profile[p]
		endfor
	endfor

	if (nthreads > 0)
		variable tstatus = ThreadGroupRelease(threadGroupID)
		if (tstatus == -2)
			print "error: thread did not terminate properly."
			result = -5
		endif
	else
		for (izt = 0; izt < nzt; izt += 1)
			KillDataFolder /Z processing_folders[izt]
		endfor
	endif

	if (result == 0)
		nw = numpnts(result_waves)
		wavenames = ""
		for (iw = 0; iw < nw; iw += 1)
			wave data = result_waves[iw]
			if (nz == 1)
				redimension /n=(-1, 0, 0) data
			elseif (nt == 1)
				redimension /n=(-1, nz, 0) data
			endif
			wavenames += nameofwave(data) + ";"
		endfor
	endif
	if (progress)
		kill_progress_panel()
	endif

	setdatafolder base_df
	return wavenames
end
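
// usage sketch (illustrative): reduce a dataset from an already open file.
// the scan attributes are loaded into the attr sub-folder first so that the
// image scales can be detected. the nickname and the reduction function are
// placeholders, as in the example above.
static function example_load_dataset_reduced()
	dfref saveDF = GetDataFolderDFR()

	variable fileID = psh5_open_file("pshell1", "", "")
	if (fileID)
		svar s_scanpaths
		string scanpath = StringFromList(0, s_scanpaths)
		newdatafolder /s /o attr
		psh5_load_scan_attrs(fileID, scanpath)
		setdatafolder ::
		string wavenames = psh5_load_dataset_reduced(fileID, scanpath, "ScientaImage", adh5_default_reduction, "")
		print wavenames
		psh5_close_file(fileID)
	endif

	setdatafolder saveDF
end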

threadsafe static function reduce_slab_worker(reduction_func)
	funcref adh5_default_reduction reduction_func

	do
		// wait for job from main thread
		do
			dfref dfr = ThreadGroupGetDFR(0, 1000)
			if (DataFolderRefStatus(dfr) == 0)
				if (GetRTError(2))
					return 0 // no more jobs
				endif
			else
				break
			endif
		while (1)

		// get input data
		wave slabdata = dfr:slabdata
		wave image = dfr:image
		svar func_param = dfr:func_param
		nvar rr = dfr:r_index
		nvar ss = dfr:s_index

		// do the work
		newdatafolder /s outDF
		variable /g r_index = rr
		variable /g s_index = ss
		wave /wave reduced_waves = reduce_slab_image(slabdata, image, reduction_func, func_param)
		variable /g func_result = numpnts(reduced_waves)

		// send output to queue and clean up
		adh5_get_result_waves(reduced_waves, "redw_", 0)
		WaveClear slabdata, image, reduced_waves
		ThreadGroupPutDF 0, :
		KillDataFolder dfr
	while (1)

	return 0
end

threadsafe static function /wave reduce_slab_image(slabdata, image, reduction_func, reduction_param)
	wave slabdata
	wave image
	funcref adh5_default_reduction reduction_func
	string reduction_param

	// the multiplication by detector sensitivity assumes that we are loading a ScientaImage.
	image = slabdata[q][p][0][0] * kDetectorSensitivity

	return reduction_func(image, reduction_param)
end
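
// minimal sketch of a custom reduction function (illustrative, assuming the
// adh5_default_reduction prototype: the function receives the scaled detector
// image and a parameter string, and returns a wave reference wave of 1D
// result waves). this version integrates the image over its second dimension.
// ready-made reduction functions are predefined in the PearlScientaPreprocess
// module.
threadsafe function /wave example_integrate_reduction(source, param)
	wave source
	string param

	// sum the image over its second (column) dimension
	make /n=(dimsize(source, 0)) /d /free roi = 0
	variable icol
	variable ncol = dimsize(source, 1)
	for (icol = 0; icol < ncol; icol += 1)
		roi[] += source[p][icol]
	endfor
	setscale /p x, dimoffset(source, 0), dimdelta(source, 0), waveunits(source, 0), roi

	// return the result in a wave reference wave
	make /n=1 /free /wave results
	results[0] = roi
	return results
end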

/// load descriptive info from a PShell data file.
///
/// the info string lists the following information for each scan contained in the file:
/// - path of the scan group inside the file.
/// - number of scan positions.
/// - dataset names of scan positioners.
/// - dataset names of detectors.
///
/// @param APathName igor symbolic path name. can be empty if the path is specified in AFileName or a dialog box should be displayed
///
/// @param AFileName if empty a dialog box shows up
///
/// @return newline-terminated string.
///
function /s psh5_load_info(APathName, AFileName)
	string APathName
	string AFileName

	dfref saveDF = GetDataFolderDFR()
	dfref fileDF = NewFreeDataFolder()
	setdatafolder fileDF

	variable fileID
	string filepath
	string scanpaths
	variable nscans
	variable iscan
	string scanpath
	string info = ""

	HDF5OpenFile /P=$APathName /R fileID as AFileName
	if (v_flag == 0)
		filepath = s_path + s_filename
		scanpaths = psh5_list_scans(fileID)
		nscans = ItemsInList(scanpaths)
		for (iscan = 0; iscan < nscans; iscan += 1)
			scanpath = StringFromList(iscan, scanpaths)
			info = info + scanpath + "\r"
			info = info + psh5_load_scan_info(fileID, scanpath)
		endfor
		HDF5CloseFile fileID
	endif

	setdatafolder saveDF
	return info
end
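
// usage sketch (illustrative): print a summary of the scans in a data file
// selected interactively (empty path and file name show the open dialog).
static function example_print_info()
	print psh5_load_info("", "")
end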

/// load descriptive info from a PShell scan.
///
/// the info string contains up to four lines which are made up of the following information:
/// - number of scan positions.
/// - dataset names of scan positioners.
/// - dataset names of detectors (without region names).
/// - region names.
///
/// @param fileID ID of open HDF5 file from psh5_open_file().
///
/// @param scanpath path to scan group in the HDF5 file.
///
/// @return newline-terminated string.
///
function /s psh5_load_scan_info(fileID, scanpath)
	variable fileID
	string scanpath

	string info = ""
	string positions = ""
	string positioners = ""
	string readables = ""
	string detectors = ""
	string regions = ""

	psh5_load_scan_meta(fileID, scanpath)

	wave /z ScanDimensions
	wave /t /z ScanWritables
	wave /t /z ScanReadables
	wave /z ScanSteps

	if (WaveExists(ScanSteps) && (numpnts(ScanSteps) >= 1))
		// ScanSteps counts intervals - add 1 to get the number of positions
		ScanSteps += 1
		positions = "positions = (" + wave2list(ScanSteps, "%u", ",") + ")"
		info = AddListItem(positions, info, "\r", inf)
	endif
	if (WaveExists(ScanWritables) && (numpnts(ScanWritables) >= 1))
		positioners = "positioners = " + twave2list(ScanWritables, ",")
		info = AddListItem(positioners, info, "\r", inf)
	endif

	variable i, m, n
	string s
	if (WaveExists(ScanReadables) && (numpnts(ScanReadables) >= 1))
		readables = twave2list(ScanReadables, ",")
		n = ItemsInList(readables, ",")
		for (i = 0; i < n; i += 1)
			s = StringFromList(i, readables, ",")
			m = ItemsInList(s, "/")
			if (m > 1)
				// strip the region path so that each detector is listed only once
				s = StringFromList(m - 1, s, "/")
			endif
			if (WhichListItem(s, detectors, ",") < 0)
				detectors = AddListItem(s, detectors, ",", inf)
			endif
		endfor
		detectors = "detectors = " + detectors
		info = AddListItem(detectors, info, "\r", inf)
	endif

	regions = psh5_list_scan_regions(fileID, scanpath)
	if (strlen(regions) > 0)
		regions = "regions = " + regions
		info = AddListItem(regions, info, "\r", inf)
	endif

	return info
end

/// convert text wave to list.
///
static function /s twave2list(wt, sep)
	wave /t wt
	string sep

	string list = ""
	variable n = numpnts(wt)
	variable i
	for (i = 0; i < n; i += 1)
		list = AddListItem(wt[i], list, sep, inf)
	endfor

	return list
end
/// convert numeric wave to list.
///
static function /s wave2list(w, format, sep)
	wave w
	string format
	string sep

	string list = ""
	variable n = numpnts(w)
	variable i
	string s
	for (i = 0; i < n; i += 1)
		sprintf s, format, w[i]
		list = AddListItem(s, list, sep, inf)
	endfor

	return list
end

/// kill any waves matching a pattern in the experiment.
///
/// this may be used to kill big waves of original data before saving.
///
/// @param dfr data folder to search.
///
/// @param pattern wave name pattern for stringmatch(). wildcards are allowed.
///
/// @param recurse if non-zero, recurse into sub-folders.
///
/// @param killed (optional) list of previously killed wave names to append to.
/// this is used by the recursive calls and is normally omitted.
///
/// @return semicolon-separated list of the names of matching waves.
/// a wave is killed only if it is not in use, but its name is listed in any case.
///
function /s kill_matching_waves(dfr, pattern, recurse, [killed])
	DFREF dfr
	string pattern
	variable recurse
	string killed

	if (ParamIsDefault(killed))
		killed = ""
	endif

	string s
	string r
	variable index = 0
	do
		Wave/Z w = WaveRefIndexedDFR(dfr, index)
		if (!WaveExists(w))
			break
		endif

		s = NameOfWave(w)
		if (stringmatch(s, pattern))
			killwaves /z w
			killed = AddListItem(s, killed, ";", Inf)
		endif

		index += 1
	while(1)

	if (recurse)
		Variable numChildDataFolders = CountObjectsDFR(dfr, 4)
		Variable i
		for(i=0; i<numChildDataFolders; i+=1)
			String childDFName = GetIndexedObjNameDFR(dfr, 4, i)
			DFREF childDFR = dfr:$childDFName
			killed = kill_matching_waves(childDFR, pattern, 1, killed=killed)
		endfor
	endif

	return killed
end
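
// usage sketch (illustrative): kill all ScientaImage waves in the experiment
// before saving. the pattern is matched by stringmatch, so wildcards work.
static function example_kill_images()
	string killed = kill_matching_waves(root:, "ScientaImage*", 1)
	print killed
end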