#pragma rtGlobals=3 // Use modern global access method and strict wave access.
|
||
#pragma IgorVersion = 6.2
|
||
#pragma ModuleName = PearlAreaImport
|
||
#include <HDF5 Browser>
|
||
#include "pearl-compat"
|
||
#include "pearl-gui-tools"
|
||
|
||
// copyright (c) 2013-18 Paul Scherrer Institut
|
||
//
|
||
// Licensed under the Apache License, Version 2.0 (the "License");
|
||
// you may not use this file except in compliance with the License.
|
||
// You may obtain a copy of the License at
|
||
// http://www.apache.org/licenses/LICENSE-2.0
|
||
|
||
/// @file
|
||
/// @brief HDF5 file import from EPICS area detectors
|
||
/// @ingroup ArpesPackage
|
||
///
|
||
///
|
||
/// HDF5 file import from EPICS area detectors
|
||
/// such as CCD cameras, 2D electron analysers
|
||
///
|
||
/// as of Igor 6.3, Igor can open datasets of up to rank 4.
|
||
/// i.e. the extra dimension Y of the file plugin cannot be used.
|
||
/// the extra dimensions N and X are supported.
|
||
///
|
||
/// @author matthias muntwiler, matthias.muntwiler@psi.ch
|
||
///
|
||
/// @copyright 2013-18 Paul Scherrer Institut @n
|
||
/// Licensed under the Apache License, Version 2.0 (the "License"); @n
|
||
/// you may not use this file except in compliance with the License. @n
|
||
/// You may obtain a copy of the License at
|
||
/// http://www.apache.org/licenses/LICENSE-2.0
|
||
|
||
/// @namespace PearlAreaImport
|
||
/// @brief HDF5 file import from EPICS area detectors
|
||
///
|
||
/// PearlAreaImport is declared in @ref pearl-area-import.ipf.
|
||
|
||
/// callback function for drag&drop of HDF5 files into Igor.
|
||
///
|
||
static function BeforeFileOpenHook(refNum,fileName,path,type,creator,kind)
    variable refNum, kind
    string fileName, path, type, creator

    variable handledOpen = 0   // return value: 1 = file handled here, Igor should not open it

    //PathInfo $path
    //string FilePath = s_path + filename
    // default nickname = cleaned-up file name without path and extension
    string NickName = PearlCleanupName(ParseFilePath(3, FileName, ":", 0, 0))
    string FileExt = LowerStr(ParseFilePath(4, FileName, ":", 0, 0))
    string result = ""

    // override nickname with custom setting:
    // if the global string gsCustomNickName exists, nicknames are formed as
    // gsCustomNickName + running index (index kept in global gvNickNameIndex)
    svar /z cnn = gsCustomNickName
    if (svar_exists(cnn))
        if (exists("gvNickNameIndex") != 2)
            // exists() == 2 denotes a global numeric variable; create it on first use
            variable/g gvNickNameIndex = 1
        endif
        nvar nni = gvNickNameIndex
        NickName = cnn + num2str(nni)
        nni += 1
    endif

    // only HDF5 files (extension "h5") are handled;
    // result is the full path of the loaded file, or "" if not loaded
    if (stringmatch(FileExt, "h5") == 1)
        result = adh5_load_complete(NickName, path, FileName)
    endif

    // remember the most recent import in global strings (created in the current data folder)
    string/g s_latest_datafile = result
    string/g s_latest_nickname = nickname

    handledOpen = strlen(result) > 0
    if (handledOpen)
        // the file was imported through the HDF5 XOP; release Igor's own file handle
        close refnum
    endif

    return handledOpen // 1 tells Igor not to open the file
End
|
||
|
||
/// generate the name of a data folder based on a file name.
|
||
///
|
||
/// if the file name follows the naming convention source-date-index.extension,
|
||
/// the function tries to generate the nick name as source_date_index.
|
||
/// otherwise it's just a cleaned up version of the file name.
|
||
///
|
||
/// date must be in yyyymmdd or yymmdd format and is clipped to the short yymmdd format.
|
||
/// index should be a running numeric index of up to 6 digits, or the time encoded as hhmmss.
|
||
/// however, in the current version index can be any string that can be a valid Igor folder name.
|
||
///
|
||
/// @param filename file name, including extension. can also include a folder path (which is ignored).
|
||
/// the extension is currently ignored, but may be used to select the parent folder in a later version.
|
||
/// @param ignoredate if non-zero, the nick name will not include the date part.
|
||
/// defaults to zero.
|
||
/// @param sourcename nick name of the data source.
|
||
/// by default, the function tries to detect the source from the file name.
|
||
/// this option can be used to override auto-detection.
|
||
/// the automatic source names are:
|
||
/// sci (scienta by area detector),
|
||
/// psh (pshell),
|
||
/// sl (optics slit camera by area detector),
|
||
/// es (end station camera by area detector),
|
||
/// xy (unidentified).
|
||
/// @param unique if non-zero, the resulting name is made a unique data folder name in the current data folder
|
||
/// defaults to zero.
|
||
///
|
||
function /s ad_suggest_foldername(filename, [ignoredate,sourcename,unique])
    string filename
    variable ignoredate
    string sourcename
    variable unique

    // resolve defaults of the optional arguments
    if (ParamIsDefault(ignoredate))
        ignoredate = 0
    endif
    if (ParamIsDefault(unique))
        unique = 0
    endif

    // strip folder path and extension from the file name
    string stem = ParseFilePath(3, filename, ":", 0, 0)
    string ext = ParseFilePath(4, filename, ":", 0, 0)

    // auto-detect the data source from the file name
    // (case-insensitive substring search, first match wins)
    string detected
    if (strsearch(stem, "scienta", 0, 2) >= 0)
        detected = "sci"
    elseif (strsearch(stem, "pshell", 0, 2) >= 0)
        detected = "psh"
    elseif (strsearch(stem, "OP-SL", 0, 2) >= 0)
        detected = "sl"
    elseif (strsearch(stem, "ES-PS", 0, 2) >= 0)
        detected = "es"
    else
        detected = "xy"
    endif
    if (ParamIsDefault(sourcename))
        sourcename = detected
    endif

    // files following the source-date-index convention map to source_date_index;
    // anything else is just cleaned up for use as a folder name
    string nickname
    if (ItemsInList(stem, "-") >= 3)
        string datestr = StringFromList(1, stem, "-")
        if (strlen(datestr) == 8)
            // clip yyyymmdd to the short yymmdd form
            datestr = datestr[2, 7]
        endif
        string indexstr = StringFromList(2, stem, "-")
        if (ignoredate)
            sprintf nickname, "%s_%s", sourcename, indexstr
        else
            sprintf nickname, "%s_%s_%s", sourcename, datestr, indexstr
        endif
    else
        nickname = PearlCleanupName(stem)
    endif

    // optionally disambiguate against existing data folders in the current folder
    if (unique && CheckName(nickname, 11))
        nickname = UniqueName(nickname + "_", 11, 0)
    endif

    return nickname
end
|
||
|
||
/// load area detector data files selected in a file dialog window
|
||
///
|
||
/// @param APathName Igor symbolic path name.
|
||
/// if empty, Igor will choose a folder on its own
|
||
function ad_load_dialog(APathName)
    string APathName

    // multi-selection open dialog for area detector HDF5 files
    variable refNum
    string message = "Select data files"
    string filefilters = "Area Detector HDF5 Files (*.h5):.h5;"
    filefilters += "All Files:.*;"

    // start the dialog in the given symbolic path (if any);
    // /D means display only -- no file is actually opened
    PathInfo /S $APathName
    Open /D /R /F=filefilters /M=message /MULT=1 refNum
    string filepaths = S_fileName

    dfref saveDF = GetDataFolderDFR()
    setdatafolder root:

    // S_fileName holds the selected full paths separated by carriage returns;
    // it is empty if the user cancelled, in which case the loop body never runs
    variable nsel = ItemsInList(filepaths, "\r")
    variable i
    for (i = 0; i < nsel; i += 1)
        string filepath = StringFromList(i, filepaths, "\r")
        adh5_load_complete(ad_suggest_foldername(filepath), "", filepath)
    endfor

    setdatafolder saveDF
end
|
||
|
||
/// import everything from a HDF5 file created by the Area Detector software.
|
||
///
|
||
/// if the data is from the electron analyser driver and some special attributes are included,
|
||
/// the function will set the scales of the image dimensions.
|
||
///
|
||
/// @param ANickName destination folder name (top level under root)
|
||
/// @param APathName igor symbolic path name. can be empty if the path is specified in FileName or a dialog box should be displayed
|
||
/// @param AFileName if empty a dialog box shows up
|
||
/// @param load_data 1 (default): load data; 0: do not load data
|
||
/// @param load_attr 1 (default): load attributes; 0: do not load attributes
|
||
/// for proper wave scaling, the attributes must be loaded
|
||
function /s adh5_load_complete(ANickName, APathName, AFileName, [load_data, load_attr])
    string ANickName
    string APathName
    string AFileName
    variable load_data
    variable load_attr

    // optional arguments default to "load everything"
    if (ParamIsDefault(load_data))
        load_data = 1
    endif
    if (ParamIsDefault(load_attr))
        load_attr = 1
    endif

    // everything is imported into root:$ANickName (created if necessary)
    dfref saveDF = GetDataFolderDFR()
    setdatafolder root:
    newdatafolder /s/o $("root:" + ANickName)

    // open file
    variable fileID
    string instrumentpath = "/entry/instrument/"
    string detectorpath = instrumentpath + "detector/"
    string attributespath = instrumentpath + "NDAttributes/"
    string datasetname
    string datawavename

    // performance monitoring (elapsed time in global adh5_perf_secs)
    variable timerRefNum
    variable /g adh5_perf_secs
    timerRefNum = startMSTimer

    // avoid compilation error if HDF5 XOP has not been loaded
#if Exists("HDF5OpenFile")
    // bug fix: /Z reports open errors through v_flag instead of aborting,
    // which makes the error branch below reachable
    // (consistent with adh5_load_preview and adh5_load_info)
    HDF5OpenFile /P=$APathName/R/Z fileID as AFileName
    if (v_flag == 0)
        AFileName = s_path + s_filename
        print "loading " + s_filename + "\r"

        if (load_data)
            adh5_load_detector_slabs(fileID, detectorpath)
        endif
        if (load_attr)
            // attributes go into the attr subfolder;
            // they are required for proper wave scaling below
            newdatafolder /o/s attr
            adh5_loadattr_all(fileID, attributespath)
            setdatafolder ::
        endif

        wave /z data
        if (waveexists(data))
            //adh5_redim(data) // not to be used with adh5_load_detector_slabs
            adh5_scale(data)
        endif

        HDF5CloseFile fileID
    else
        // error message consistent with adh5_load_preview
        print "error opening file " + AFileName
        AFileName = ""
    endif
#else
    Abort "HDF5 XOP not loaded."
#endif

    if (timerRefNum >= 0)
        adh5_perf_secs = stopMSTimer(timerRefNum) / 1e6
    endif

    setdatafolder saveDF
    // full path of the loaded file, or "" on failure
    return AFileName
end
|
||
|
||
/// load and reduce a dataset from a HDF5 file created by the Area Detector software.
|
||
///
|
||
/// the resulting dataset is reduced in one image dimension by a user-defined reduction function,
|
||
/// e.g. by region-of-interest integration, curve fitting, etc.
|
||
///
|
||
/// the function loads the dataset image by image using the hyperslab option
|
||
/// and applies a custom reduction function to each image.
|
||
/// the results from the reduction function are composed into one result wave.
|
||
/// the raw data are discarded.
|
||
///
|
||
/// if the data is from the electron analyser driver and some special attributes are included,
|
||
/// the function will set the scales of the image dimensions.
|
||
///
|
||
/// @param ANickName destination folder name (top level under root)
|
||
/// @param APathName igor symbolic path name. can be empty if the path is specified in FileName or a dialog box should be displayed
|
||
/// @param AFileName if empty a dialog box shows up
|
||
///
|
||
/// @param reduction_func custom reduction function
|
||
/// (any user-defined function which has the same parameters as adh5_default_reduction())
|
||
/// @param reduction_param parameter string for the reduction function
|
||
///
|
||
/// @param load_data 1 (default): load data; 0: do not load data
|
||
/// @param load_attr 1 (default): load attributes; 0: do not load attributes
|
||
/// for proper wave scaling, the attributes must be loaded
|
||
/// @param progress 1 (default): show progress window; 0: do not show progress window
|
||
///
|
||
function /s adh5_load_reduced(ANickName, APathName, AFileName, reduction_func, reduction_param, [load_data, load_attr, progress])
    string ANickName
    string APathName
    string AFileName

    funcref adh5_default_reduction reduction_func
    string reduction_param

    variable load_data
    variable load_attr
    variable progress

    // optional arguments default to "load everything, show progress"
    if (ParamIsDefault(load_data))
        load_data = 1
    endif
    if (ParamIsDefault(load_attr))
        load_attr = 1
    endif
    if (ParamIsDefault(progress))
        progress = 1
    endif

    // everything is imported into root:$ANickName (created if necessary)
    dfref saveDF = GetDataFolderDFR()
    setdatafolder root:
    newdatafolder /s/o $("root:" + ANickName)

    // open file
    variable fileID
    string instrumentpath = "/entry/instrument/"
    string detectorpath = instrumentpath + "detector/"
    string attributespath = instrumentpath + "NDAttributes/"
    string datasetname
    string datawavename

    // performance monitoring (elapsed time in global adh5_perf_secs)
    variable timerRefNum
    variable /g adh5_perf_secs
    timerRefNum = startMSTimer

    // avoid compilation error if HDF5 XOP has not been loaded
#if Exists("HDF5OpenFile")
    // bug fix: /Z reports open errors through v_flag instead of aborting,
    // which makes the error branch below reachable
    // (consistent with adh5_load_preview and adh5_load_info)
    HDF5OpenFile /P=$APathName/R/Z fileID as AFileName
    if (v_flag == 0)
        AFileName = s_path + s_filename
        print "loading " + s_filename + "\r"

        // attributes are loaded before the data so that scaling
        // information is available to the reduction step
        if (load_attr)
            newdatafolder /o/s attr
            adh5_loadattr_all(fileID, attributespath)
            setdatafolder ::
        endif
        if (load_data)
            adh5_load_reduced_detector(fileID, detectorpath, reduction_func, reduction_param, progress=progress)
        endif

        HDF5CloseFile fileID
    else
        // error message consistent with adh5_load_preview
        print "error opening file " + AFileName
        AFileName = ""
    endif
#else
    Abort "HDF5 XOP not loaded."
#endif

    if (timerRefNum >= 0)
        adh5_perf_secs = stopMSTimer(timerRefNum) / 1e6
    endif

    setdatafolder saveDF
    // full path of the loaded file, or "" on failure
    return AFileName
end
|
||
|
||
/// load a single image from a HDF5 file created by the Area Detector software.
|
||
///
|
||
/// the data wave is loaded into the current data folder.
|
||
/// attributes are loaded into the attr subfolder. existing waves in attr are deleted.
|
||
///
|
||
/// @warning EXPERIMENTAL
|
||
/// this function uses the root:pearl_area:preview data folder. existing data there may be deleted!
|
||
///
|
||
/// @param ANickName destination wave name. the wave is created in the current data folder.
|
||
/// @param APathName igor symbolic path name. can be empty if the path is specified in FileName or a dialog box should be displayed
|
||
/// @param AFileName if empty a dialog box shows up
|
||
/// @param load_data 1 (default): load data; 0: do not load data
|
||
/// @param load_attr 1 (default): load attributes; 0: do not load attributes
|
||
/// note: for correct scaling of the image, the attributes need to be loaded
|
||
function /s adh5_load_preview(ANickName, APathName, AFileName, [load_data, load_attr])
    string ANickName
    string APathName
    string AFileName
    variable load_data
    variable load_attr

    // optional arguments default to "load everything"
    if (ParamIsDefault(load_data))
        load_data = 1
    endif
    if (ParamIsDefault(load_attr))
        load_attr = 1
    endif

    // work in root:pearl_area:preview; the result wave is copied
    // back to the caller's data folder under the name ANickName
    dfref saveDF = GetDataFolderDFR()
    setdatafolder root:
    newdatafolder /o/s pearl_area
    newdatafolder /o/s preview

    // open file
    variable fileID
    string instrumentpath = "/entry/instrument/"
    string detectorpath = instrumentpath + "detector/"
    string attributespath = instrumentpath + "NDAttributes/"
    string datasetname
    string datawavename

    // performance monitoring (elapsed time in global adh5_perf_secs)
    variable timerRefNum
    variable /g adh5_perf_secs
    timerRefNum = startMSTimer

    // avoid compilation error if HDF5 XOP has not been loaded
#if Exists("HDF5OpenFile")
    HDF5OpenFile /P=$APathName/R/Z fileID as AFileName
    if (v_flag == 0)
        AFileName = s_path + s_filename

        // detector data
        datasetname = detectorpath + "data"
        STRUCT HDF5DataInfo di // Defined in HDF5 Browser.ipf.
        InitHDF5DataInfo(di)
        variable err = HDF5DatasetInfo(fileID, datasetname, 0, di)
        if (err != 0)
            print "error accessing detector/data"
            // bug fix: close the file, stop the timer and restore the data
            // folder before bailing out (all three were leaked before)
            HDF5CloseFile fileID
            if (timerRefNum >= 0)
                adh5_perf_secs = stopMSTimer(timerRefNum) / 1e6
            endif
            setdatafolder saveDF
            return ""
        endif
        if (di.ndims < 2)
            print "error: rank of dataset < 2"
            // bug fix: same cleanup as above
            HDF5CloseFile fileID
            if (timerRefNum >= 0)
                adh5_perf_secs = stopMSTimer(timerRefNum) / 1e6
            endif
            setdatafolder saveDF
            return ""
        endif

        // preview = image at the middle of the extra dimensions (if present)
        variable dim2start = 0, dim2count = 1, dim3start = 0, dim3count = 1
        if (di.ndims >= 3)
            dim2start = floor(di.dims[di.ndims - 3] / 2)
            dim2count = 1
        endif
        if (di.ndims >= 4)
            dim3start = floor(di.dims[di.ndims - 4] / 2)
            dim3count = 1
        endif

        if (load_data)
            adh5_load_detector_image(fileID, detectorpath, dim2start, dim2count, dim3start, dim3count)
            wave /z data
            string destpath = GetDataFolder(1, saveDF) + ANickName
            if (waveexists(data))
                // copy the preview into the caller's data folder and
                // redirect the local reference to the copy
                duplicate /o data, $destpath
                wave /z data = $destpath
            endif
        endif

        if (load_attr)
            // attributes go to an attr subfolder of the caller's folder;
            // existing waves there are deleted first
            setdatafolder saveDF
            newdatafolder /o/s attr
            killwaves /a/z
            adh5_loadattr_all(fileID, attributespath)
            setdatafolder ::
            // data is null here if load_data was 0, then no scaling is applied
            if (waveexists(data))
                adh5_scale(data)
            endif
        endif

        HDF5CloseFile fileID
    else
        print "error opening file " + AFileName
        AFileName = ""
    endif
#else
    Abort "HDF5 XOP not loaded."
#endif

    if (timerRefNum >= 0)
        adh5_perf_secs = stopMSTimer(timerRefNum) / 1e6
    endif

    setdatafolder saveDF
    // full path of the loaded file, or "" on failure
    return AFileName
end
|
||
|
||
/// load descriptive info from a HDF5 file created by the Area Detector software.
|
||
///
|
||
/// the information returned is the array size and active scans
|
||
///
|
||
/// @attention EXPERIMENTAL
|
||
/// this function should be merged with adh5_load_preview
|
||
///
|
||
/// @param APathName igor symbolic path name. can be empty if the path is specified in FileName or a dialog box should be displayed
|
||
/// @param AFileName if empty a dialog box shows up
|
||
///
|
||
function /s adh5_load_info(APathName, AFileName)
    string APathName
    string AFileName

    dfref saveDF = GetDataFolderDFR()

    // open file
    variable fileID
    string instrumentpath = "/entry/instrument/"
    string detectorpath = instrumentpath + "detector/"
    string attributespath = instrumentpath + "NDAttributes/"
    string datasetname
    string datawavename

    // result: one text line per item, separated by carriage returns
    string s_info = ""
    string s

    variable idim

    // avoid compilation error if HDF5 XOP has not been loaded
#if Exists("HDF5OpenFile")
    HDF5OpenFile /P=$APathName/R/Z fileID as AFileName
    if (v_flag == 0)
        AFileName = s_path + s_filename

        // detector data
        datasetname = detectorpath + "data"
        STRUCT HDF5DataInfo di // Defined in HDF5 Browser.ipf.
        InitHDF5DataInfo(di)
        variable err = HDF5DatasetInfo(fileID, datasetname, 0, di)
        if (err != 0)
            print "error accessing detector/data"
            // bug fix: close the file before bailing out (it was leaked before)
            HDF5CloseFile fileID
            return ""
        endif

        // report the size of each dimension of the detector dataset
        for (idim = 0; idim < di.ndims; idim += 1)
            sprintf s, "dim %u: %u points", idim, di.dims[idim]
            if (strlen(s_info) > 0)
                s_info = s_info + "\r" + s
            else
                s_info = s
            endif
        endfor

        // load the attributes into a free data folder to find the active scans
        dfref df = NewFreeDataFolder()
        setdatafolder df
        adh5_loadattr_all(fileID, attributespath)

        // report the first positioner of each active scan dimension (1..4)
        for (idim = 1; idim < 5; idim += 1)
            sprintf s, "Scan%uActive", idim
            wave /z w = $s
            if (waveexists(w) && (numpnts(w) > 0) && (w[0] > 0))
                sprintf s, "Scan%uPositioner1", idim
                wave /t /z wt = $s
                // robustness fix: skip the entry instead of raising a runtime
                // error if the positioner attribute is missing or empty
                if (waveexists(wt) && (numpnts(wt) > 0))
                    sprintf s, "scan %u: %s", idim, wt[0]
                    if (strlen(s_info) > 0)
                        s_info = s_info + "\r" + s
                    else
                        s_info = s
                    endif
                endif
            endif
        endfor

        HDF5CloseFile fileID
    else
        print "error opening file " + AFileName
        AFileName = ""
    endif
#else
    Abort "HDF5 XOP not loaded."
#endif

    setdatafolder saveDF
    return s_info
end
|
||
|
||
/// load the detector dataset from the open HDF5 file.
|
||
///
|
||
/// the function loads the whole dataset at once
|
||
/// and redimensions it so that the image dimensions are X and Y
|
||
///
|
||
/// @param fileID ID of open HDF5 file from HDF5OpenFile
|
||
/// @param detectorpath path to detector group in the HDF5 file
|
||
///
|
||
function adh5_load_detector(fileID, detectorpath)
    variable fileID
    string detectorpath

    // avoid compilation error if HDF5 XOP has not been loaded
#if Exists("HDF5LoadData")
    string datasetname
    string datawavename

    // detector data
    datasetname = detectorpath + "data"
    STRUCT HDF5DataInfo di // Defined in HDF5 Browser.ipf.
    InitHDF5DataInfo(di)
    variable err = HDF5DatasetInfo(fileID, datasetname, 0, di)
    if (err != 0)
        print "error accessing detector/data"
        return -1
    endif
    if (di.ndims < 2)
        print "error: rank of dataset < 2"
        return -2
    endif

    // load the complete dataset in one call;
    // creates/overwrites the wave "data" in the current data folder.
    // NOTE(review): contrary to the doc comment above, this function does
    // not redimension the loaded wave; apply adh5_redim(data) afterwards
    // if the image dimensions should become X and Y -- confirm intended usage.
    HDF5LoadData /O /Q /Z fileID, datasetname
    wave data

#else
    Abort "HDF5 XOP not loaded."
#endif
end
|
||
|
||
/// redimension a multi-dimensional area detector array loaded from HDF5.
|
||
///
|
||
/// so that the image dimensions are X and Y
|
||
/// singleton dimensions are removed (most common cases only)
|
||
///
|
||
/// in the redimensioned array, the original dimension type is noted in the dimension label:
|
||
/// AD_Dim0 = first image dimension
|
||
/// AD_Dim1 = second image dimension
|
||
/// AD_DimN = frame sequence
|
||
/// AD_DimX = extra dimension X
|
||
/// AD_DimY = extra dimension Y (cannot be loaded in Igor)
|
||
///
|
||
/// @param data area detector data loaded from HDF5 to be redimensioned
|
||
///
|
||
function adh5_redim(data)
    wave data

    // work on a free copy so that data itself can be redimensioned in place
    duplicate /free data, tempdata
    variable nd = wavedims(tempdata)
    // dimension sizes counted backwards from the last (fastest varying) index:
    // the file stores dimensions in reverse order with respect to the
    // desired Igor layout, hence the index swapping in the assignments below.
    // for nd < 4 the out-of-range dimsize calls return 0, which is harmless
    // because nz/nt are only used in the matching switch cases.
    variable nx = dimsize(tempdata, nd - 1)
    variable ny = dimsize(tempdata, nd - 2)
    variable nz = dimsize(tempdata, nd - 3)
    variable nt = dimsize(tempdata, nd - 4)

    switch (nd)
        case 2:
            if (nx <= 1)
                // singleton last dimension -> 1D result
                redimension /n=(ny) data
                setdimlabel 0, -1, AD_Dim1, data
                data = tempdata[p][0]
            elseif (ny <= 1)
                // singleton first dimension -> 1D result
                redimension /n=(nx) data
                setdimlabel 0, -1, AD_Dim0, data
                data = tempdata[0][p]
            else
                // transpose so that dims 0/1 of the result are the image dimensions
                redimension /n=(nx,ny) data
                setdimlabel 0, -1, AD_Dim0, data
                setdimlabel 1, -1, AD_Dim1, data
                data = tempdata[q][p]
            endif
            break
        case 3:
            if (nx <= 1)
                redimension /n=(ny,nz) data
                setdimlabel 0, -1, AD_Dim1, data
                setdimlabel 1, -1, AD_DimN, data
                multithread data = tempdata[q][p][0]
            elseif (ny <= 1)
                redimension /n=(nx,nz) data
                setdimlabel 0, -1, AD_Dim0, data
                setdimlabel 1, -1, AD_DimN, data
                multithread data = tempdata[q][0][p]
            elseif (nz <= 1)
                // singleton frame dimension -> plain image
                redimension /n=(nx,ny) data
                setdimlabel 0, -1, AD_Dim0, data
                setdimlabel 1, -1, AD_Dim1, data
                multithread data = tempdata[0][q][p]
            else
                // full image stack: image dims first, frame sequence last
                redimension /n=(nx,ny,nz) data
                setdimlabel 0, -1, AD_Dim0, data
                setdimlabel 1, -1, AD_Dim1, data
                setdimlabel 2, -1, AD_DimN, data
                multithread data = tempdata[r][q][p]
            endif
            break
        case 4:
            if (nz <= 1)
                // singleton "frame number" dimension
                redimension /n=(nx,ny,nt) data
                setdimlabel 0, -1, AD_Dim0, data
                setdimlabel 1, -1, AD_Dim1, data
                setdimlabel 2, -1, AD_DimX, data
                multithread data = tempdata[r][0][q][p]
            else
                redimension /n=(nx,ny,nz,nt) data
                setdimlabel 0, -1, AD_Dim0, data
                setdimlabel 1, -1, AD_Dim1, data
                setdimlabel 2, -1, AD_DimN, data
                setdimlabel 3, -1, AD_DimX, data
                multithread data = tempdata[s][r][q][p]
            endif
            break
    endswitch
end
|
||
|
||
/// find the attributes data folder of an area detector dataset.
|
||
///
|
||
/// since version 1.04 attributes should be stored in a subfolder named attr.
|
||
/// earlier versions had the attributes in the same data folder as the actual dataset.
|
||
///
|
||
/// @param data wave containing the main dataset.
|
||
///
|
||
/// @return data folder reference of the attributes folder.
|
||
/// the reference may be invalid (and default to root) if the folder cannot be found,
|
||
/// cf. built-in DataFolderRefStatus function.
|
||
static function /DF GetAttrDataFolderDFR(data)
    wave data

    // since version 1.04 the attributes live in the "attr" subfolder next to
    // the dataset; for older data fall back to the dataset's own folder.
    // the returned reference may be invalid if neither folder exists,
    // cf. built-in DataFolderRefStatus function.
    dfref parentDF = GetWavesDataFolderDFR(data)
    dfref resultDF = parentDF:attr
    if (DataFolderRefStatus(resultDF) != 0)
        return resultDF
    endif
    return parentDF
end
|
||
|
||
/// set the dimension scales of an area detector dataset.
|
||
///
|
||
/// the intrinsic dimensions 0 and 1 are scaled according to the data source
|
||
/// (currently supported: Prosilica cameras, Scienta electron analyser).
|
||
/// the extra dimensions are scaled according to the scan.
|
||
/// the latter requires that the positioner names and position values are available.
|
||
///
|
||
function adh5_scale(data,[source])
    wave data            // area detector dataset to be scaled in place
    string source        // optional override of source detection: "scienta", "prosilica", "unknown"

    dfref saveDF = GetDataFolderDFR()
    dfref dataDF = GetWavesDataFolderDFR(data)   // NOTE(review): currently unused
    dfref attrDF = GetAttrDataFolderDFR(data)

    if (ParamIsDefault(source))
        // auto-detect the data source from the loaded attributes
        // is the source a Scienta analyser?
        wave /SDFR=attrDF /Z AcquisitionMode
        wave /SDFR=attrDF /T /Z Manufacturer
        source = "unknown"
        if (waveexists(Manufacturer) && (numpnts(Manufacturer) >= 1))
            strswitch(Manufacturer[0])
                case "VG Scienta":
                    source = "scienta"
                    break
                case "Prosilica":
                    source = "prosilica"
                    break
            endswitch
        elseif (waveexists(AcquisitionMode) && (numpnts(AcquisitionMode) >= 1))
            // NOTE(review): this fallback runs only when the Manufacturer
            // attribute is absent or empty; a Manufacturer wave with an
            // unrecognized value leaves source = "unknown" without consulting
            // AcquisitionMode -- confirm this priority is intended.
            if (stringmatch(note(AcquisitionMode), "*SCIENTA*"))
                source = "scienta"
            endif
        endif
    endif

    // apply the source-specific scaling
    strswitch(source)
        case "prosilica":
            // pixel scale - nothing to do
            break
        case "scienta":
            adh5_scale_scienta(data)
            break
    endswitch

    setdatafolder saveDF
end
|
||
|
||
/// load the detector dataset from the open HDF5 file.
|
||
///
|
||
/// the function loads the dataset image by image using the hyperslab option.
|
||
/// this function gives the same result as adh5_load_detector.
|
||
/// it is about 5% slower, and it depends on HDF5 Browser code.
|
||
/// but it does not choke on large datasets (as long as the final wave fits into memory).
|
||
///
|
||
/// @param fileID ID of open HDF5 file from HDF5OpenFile.
|
||
/// @param detectorpath path to detector group in the HDF5 file.
|
||
/// @param progress 1 (default): show progress window; 0: do not show progress window.
|
||
///
|
||
/// @return 0 if successful, non-zero if an error occurred.
|
||
///
|
||
function adh5_load_detector_slabs(fileID, detectorpath, [progress])
    variable fileID
    string detectorpath
    variable progress

    if (ParamIsDefault(progress))
        progress = 1
    endif
    variable result = 0   // 0 = ok, negative = error / user abort

    // avoid compilation error if HDF5 XOP has not been loaded
#if Exists("HDF5LoadData")
    string datasetname
    string datawavename

    // detector data
    datasetname = detectorpath + "data"
    STRUCT HDF5DataInfo di // Defined in HDF5 Browser.ipf.
    InitHDF5DataInfo(di)
    variable err = HDF5DatasetInfo(fileID, datasetname, 0, di)
    if (err != 0)
        print "error accessing detector/data"
        return -1
    endif
    if (di.ndims < 2)
        print "error: rank of dataset < 2"
        return -2
    endif

    // nx and ny are the image dimensions; the image dimensions are
    // the last (fastest varying) two dimensions in the file.
    // idz/idt are the file indices of the extra dimensions, -1 = not present.
    variable idx, idy, idz, idt, izt
    idx = di.ndims - 1
    idy = di.ndims - 2
    idz = -1
    idt = -1

    variable nx, ny, nz, nt, nzt
    nx = di.dims[idx]
    ny = di.dims[idy]
    nz = 1
    nt = 1

    // destination wave; the dimension labels record the original
    // area detector dimension type of each Igor dimension
    make /n=(nx,ny,nz,nt) /o data
    string dim_labels = "AD_Dim0;AD_Dim1;AD_DimN;AD_DimX;AD_DimY"
    string dim_label
    dim_label = StringFromList(0, dim_labels, ";")
    setdimlabel 0, -1, $dim_label, data
    dim_labels = RemoveFromList(dim_label, dim_labels, ";")
    dim_label = StringFromList(0, dim_labels, ";")
    setdimlabel 1, -1, $dim_label, data
    dim_labels = RemoveFromList(dim_label, dim_labels, ";")

    // find additional dimensions, ignore singletons
    variable id
    for (id = idy - 1; (id >= 0) && (nz == 1); id -= 1)
        if (di.dims[id] > 1)
            idz = id
            nz = di.dims[id]
            dim_label = StringFromList(0, dim_labels, ";")
            setdimlabel 2, -1, $dim_label, data
        endif
        // consume one label per skipped file dimension so that the
        // label always matches the original dimension type
        dim_labels = RemoveListItem(0, dim_labels, ";")
    endfor
    for (id = idz - 1; (id >= 0) && (nt == 1); id -= 1)
        if (di.dims[id] > 1)
            idt = id
            nt = di.dims[id]
            dim_label = StringFromList(0, dim_labels, ";")
            setdimlabel 3, -1, $dim_label, data
        endif
        dim_labels = RemoveListItem(0, dim_labels, ";")
    endfor
    redimension /n=(nx,ny,nz,nt) data

    // default values if dimensions are not present in dataset
    // (point past the valid slab indices; the slab wave is at least rank 4)
    if (idz < 0)
        idz = idx + 1
        idt = idz + 1
    elseif (idt < 0)
        idt = idx + 1
    endif

    nzt = nz * nt
    izt = 0
    if (progress)
        display_progress_panel("HDF5 Import", "Loading data...", nzt)
    endif

    // load data image by image
    // each slab selects one full image at the (iz, it) position
    HDF5MakeHyperslabWave(GetDataFolder(1) + "slab", max(di.ndims, 4))
    wave slab
    slab[][%Start] = 0
    slab[][%Stride] = 1
    slab[][%Count] = 1
    slab[][%Block] = 1
    slab[idx][%Block] = nx
    slab[idy][%Block] = ny

    variable iz, it
    for (iz = 0; iz < nz; iz += 1)
        for (it = 0; it < nt; it += 1)
            slab[idz][%Start] = iz
            slab[idt][%Start] = it
            HDF5LoadData /O /Q /Z /SLAB=slab /N=slabdata fileID, datasetname
            wave slabdata // 2D, 3D, or 4D with singletons
            // transpose the image (file order is reversed) and insert it
            switch (WaveDims(slabdata))
                case 2:
                    data[][][iz][it] = slabdata[q][p]
                    break
                case 3:
                    data[][][iz][it] = slabdata[0][q][p]
                    break
                case 4:
                    data[][][iz][it] = slabdata[0][0][q][p]
                    break
            endswitch
            // progress window
            izt += 1
            if (progress)
                if (update_progress_panel(izt))
                    result = -4 // user abort
                    break
                endif
            endif
        endfor
        if (result < 0)
            break
        endif
    endfor

    // drop trailing singleton dimensions of the result
    if (nz == 1)
        redimension /n=(nx,ny) data
    elseif (nt == 1)
        redimension /n=(nx,ny,nz) data
    endif

    // NOTE(review): the temporary waves "slab" and "slabdata" remain in the
    // current data folder after loading -- confirm whether this is intended.
    if (progress)
        kill_progress_panel()
    endif
#else
    Abort "HDF5 XOP not loaded."
#endif

    return result
end
|
||
|
||
/// load a single image from the detector dataset of the open HDF5 file
|
||
///
|
||
/// the function can average over a region in the extra dimensions.
|
||
///
|
||
/// @param fileID ID of open HDF5 file from HDF5OpenFile
|
||
/// @param detectorpath path to detector group in the HDF5 file
|
||
/// @param dim2start 2nd dimension coordinate of the first image
|
||
/// note that the order of dimensions is reversed in the file
|
||
/// 2nd dimension = N dimension in area detector = dimension 0 of the three-dimensional HDF dataset
|
||
/// set to 0 if dimension may not be present
|
||
/// @param dim2count number of subsequent images to average
|
||
/// set to 1 if dimension may not be present
|
||
/// @param dim3start 3rd dimension coordinate of the first image
|
||
/// note that the order of dimensions is reversed in the file
|
||
/// 3rd dimension = extra X dimension in area detector = dimension 0 of the four-dimensional HDF dataset
|
||
/// set to 0 if dimension may not be present
|
||
/// @param dim3count number of subsequent images to average
|
||
/// set to 1 if dimension may not be present
|
||
///
|
||
function adh5_load_detector_image(fileID, detectorpath, dim2start, dim2count, dim3start, dim3count)
    variable fileID
    string detectorpath
    variable dim2start
    variable dim2count
    variable dim3start
    variable dim3count

    // avoid compilation error if HDF5 XOP has not been loaded
#if Exists("HDF5LoadData")
    string datasetname
    string datawavename

    // detector data
    datasetname = detectorpath + "data"
    STRUCT HDF5DataInfo di // Defined in HDF5 Browser.ipf.
    InitHDF5DataInfo(di)
    variable err = HDF5DatasetInfo(fileID, datasetname, 0, di)
    if (err != 0)
        print "error accessing detector/data"
        return -1
    endif
    if (di.ndims < 1)
        print "error: rank of dataset < 1"
        return -2
    endif

    // nx and ny are the image dimensions; the image dimensions are the
    // last (fastest varying) dimensions in the file.
    // for low-rank datasets the extra indices point past the valid file
    // dimensions (the slab wave below is at least rank 4, so this is safe).
    variable idx, idy, idz, idt
    idx = di.ndims - 1
    idy = di.ndims >= 2 ? di.ndims - 2 : 1
    idz = di.ndims >= 3 ? di.ndims - 3 : 2
    idt = di.ndims >= 4 ? di.ndims - 4 : 3

    variable nx, ny
    nx = di.dims[idx]
    ny = di.ndims >= 2 ? di.dims[idy] : 1

    // inclusive index ranges of the images to average
    variable dim2end = dim2start + dim2count - 1
    variable dim3end = dim3start + dim3count - 1

    // the slab wave is at least 4-dimensional
    // it will also load lower-dimensional datasets
    HDF5MakeHyperslabWave(GetDataFolder(1) + "slab", max(di.ndims, 4))
    wave slab
    slab[][%Start] = 0
    slab[][%Stride] = 1
    slab[][%Count] = 1
    slab[][%Block] = 1
    slab[idx][%Block] = nx
    slab[idy][%Block] = ny

    // accumulate the selected images in double precision, then divide
    make /n=(nx,ny)/o/d data
    data = 0
    variable iz, it
    variable navg = 0   // number of images summed
    for (iz = dim2start; iz <= dim2end; iz += 1)
        for (it = dim3start; it <= dim3end; it += 1)
            slab[idz][%Start] = iz
            slab[idt][%Start] = it
            HDF5LoadData /O /Q /Z /SLAB=slab /N=slabdata fileID, datasetname
            wave slabdata // 2D, 3D, or 4D with singletons
            // transpose the image (file order is reversed) and add it up
            switch (WaveDims(slabdata))
                case 1:
                    data += slabdata[p]
                    navg += 1
                    break
                case 2:
                    data += slabdata[q][p]
                    navg += 1
                    break
                case 3:
                    data += slabdata[0][q][p]
                    navg += 1
                    break
                case 4:
                    data += slabdata[0][0][q][p]
                    navg += 1
                    break
            endswitch
        endfor
    endfor
    // NOTE(review): navg stays 0 if no slab was loaded successfully,
    // giving a NaN image -- confirm this is acceptable to callers.
    data /= navg
    setdimlabel 0, -1, AD_Dim0, data
    setdimlabel 1, -1, AD_Dim1, data

#else
    Abort "HDF5 XOP not loaded."
#endif
end
|
||
|
||
/// get a list of functions which can be used as reduction functions.
|
||
///
|
||
/// the function evaluates only the function arguments,
|
||
/// it may thus include functions which are not suitable as reduction functions.
|
||
///
|
||
function /s adh5_list_reduction_funcs()
	// candidates: user-defined functions with two parameters and a wave-reference result
	string candidates = FunctionList("*", ";", "KIND:6,NPARAMS:2,VALTYPE:8")
	string accepted = ""

	string funcname
	string info
	variable index
	variable count = ItemsInList(candidates, ";")

	for (index = 0; index < count; index += 1)
		funcname = StringFromList(index, candidates, ";")
		info = FunctionInfo(funcname)
		// must return a wave reference
		if (NumberByKey("RETURNTYPE", info, ":", ";") != 0x4000)
			continue
		endif
		// must be threadsafe
		if (cmpstr(StringByKey("THREADSAFE", info, ":", ";"), "yes") != 0)
			continue
		endif
		// exactly two mandatory parameters, no optional ones
		if (NumberByKey("N_PARAMS", info, ":", ";") != 2)
			continue
		endif
		if (NumberByKey("N_OPT_PARAMS", info, ":", ";") != 0)
			continue
		endif
		// one numeric wave and one pass-by-reference string
		if (NumberByKey("PARAM_0_TYPE", info, ":", ";") != 0x4002)
			continue
		endif
		if (NumberByKey("PARAM_1_TYPE", info, ":", ";") != 0x3000)
			continue
		endif
		accepted = AddListItem(funcname, accepted, ";")
	endfor

	// case-insensitive alphanumeric sort
	return SortList(accepted, ";", 4)
end
|
||
|
||
/// function prototype for adh5_load_reduced_detector
|
||
///
|
||
/// this is a prototype of custom functions that convert (reduce) a two-dimensional detector image
|
||
/// into one or more one-dimensional waves.
|
||
/// data processing can be tuned with a set of parameters.
|
||
///
|
||
/// reduction functions have a fixed signature (function arguments) so that the file import functions
|
||
/// can call them efficiently on a series of detector images.
|
||
/// pearl procedures comes with a number of pre-defined reduction functions
|
||
/// but you may as well implement your own functions.
|
||
/// if you write your own function, you must use the same declaration and arguments
|
||
/// as this function except for the function name.
|
||
/// you can do many things in a reduction function,
|
||
/// e.g. integration over a region of interest, curve fitting, etc.
|
||
///
|
||
/// each destination wave is a one-dimensional intensity distribution.
|
||
/// the function must redimension each of these waves to one of the image dimensions
|
||
/// by calling the adh5_setup_profile() function.
|
||
/// this function will also copy the scale information and dimension labels,
|
||
/// which is important for the proper scaling of the result.
|
||
///
|
||
/// the meaning of the data in the result waves is up to the particular function,
|
||
/// e.g. dest1 could hold the mean value and dest2 the one-sigma error,
|
||
/// or dest1 could hold the X-profile, and dest2 the Y-profile.
|
||
///
|
||
/// @param source source wave.
|
||
/// two-dimensional intensity distribution (image).
|
||
/// the scales are carried over to the result waves.
|
||
///
|
||
/// @param param string with optional parameters, shared between calls.
|
||
/// this is a pass-by-reference argument,
|
||
/// the function may modify the string.
|
||
///
|
||
/// @return a free wave containing references of the result waves.
|
||
/// the result waves should as well be free waves.
|
||
/// if an error occurred, the reference wave is empty.
|
||
///
|
||
threadsafe function /wave adh5_default_reduction(source, param)
	wave source		// two-dimensional intensity distribution (image)
	string &param	// pass-by-reference parameter string, may be modified by the function.
					// bug fix: the source had the mis-encoded token "¶m" here,
					// which is not valid Igor syntax; the doc comment above requires
					// a pass-by-reference string ("&param").

	// demo code
	// integrate along the dimensions
	make /n=0 /free dest1, dest2
	adh5_setup_profile(source, dest1, 0)
	ad_profile_x_w(source, 0, -1, dest1)
	adh5_setup_profile(source, dest2, 1)
	ad_profile_y_w(source, 0, -1, dest2)

	// return references of the (free) result waves
	make /n=2 /free /wave results
	results[0] = dest1
	results[1] = dest2
	return results
end
|
||
|
||
/// set up a one-dimensional wave for a line profile based on a 2D original wave.
|
||
///
|
||
/// redimensions the profile wave to the given dimension.
|
||
/// copies the scale and dimension label of the given dimension.
|
||
///
|
||
threadsafe function adh5_setup_profile(image, profile, dim)
	wave image // prototype: 2D wave whose scaling and labels are copied
	wave profile // destination wave: redimensioned to 1D in place
	variable dim // which dimension to keep: 0 = X, 1 = Y

	// size the profile to the selected image dimension
	redimension /n=(dimsize(image, dim)) profile
	// copy point scaling (offset, delta, unit) of the selected dimension
	setscale /p x dimoffset(image, dim), dimdelta(image, dim), waveunits(image, dim), profile
	// data (intensity) units carry over from the image
	setscale d 0, 0, waveunits(image, -1), profile
	// copy the whole-dimension label so downstream code can identify the axis
	setdimlabel 0, -1, $getdimlabel(image, dim, -1), profile
end
|
||
|
||
/// wrapper function for testing reduction functions from the command line.
|
||
///
|
||
/// reduction functions cannot be used on the command line because they require
|
||
/// a pass-by-reference argument and return free waves.
|
||
/// this function expects the reduction parameters in a normal string
|
||
/// and copies the results into the current data folder.
|
||
/// the prefix of the result names can be specified.
|
||
///
|
||
/// @param source source wave.
|
||
/// two-dimensional intensity distribution (image).
|
||
/// the scales are carried over to the result waves.
|
||
///
|
||
/// @param reduction_func name of the reduction function to apply to the source data.
|
||
///
|
||
/// @param reduction_param string with reduction parameters as required by the specific reduction function.
|
||
///
|
||
/// @param result_prefix name prefix of result waves.
|
||
/// a numeric index is appended to distinguish the results.
|
||
/// the index starts at 1. existing waves are overwritten.
|
||
///
|
||
/// @return a copy of the reduction_param string, possibly modified by the reduction function.
|
||
///
|
||
function /s adh5_test_reduction_func(source, reduction_func, reduction_param, result_prefix)
	wave source				// two-dimensional source image
	funcref adh5_default_reduction reduction_func	// reduction function under test
	string reduction_param	// parameter string handed (by reference) to the reduction function
	string result_prefix	// name prefix of the copied result waves; index starts at 1

	// run the reduction; the function may modify reduction_param
	wave /wave results = reduction_func(source, reduction_param)
	// copy the (free) result waves into the current data folder as <prefix>1, <prefix>2, ...
	adh5_get_result_waves(results, result_prefix, 1)

	// return the possibly modified parameter string
	return reduction_param
end
|
||
|
||
/// reduce a three-dimensional intensity distribution
|
||
///
|
||
/// this function reduces a three-dimensional intensity distribution
|
||
/// to a two-dimensional intensity map.
|
||
/// the given reduction function is applied once on each Z section.
|
||
///
|
||
/// @param source source wave.
|
||
/// three-dimensional intensity distribution (image).
|
||
/// the scales are carried over to the result waves.
|
||
///
|
||
/// @param reduction_func name of the reduction function to apply to the source data.
|
||
///
|
||
/// @param reduction_param string with reduction parameters as required by the specific reduction function.
|
||
///
|
||
/// @param result_prefix name prefix of result waves.
|
||
/// a numeric index is appended to distinguish the results.
|
||
/// the index starts at 1. existing waves are overwritten.
|
||
///
|
||
///
|
||
function adh5_reduce_brick(source, reduction_func, reduction_param, result_prefix, [progress, nthreads])
	wave source		// three-dimensional intensity distribution
	funcref adh5_default_reduction reduction_func	// reduction function applied to each Z section
	string reduction_param	// parameter string for the reduction function
	string result_prefix	// note: results are created as ReducedData1, ReducedData2, ...

	variable progress	// 1 (default): show progress window; 0: quiet
	variable nthreads	// -1 (default): one worker thread per processor; 0: main thread only

	if (ParamIsDefault(progress))
		progress = 1
	endif
	if (ParamIsDefault(nthreads))
		nthreads = -1
	endif
	variable result = 0

	// nx and ny are the image dimensions, nz the number of sections
	variable nx, ny, nz, nt
	nx = dimsize(source, 0)
	ny = dimsize(source, 1)
	nz = dimsize(source, 2)
	// force 4th dimension to singleton (ad_extract_slab handles 3 dimensions only)
	nt = 0

	variable nzt = max(nz, 1) * max(nt, 1)
	variable izt

	// set up multi threading
	if (nthreads < 0)
		nthreads = ThreadProcessorCount
	endif
	if (nthreads > 0)
		variable threadGroupID = ThreadGroupCreate(nthreads)
		variable ithread
		for (ithread = 0; ithread < nthreads; ithread += 1)
			// bug fix: start reduce_brick_worker, not reduce_slab_worker.
			// the job folders created below contain an "image" wave only;
			// reduce_slab_worker expects an additional "slabdata" wave and would fail.
			ThreadStart threadGroupID, ithread, reduce_brick_worker(reduction_func)
		endfor
	else
		make /n=(nzt) /df /free processing_folders
	endif

	if (progress)
		display_progress_panel("data reduction", "extracting data (step 1 of 2)...", nzt)
	endif

	variable iz, it
	string dfname
	variable iw, nw
	string sw
	make /n=0 /free /wave result_waves

	// step 1: extract each Z section into its own job folder and queue it
	izt = 0
	for (iz = 0; iz < max(nz, 1); iz += 1)
		for (it = 0; it < max(nt, 1); it += 1)
			dfname = "processing_" + num2str(izt)
			newdatafolder /s $dfname
			ad_extract_slab(source, nan, nan, nan, nan, iz, iz, "image", pscale=1)
			wave image

			// send to processing queue
			variable /g r_index = iz
			variable /g s_index = it
			string /g func_param = reduction_param

			if (nthreads > 0)
				WaveClear image
				ThreadGroupPutDF threadGroupID, :
			else
				// single-threaded: reduce in place
				processing_folders[izt] = GetDataFolderDFR()
				string param = reduction_param
				wave /wave reduced_waves = reduction_func(image, param)
				variable /g func_result = numpnts(reduced_waves)
				adh5_get_result_waves(reduced_waves, "redw_", 0)
				WaveClear image, reduced_waves
				setdatafolder ::
			endif

			izt += 1
			// progress window
			if (progress)
				if (update_progress_panel(izt))
					result = -4	// user abort
					break
				endif
			endif
		endfor
	endfor

	if (progress)
		update_progress_panel(0, message="processing data (step 2 of 2)...")
	endif

	// step 2: collect processed folders and compose the output waves
	dfref dfr
	for (izt = 0; (izt < nzt) && (result == 0); izt += 1)
		if (nthreads > 0)
			// wait for the next result folder; poll with 1 s timeout
			do
				dfr = ThreadGroupGetDFR(threadGroupID, 1000)
				if (DatafolderRefStatus(dfr) != 0)
					break
				endif
				if (progress)
					if (update_progress_panel(izt))
						result = -4	// user abort
						break
					endif
				endif
			while (1)
		else
			dfr = processing_folders[izt]
			if (progress)
				if (update_progress_panel(izt))
					result = -4	// user abort
					break
				endif
			endif
		endif

		if (result != 0)
			break
		endif

		nvar rr = dfr:r_index
		nvar ss = dfr:s_index
		nvar func_result = dfr:func_result

		if (func_result < 1)
			result = -3	// dimension reduction error
			break
		endif

		// create the destination waves when the first result arrives
		if (numpnts(result_waves) == 0)
			redimension /n=(func_result) result_waves
			for (iw = 0; iw < func_result; iw += 1)
				sw = "redw_" + num2str(iw)
				wave profile = dfr:$sw
				sw = "ReducedData" + num2str(iw+1)
				make /n=(dimsize(profile, 0), nz, nt) /d /o $sw
				wave data = $sw
				setdimlabel 0, -1, $getdimlabel(profile, 0, -1), data
				setscale /p x dimoffset(profile, 0), dimdelta(profile, 0), waveunits(profile, 0), data
				setscale /p y dimoffset(source, 2), dimdelta(source, 2), waveunits(source, 2), data
				setscale /p z dimoffset(source, 3), dimdelta(source, 3), waveunits(source, 3), data
				setscale d 0, 0, waveunits(profile, -1), data
				result_waves[iw] = data
			endfor
		endif
		// copy the section's profiles into the output waves
		for (iw = 0; iw < func_result; iw += 1)
			sw = "redw_" + num2str(iw)
			wave profile = dfr:$sw
			wave data = result_waves[iw]
			data[][rr][ss] = profile[p]
		endfor
	endfor

	// clean up threads / job folders
	if (nthreads > 0)
		variable tstatus = ThreadGroupRelease(threadGroupID)
		if (tstatus == -2)
			result = -5	// thread did not terminate properly
		endif
	else
		for (izt = 0; izt < nzt; izt += 1)
			KillDataFolder /Z processing_folders[izt]
		endfor
	endif

	if (progress)
		kill_progress_panel()
	endif

	return result
end
|
||
|
||
/// thread worker for adh5_reduce_brick
|
||
///
|
||
/// this function polls job data folders from thread group 0
|
||
/// and calls the reduction function on their contents.
|
||
/// the result waves have prefix "redw_" and are saved in the job folder.
|
||
///
|
||
threadsafe static function reduce_brick_worker(reduction_func)
	funcref adh5_default_reduction reduction_func	// reduction function to apply to each image
	do
		// wait for job from main thread; poll the input queue with 1 s timeout
		do
			dfref dfr = ThreadGroupGetDFR(0, 1000)
			if (DataFolderRefStatus(dfr) == 0)
				if (GetRTError(2))
					return 0	// no more jobs
				endif
			else
				break
			endif
		while (1)

		// get input data
		wave image = dfr:image
		svar func_param = dfr:func_param
		nvar rr = dfr:r_index
		nvar ss = dfr:s_index

		// do the work in a fresh output folder
		newdatafolder /s outDF
		variable /g r_index = rr	// copied so the consumer can match results to section indices
		variable /g s_index = ss
		string param = func_param	// local copy: reduction_func takes the string by reference
		wave /wave reduced_waves = reduction_func(image, param)
		variable /g func_result = numpnts(reduced_waves)

		// send output to queue and clean up
		adh5_get_result_waves(reduced_waves, "redw_", 0)
		WaveClear image, reduced_waves
		ThreadGroupPutDF 0, :
		KillDataFolder dfr
	while (1)

	return 0
end
|
||
|
||
/// copy waves from wave reference wave into current data folder
|
||
///
|
||
/// this function copies waves that are referenced in a wave reference wave into the current data folder.
|
||
/// the destination waves get new names consisting of a prefix and a numeric index.
|
||
/// the index is the array index of the wave in results plus a chosen offset.
|
||
///
|
||
/// @param results a wave reference wave pointing to result waves from data reduction.
|
||
/// the waves can be free or regular waves.
|
||
/// results can be a free or regular wave.
|
||
///
|
||
/// @param result_prefix name prefix of the copied waves.
|
||
///
|
||
/// @param start_index start index (offset) of the copied waves.
|
||
///
|
||
threadsafe function adh5_get_result_waves(results, result_prefix, start_index)
	wave /wave results		// wave reference wave pointing to the source waves (free or regular)
	string result_prefix	// name prefix of the copies created in the current data folder
	variable start_index	// index offset of the first copy

	// duplicate each referenced wave to <prefix><index> in the current data folder;
	// existing waves of the same name are overwritten
	variable item
	variable count = numpnts(results)
	string destname

	for (item = 0; item < count; item += 1)
		destname = result_prefix + num2str(item + start_index)
		duplicate /o results[item], $destname
	endfor
end
|
||
|
||
/// load a reduced detector dataset from the open HDF5 file.
|
||
///
|
||
/// the function loads the dataset image by image using the hyperslab option
|
||
/// and applies a custom reduction function to each image.
|
||
/// the results from the reduction function are composed into one result wave.
|
||
/// the raw data are discarded.
|
||
///
|
||
/// by default, the reduction function is called in separate threads to reduce the total loading time.
|
||
/// (see the global variable adh5_perf_secs which reports the total run time of the function.)
|
||
/// the effect varies depending on the balance between file loading (image size)
|
||
/// and data processing (complexity of the reduction function).
|
||
/// for debugging the reduction function, multi-threading can be disabled.
|
||
///
|
||
/// @param fileID ID of open HDF5 file from HDF5OpenFile
|
||
/// @param detectorpath path to detector group in the HDF5 file
|
||
/// @param reduction_func custom reduction function
|
||
/// (any user-defined function which has the same parameters as adh5_default_reduction())
|
||
/// @param reduction_param parameter string for the reduction function
|
||
/// @param progress 1 (default): show progress window; 0: do not show progress window
|
||
/// @param nthreads -1 (default): use as many threads as there are processor cores (in addition to main thread)
|
||
/// 0: use main thread only (e.g. for debugging the reduction function)
|
||
/// >= 1: use a fixed number of (additional) threads
|
||
function adh5_load_reduced_detector(fileID, detectorpath, reduction_func, reduction_param, [progress, nthreads])
	variable fileID			// ID of open HDF5 file from HDF5OpenFile
	string detectorpath		// path to detector group in the HDF5 file, with trailing "/"
	funcref adh5_default_reduction reduction_func	// custom reduction function
	string reduction_param	// parameter string for the reduction function
	variable progress		// 1 (default): show progress window; 0: quiet
	variable nthreads		// -1 (default): one worker thread per processor; 0: main thread only

	if (ParamIsDefault(progress))
		progress = 1
	endif
	if (ParamIsDefault(nthreads))
		nthreads = -1
	endif
	variable result = 0

	// avoid compilation error if HDF5 XOP has not been loaded
#if Exists("HDF5LoadData")
	string datasetname
	string datawavename

	// detector data
	datasetname = detectorpath + "data"
	STRUCT HDF5DataInfo di	// Defined in HDF5 Browser.ipf.
	InitHDF5DataInfo(di)
	variable err = HDF5DatasetInfo(fileID, datasetname, 0, di)
	if (err != 0)
		print "error accessing detector/data"
		return -1
	endif
	if (di.ndims < 2)
		print "error: rank of dataset < 2"
		return -2
	endif

	// dimension order is reversed in the file:
	// the last two HDF dimensions are the image (x = fastest)
	variable idx, idy, idz, idt
	idx = di.ndims - 1
	idy = di.ndims - 2
	idz = -1
	idt = -1

	// nx and ny are the image dimensions
	variable nx, ny, nz, nt
	nx = di.dims[idx]
	ny = di.dims[idy]
	nz = 1
	nt = 1

	// find additional dimensions, ignore singletons
	variable id
	for (id = idy - 1; (id >= 0) && (nz == 1); id -= 1)
		if (di.dims[id] > 1)
			idz = id
			nz = di.dims[id]
		endif
	endfor
	for (id = idz - 1; (id >= 0) && (nt == 1); id -= 1)
		if (di.dims[id] > 1)
			idt = id
			nt = di.dims[id]
		endif
	endfor
	// default values if dimensions are not present in dataset:
	// point past the used rows of the (at least 4-row) slab wave, Start stays 0
	if (idz < 0)
		idz = idx + 1
		idt = idz + 1
	elseif (idt < 0)
		idt = idx + 1
	endif
	variable nzt = nz * nt
	variable izt

	// load data image by image
	HDF5MakeHyperslabWave(GetDataFolder(1) + "slab", max(di.ndims, 4))
	wave slab
	slab[][%Start] = 0
	slab[][%Stride] = 1
	slab[][%Count] = 1
	slab[][%Block] = 1
	slab[idx][%Block] = nx
	slab[idy][%Block] = ny

	// set up multi threading
	if (nthreads < 0)
		nthreads = ThreadProcessorCount
	endif
	if (nthreads > 0)
		variable threadGroupID = ThreadGroupCreate(nthreads)
		variable ithread
		for (ithread = 0; ithread < nthreads; ithread += 1)
			ThreadStart threadGroupID, ithread, reduce_slab_worker(reduction_func)
		endfor
	else
		make /n=(nzt) /df /free processing_folders
	endif

	if (progress)
		display_progress_panel("HDF5 Import", "Loading data (step 1 of 2)...", nzt)
	endif

	// template image carrying the scaled dimensions; duplicated into each job folder
	make /n=(nx,ny)/d image_template
	setdimlabel 0, -1, AD_Dim0, image_template
	setdimlabel 1, -1, AD_Dim1, image_template
	adh5_scale(image_template)

	variable iz, it
	string dfname
	variable iw, nw
	string sw
	make /n=0 /free /wave result_waves

	// step 1: load each image and queue it for reduction
	izt = 0
	for (iz = 0; iz < nz; iz += 1)
		for (it = 0; it < nt; it += 1)
			// load hyperslab
			slab[idz][%Start] = iz
			slab[idt][%Start] = it
			dfname = "processing_" + num2str(izt)
			newdatafolder /s $dfname
			HDF5LoadData /O /Q /Z /SLAB=slab /N=slabdata fileID, datasetname

			// send to processing queue
			duplicate image_template, image
			variable /g r_index = iz
			variable /g s_index = it
			string /g func_param = reduction_param

			if (nthreads > 0)
				WaveClear image
				ThreadGroupPutDF threadGroupID, :
			else
				// single-threaded: reduce in place
				processing_folders[izt] = GetDataFolderDFR()
				wave slabdata
				wave /wave reduced_waves = reduce_slab_image(slabdata, image, reduction_func, func_param)
				variable /g func_result = numpnts(reduced_waves)
				adh5_get_result_waves(reduced_waves, "redw_", 0)
				WaveClear slabdata, image, reduced_waves
				setdatafolder ::
			endif

			izt += 1
			// progress window
			if (progress)
				if (update_progress_panel(izt))
					result = -4	// user abort
					break
				endif
			endif
		endfor
	endfor

	killwaves /z slab, image_template
	if (progress)
		update_progress_panel(0, message="Processing data (step 2 of 2)...")
	endif

	// step 2: collect the processed folders and compose the result waves
	dfref dfr
	for (izt = 0; (izt < nzt) && (result == 0); izt += 1)
		if (nthreads > 0)
			// wait for the next result folder; poll with 1 s timeout
			do
				dfr = ThreadGroupGetDFR(threadGroupID, 1000)
				if (DatafolderRefStatus(dfr) != 0)
					break
				endif
				if (progress)
					if (update_progress_panel(izt))
						result = -4	// user abort
						break
					endif
				endif
			while (1)
		else
			dfr = processing_folders[izt]
			if (progress)
				if (update_progress_panel(izt))
					result = -4	// user abort
					break
				endif
			endif
		endif

		if (result != 0)
			break
		endif

		nvar rr = dfr:r_index
		nvar ss = dfr:s_index
		nvar func_result = dfr:func_result

		if (func_result < 1)
			result = -3	// dimension reduction error
			break
		endif

		// create the destination waves when the first result arrives
		if (numpnts(result_waves) == 0)
			redimension /n=(func_result) result_waves
			for (iw = 0; iw < func_result; iw += 1)
				sw = "redw_" + num2str(iw)
				wave profile = dfr:$sw
				sw = "ReducedData" + num2str(iw+1)
				make /n=(dimsize(profile, 0), nz, nt) /d /o $sw
				wave data = $sw
				setdimlabel 0, -1, $getdimlabel(profile, 0, -1), data
				setscale /p x dimoffset(profile, 0), dimdelta(profile, 0), waveunits(profile, 0), data
				setscale d 0, 0, waveunits(profile, -1), data
				result_waves[iw] = data
			endfor
		endif
		// copy the image's profiles into the output waves
		for (iw = 0; iw < func_result; iw += 1)
			sw = "redw_" + num2str(iw)
			wave profile = dfr:$sw
			wave data = result_waves[iw]
			data[][rr][ss] = profile[p]
		endfor
	endfor

	// clean up threads / job folders
	if (nthreads > 0)
		variable tstatus = ThreadGroupRelease(threadGroupID)
		if (tstatus == -2)
			result = -5	// thread did not terminate properly
		endif
	else
		for (izt = 0; izt < nzt; izt += 1)
			KillDataFolder /Z processing_folders[izt]
		endfor
	endif

	// drop singleton scan dimensions and label the remaining ones
	if (result == 0)
		nw = numpnts(result_waves)
		for (iw = 0; iw < nw; iw += 1)
			wave data = result_waves[iw]
			if (nz == 1)
				redimension /n=(dimsize(data, 0)) data
			elseif (nt == 1)
				redimension /n=(dimsize(data, 0),nz) data
				setdimlabel 1, -1, AD_DimN, data
			else
				setdimlabel 1, -1, AD_DimN, data
				setdimlabel 2, -1, AD_DimX, data
			endif
		endfor
	endif
	if (progress)
		kill_progress_panel()
	endif

#else
	Abort "HDF5 XOP not loaded."
#endif
	return result
end
|
||
|
||
// thread worker for adh5_load_reduced_detector.
// polls job folders (slabdata + scaled image template) from thread group 0,
// runs the reduction function, and posts the result folder back to the queue.
threadsafe static function reduce_slab_worker(reduction_func)
	funcref adh5_default_reduction reduction_func	// reduction function to apply to each image
	do
		// wait for job from main thread; poll the input queue with 1 s timeout
		do
			dfref dfr = ThreadGroupGetDFR(0, 1000)
			if (DataFolderRefStatus(dfr) == 0)
				if (GetRTError(2))
					return 0	// no more jobs
				endif
			else
				break
			endif
		while (1)

		// get input data
		wave slabdata = dfr:slabdata
		wave image = dfr:image
		svar func_param = dfr:func_param
		nvar rr = dfr:r_index
		nvar ss = dfr:s_index

		// do the work in a fresh output folder
		newdatafolder /s outDF
		variable /g r_index = rr	// copied so the consumer can match results to image indices
		variable /g s_index = ss
		wave /wave reduced_waves = reduce_slab_image(slabdata, image, reduction_func, func_param)
		variable /g func_result = numpnts(reduced_waves)

		// send output to queue and clean up
		adh5_get_result_waves(reduced_waves, "redw_", 0)
		WaveClear slabdata, image, reduced_waves
		ThreadGroupPutDF 0, :
		KillDataFolder dfr
	while (1)

	return 0
end
|
||
|
||
// copy one loaded slab into the scaled image and run the reduction function on it.
// returns the wave reference wave produced by the reduction function.
threadsafe static function /wave reduce_slab_image(slabdata, image, reduction_func, reduction_param)
	wave slabdata	// raw data loaded by HDF5LoadData: 2, 3, or 4 dimensions
	wave image		// pre-scaled 2D destination image
	funcref adh5_default_reduction reduction_func
	string reduction_param

	// transpose from file order into image order (note swapped p/q);
	// leading singleton dimensions are stripped with fixed 0 indices
	variable rank = WaveDims(slabdata)
	if (rank == 2)
		image = slabdata[q][p]
	elseif (rank == 3)
		image = slabdata[0][q][p]
	elseif (rank == 4)
		image = slabdata[0][0][q][p]
	endif

	return reduction_func(image, reduction_param)
end
|
||
|
||
/// load an NDAttributes group from an open HDF5 file into the current data folder.
|
||
///
|
||
/// datasets contained in the group are loaded as waves.
|
||
/// if a dataset contains only one data point, it is added to the IN, ID, IV, IU waves,
|
||
/// where IN = EPICS channel name, ID = attribute name, IV = value, IU = unit
|
||
/// (units are left empty as they are not saved in HDF5).
|
||
/// attributes of the NDAttributes group are added to the IN, ID, IV, IU waves,
|
||
/// however, IN and IU are left empty as this information is not saved in the HDF5 file.
|
||
///
|
||
/// @param fileID ID of open HDF5 file from HDF5OpenFile
|
||
/// @param attributespath path to NDAttributes group in the HDF5 file
|
||
///
|
||
function adh5_loadattr_all(fileID, attributespath)
	variable fileID			// ID of open HDF5 file from HDF5OpenFile
	string attributespath	// path to NDAttributes group in the HDF5 file

	string datasetname
	string datawavename

	// avoid compilation error if HDF5 XOP has not been loaded
#if Exists("HDF5LoadData")

	// datasets in NDAttributes group
	HDF5ListGroup /F /TYPE=2 fileID, attributespath
	string h5datasets = S_HDF5ListGroup
	HDF5ListAttributes /TYPE=1 /Z fileID, attributespath
	string h5attributes = S_HDF5ListAttributes

	variable nds = ItemsInList(h5datasets, ";")
	variable na = ItemsInList(h5attributes, ";")
	variable ids
	variable idest = 0	// write index into IN/ID/IV/IU; incremented by read_attribute_info
	variable n_attr
	string s_attr
	string s_source

	// over-allocate the info waves; trimmed to idest at the end
	make /n=(nds+na) /t /o IN, ID, IV, IU

	for (ids = 0; ids < nds; ids += 1)
		datasetname = StringFromList(ids, h5datasets, ";")
		HDF5LoadData /O/Q fileID, datasetname
		if (v_flag == 0)
			datawavename = StringFromList(0, s_wavenames)
		else
			datawavename = ""
		endif
		// the "source" dataset attribute holds the EPICS channel name
		HDF5LoadData /A="source"/O/Q/TYPE=2 fileID, datasetname
		if (v_flag == 0)
			wave /t source
			s_source = source[0]
		else
			s_source = ""
		endif
		read_attribute_info(datawavename, s_source, idest)
	endfor

	// attributes of NDAttributes group
	// NOTE(review): v_flag here is left over from the last HDF5LoadData call in the
	// loop above (or from HDF5ListAttributes when nds == 0); it was probably meant
	// to test the HDF5ListAttributes /Z result — verify intended behavior.
	if (v_flag == 0)
		nds = ItemsInList(h5attributes, ";")
	else
		nds = 0
	endif
	for (ids = 0; ids < nds; ids += 1)
		datasetname = StringFromList(ids, h5attributes, ";")
		HDF5LoadData /A=datasetname/O/Q/TYPE=1 fileID, attributespath
		if (v_flag == 0)
			datawavename = StringFromList(0, s_wavenames)
			read_attribute_info(datawavename, "", idest) // we don't get the source of these attributes
		endif
	endfor

	// trim to the entries actually written, then sort by channel name and attribute name
	redimension /n=(idest) IN, ID, IV, IU
	sort {IN, ID}, IN, ID, IV, IU

	killwaves /z source
#else
	Abort "HDF5 XOP not loaded."
#endif

end
|
||
|
||
/// sub-function of adh5_loadattr_all.
|
||
///
|
||
/// reads one attribute from a wave which was loaded from an HDF5 file into the info waves IN, ID, IV, IU.
|
||
/// the attribute is read only if the input wave contains exactly one item,
|
||
/// i.e. either the measurement is a single image, or the attribute has string type.
|
||
///
|
||
/// @param datawavename name of the attribute wave in the current folder.
|
||
/// can be text or numeric.
|
||
/// @param source source identifier (EPICS name) of the attribute.
|
||
/// @param idest destination index in IN, ID, IV, IU where the results are written.
|
||
/// the variable is incremented if data was written, otherwise it is left unchanged.
|
||
/// make sure IN, ID, IV, IU have at least idest + 1 elements.
|
||
///
|
||
static function read_attribute_info(datawavename, source, idest)
	string datawavename // name of the attribute wave in the current folder.
	// can be text or numeric.
	string source
	// source identifier (EPICS name) of the attribute.
	variable &idest
	// destination index in IN, ID, IV, IU where the results are written.
	// the variable is incremented if data was written, otherwise it is left unchanged.
	// make sure IN, ID, IV, IU have at least idest + 1 elements.

	wave /t IN	// EPICS channel names
	wave /t ID	// attribute names
	wave /t IV	// values, formatted as text
	wave /t IU	// units (left empty - not stored in the HDF5 file)

	variable n_attr
	string s_attr

	if (exists(datawavename) == 1)
		// record the EPICS channel name in the wave note for later reference
		if (strlen(source) > 0)
			Note $datawavename, "PV=" + source
		endif
		switch(WaveType($datawavename, 1))
			case 1: // numeric
				wave w_attr = $datawavename
				n_attr = numpnts(w_attr)
				sprintf s_attr, "%.12g", w_attr[0]
				break
			case 2: // text
				wave /t wt_attr = $datawavename
				n_attr = numpnts(wt_attr)
				s_attr = wt_attr[0]
				break
			default: // unknown
				n_attr = 0
		endswitch
		// only single-valued attributes go into the info waves
		// (multi-image acquisitions yield n_attr > 1 and are skipped)
		if (n_attr == 1)
			IN[idest] = source
			ID[idest] = datawavename
			IV[idest] = s_attr
			IU[idest] = "" // we don't get the units
			idest += 1
		endif
	endif
end
|
||
|
||
/// set the energy and angle scales of an area detector dataset from the Scienta analyser.
|
||
///
|
||
/// the dimension labels of the energy and angle scales must be set correctly:
|
||
/// AD_Dim0 = energy dimension; AD_Dim1 = angle dimension.
|
||
/// these dimensions must be the first two dimensions of a multi-dimensional dataset.
|
||
/// normally, AD_Dim0 is the X dimension, and AD_Dim1 the Y dimension.
|
||
///
|
||
function adh5_scale_scienta(data)
	wave data	// detector dataset; dims labelled AD_Dim0 (energy) / AD_Dim1 (angle)

	dfref saveDF = GetDataFolderDFR()

	dfref dataDF = GetWavesDataFolderDFR(data)
	dfref attrDF = GetAttrDataFolderDFR(data)

	wave /SDFR=attrDF LensMode
	wave /SDFR=attrDF /Z ChannelBegin, ChannelEnd	// explicit energy scale limits
	wave /SDFR=attrDF /Z SliceBegin, SliceEnd		// explicit angle scale limits

	variable EDim, ADim		// wave dimension (0=X, 1=Y) of energy / angle; -1 = not found
	variable ELow, EHigh, ALow, AHigh
	string EUnit, AUnit

	// which dimension is angle and which one is energy?
	strswitch(GetDimLabel(data, 0, -1))
		case "AD_Dim0":
			EDim = 0
			break
		case "AD_Dim1":
			EDim = 1
			break
		default:
			EDim = -1
	endswitch
	strswitch(GetDimLabel(data, 1, -1))
		case "AD_Dim0":
			ADim = 0
			break
		case "AD_Dim1":
			ADim = 1
			break
		default:
			ADim = -1
	endswitch

	// defaults (point scaling)
	if (EDim >= 0)
		ELow = dimoffset(data, EDim)
		EHigh = dimoffset(data, EDim) + dimdelta(data, EDim) * (dimsize(data, EDim) - 1)
		EUnit = "eV"
	endif
	if (ADim >= 0)
		ALow = dimoffset(data, ADim)
		AHigh = dimoffset(data, ADim) + dimdelta(data, ADim) * (dimsize(data, ADim) - 1)
		AUnit = "arb."
	endif

	// lens mode can give more detail
	if (waveexists(LensMode) && (numpnts(LensMode) >= 1))
		switch(LensMode[0])
			case 1: // Angular45
				ALow = -45/2
				AHigh = +45/2
				AUnit = "°"	// bug fix: unit was a mis-encoded character; angular modes are in degrees
				break
			case 2: // Angular60
				ALow = -60/2
				AHigh = +60/2
				AUnit = "°"	// bug fix: see above
				break
		endswitch
	endif

	// best option if scales are explicit in separate waves
	if (waveexists(ChannelBegin) && waveexists(ChannelEnd) && (numpnts(ChannelBegin) >= 1) && (numpnts(ChannelEnd) >= 1))
		ELow = ChannelBegin[0]
		EHigh = ChannelEnd[0]
	endif
	if (waveexists(SliceBegin) && waveexists(SliceEnd) && (numpnts(SliceBegin) >= 1) && (numpnts(SliceEnd) >= 1))
		ALow = SliceBegin[0]
		AHigh = SliceEnd[0]
	endif

	// apply new scales (inclusive: first and last point at the limits)
	switch(EDim)
		case 0:
			setscale /i x ELow, EHigh, EUnit, data
			break
		case 1:
			setscale /i y ELow, EHigh, EUnit, data
			break
	endswitch
	switch(ADim)
		case 0:
			setscale /i x ALow, AHigh, AUnit, data
			break
		case 1:
			setscale /i y ALow, AHigh, AUnit, data
			break
	endswitch

	setscale d 0, 0, "arb.", data

	setdatafolder saveDF
end
|
||
|
||
/// scales the extra dimensions of an area detector dataset according to the EPICS scan
|
||
///
|
||
/// the scan positioner name and its values must be available
|
||
///
|
||
/// @todo incomplete
|
||
///
|
||
function adh5_scale_scan(data)
	wave data	// reduced dataset whose scan dimension carries the AD_DimN label

	dfref saveDF = GetDataFolderDFR()

	dfref dataDF = GetWavesDataFolderDFR(data)
	wave /SDFR=dataDF AcquisitionMode, DetectorMode, EnergyMode

	wave /SDFR=dataDF /z Scan1Active, Scan2Active
	wave /SDFR=dataDF /t /z Scan1Positioner1, Scan1Readback1
	wave /SDFR=dataDF /t /z Scan1Positioner2, Scan1Readback2
	wave /SDFR=dataDF /t /z Scan2Positioner1, Scan2Readback1
	wave /SDFR=dataDF /t /z Scan2Positioner2, Scan2Readback2

	// TODO : search the data folder for positioner waves,
	// i.e. waves with the PV name corresponding to Scan1Positioner1 in their wave note.
	wave /z zscale
	// NOTE(review): zscale is looked up with /z but indexed unconditionally below;
	// if no zscale wave exists in the current folder this raises a runtime error — verify callers.

	// apply the positioner values as an inclusive scale to whichever
	// dimension carries the AD_DimN (scan) label
	strswitch(GetDimLabel(data, 0, -1))
		case "AD_DimN":
			setscale /i x zscale[0], zscale[numpnts(zscale)-1], "", data
			break
	endswitch
	strswitch(GetDimLabel(data, 1, -1))
		case "AD_DimN":
			setscale /i y zscale[0], zscale[numpnts(zscale)-1], "", data
			break
	endswitch
	strswitch(GetDimLabel(data, 2, -1))
		case "AD_DimN":
			setscale /i z zscale[0], zscale[numpnts(zscale)-1], "", data
			break
	endswitch

	setdatafolder saveDF
end
|