#pragma rtGlobals=3 // Use modern global access method and strict wave access. #pragma IgorVersion = 6.2 #pragma ModuleName = PearlPShellImport #pragma version = 1.02 #include #include "pearl-gui-tools" #include "pearl-area-import" // copyright (c) 2013-16 Paul Scherrer Institut // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // http:///www.apache.org/licenses/LICENSE-2.0 /// @file /// @brief import data from PShell /// @ingroup ArpesPackage /// /// /// HDF5 file import from the PShell data acquisition program. /// the main import functions are: /// /// - psh5_load_complete() /// load all scans and datasets from a file. /// /// - psh5_load_reduced() /// load the ScientaImage dataset of the first scan and reduce its dimensionality. /// /// - psh5_load_scan_complete() /// load all datasets of a selected scan. /// /// - psh5_load_scan_preview() /// load a preview of a selected scan. /// /// - psh5_load_dataset() /// load a selected dataset. /// /// - psh5_load_dataset_reduced() /// load a selected dataset and reduce its dimensionality. /// /// the following helper functions are also needed: /// /// - psh5_open_file() /// - psh5_close_file() /// - psh5_list_scans() /// - psh5_list_scan_datasets() /// - psh5_load_scan_meta() /// - psh5_load_scan_attrs() /// @namespace PearlPShellImport /// @brief import data from PShell /// /// PearlPShellImport is declared in @ref pearl-pshell-import.ipf. /// Dimension label for the energy dispersive dimension of multi-dimensional datasets strconstant kEnergyDimLabel = "energy" /// Dimension label for the angle dispersive dimension of multi-dimensional datasets strconstant kAngleDimLabel = "angle" /// Dimension label for the scan dimension of multi-dimensional datasets strconstant kScanDimLabel = "scan" /// Dimension label for the data dimension. /// This label may be used to store the parameters for the `setscale d` operation. strconstant kDataDimLabel = "data" /// open a HDF5 file created by the PShell data acquisition program and prepare the data folder. /// /// the function opens a specified or interactively selected HDF5 file, /// creates a data folder `$ANickName` under root, /// and changes to the new data folder. /// /// the file must be closed by psh5_close_file() after use. /// /// @param ANickName destination folder name (top level under root). /// /// @param APathName igor symbolic path name. can be empty if the path is specified in FileName or a dialog box should be displayed /// /// @param AFileName if empty a dialog box shows up /// /// @return ID of open HDF5 file from HDF5OpenFile. /// zero if an error occurred. /// /// @return global string s_filepath in new data folder contains the full file path on disk. /// /// @return global string s_scanpaths in new data folder contains a list of scan groups inside the file. /// function psh5_open_file(ANickName, APathName, AFileName) string ANickName string APathName string AFileName setdatafolder root: newdatafolder /s /o $("root:" + ANickName) dfref fileDF = GetDataFolderDFR() variable fileID HDF5OpenFile /P=$APathName /R fileID as AFileName if (v_flag == 0) string /g s_filepath string /g s_scanpaths s_filepath = s_path + s_filename s_scanpaths = psh5_list_scans(fileID) else fileID = 0 endif return fileID end /// close a HDF5 file opened by psh5_open_file. /// /// this function just closes the HDF5 file. /// no change is made to the loaded data. /// /// @param fileID ID of open HDF5 file from psh5_open_file(). /// function psh5_close_file(fileID) variable fileID HDF5CloseFile fileID end /// load everything from a PShell data file. /// /// @param ANickName destination folder name (top level under root) /// /// @param APathName igor symbolic path name. can be empty if the path is specified in FileName or a dialog box should be displayed /// /// @param AFileName if empty a dialog box shows up /// /// @param load_data select whether datasets (positioners and detectors) are loaded. /// @arg 1 (default) load data. /// @arg 0 do not load data. /// /// @param load_attr select whether attributes (auxiliary device readbacks) are loaded. /// for proper wave scaling, the attributes must be loaded. /// @arg 1 (default) load attributes. /// @arg 0 do not load attributes. /// /// @return complete path of the loaded file if successful. /// empty string otherwise. /// /// @return global string s_filepath in new data folder contains the full file path on disk. /// /// @return global string s_scanpaths in new data folder contains a list of scan groups inside the file. /// function /s psh5_load_complete(ANickName, APathName, AFileName, [load_data, load_attr]) string ANickName string APathName string AFileName variable load_data variable load_attr if (ParamIsDefault(load_data)) load_data = 1 endif if (ParamIsDefault(load_attr)) load_attr = 1 endif dfref saveDF = GetDataFolderDFR() // performance monitoring variable timerRefNum variable /g psh5_perf_secs timerRefNum = startMSTimer variable fileID = psh5_open_file(ANickName, APathName, AFileName) if (fileID) dfref fileDF = GetDataFolderDFR() svar s_filepath svar s_scanpaths AFileName = s_filepath print "loading " + s_filepath + "\r" variable ig variable ng = ItemsInList(s_scanpaths, ";") string sg string folder for (ig = 0; ig < ng; ig += 1) sg = StringFromList(ig, s_scanpaths, ";") folder = CleanupName(ReplaceString("/", sg, ""), 0) setdatafolder fileDF newdatafolder /s /o $folder psh5_load_scan_complete(fileID, sg, load_data=load_data, load_attr=load_attr) endfor psh5_close_file(fileID) else AFileName = "" endif psh5_perf_secs = stopMSTimer(timerRefNum) / 1e6 setdatafolder saveDF return AFileName end /// load a preview image from a PShell data file. /// /// the data wave is loaded into the current data folder. /// attributes are loaded into the attr subfolder. existing waves in attr are deleted. /// /// @warning EXPERIMENTAL /// this function uses the root:pearl_area:preview data folder. existing data there may be deleted! /// /// @param ANickName destination wave name. the wave is created in the current data folder. /// /// @param APathName igor symbolic path name. can be empty if the path is specified in FileName or a dialog box should be displayed /// /// @param AFileName if empty a dialog box shows up /// /// @param load_data 1 (default): load data; 0: do not load data /// /// @param load_attr 1 (default): load attributes; 0: do not load attributes /// note: for correct scaling of the image, the attributes need to be loaded /// /// @return name of loaded preview wave. /// function /s psh5_load_preview(ANickName, APathName, AFileName, [load_data, load_attr]) string ANickName string APathName string AFileName variable load_data variable load_attr if (ParamIsDefault(load_data)) load_data = 1 endif if (ParamIsDefault(load_attr)) load_attr = 1 endif dfref saveDF = GetDataFolderDFR() setdatafolder root: newdatafolder /o/s pearl_area newdatafolder /o/s preview variable fileID string scanpaths = "" string dataname = "" // performance monitoring variable timerRefNum variable /g adh5_perf_secs timerRefNum = startMSTimer HDF5OpenFile /P=$APathName /R /Z fileID as AFileName if (v_flag == 0) AFileName = s_path + s_filename dfref fileDF = GetDataFolderDFR() scanpaths = psh5_list_scans(fileID) variable ig string sg ig = 0 sg = StringFromList(ig, scanpaths, ";") dataname = psh5_load_scan_preview(fileID, sg, set_scale=load_attr) wave /z data = $dataname string destpath = GetDataFolder(1, saveDF) + ANickName if (waveexists(data)) duplicate /o data, $destpath wave /z data = $destpath endif if (load_attr) setdatafolder saveDF newdatafolder /o/s attr killwaves /a/z psh5_load_scan_attrs(fileID, sg) setdatafolder :: endif HDF5CloseFile fileID endif if (timerRefNum >= 0) adh5_perf_secs = stopMSTimer(timerRefNum) / 1e6 endif setdatafolder saveDF return dataname end /// load all data of a selected scan from a PShell data file. /// /// data is loaded into the current data folder. /// attribute datasets are loaded into sub-folder `attr`. /// existing data, if present, is overwritten. /// /// @param fileID ID of open HDF5 file from psh5_open_file(). /// /// @param scanpath path to the scan group in the HDF5 file, e.g. "/scan 1". /// /// @param load_data select whether datasets (positioners and detectors) are loaded. /// @arg 1 (default) load data. /// @arg 0 do not load data. /// /// @param load_attr select whether attributes (auxiliary device readbacks) are loaded. /// for proper wave scaling, the attributes must be loaded. /// @arg 1 (default) load attributes. /// @arg 0 do not load attributes. /// /// @return semicolon-separated list of the loaded data waves (excluding attributes). /// function /s psh5_load_scan_complete(fileID, scanpath, [load_data, load_attr]) variable fileID string scanpath variable load_data variable load_attr if (ParamIsDefault(load_data)) load_data = 1 endif if (ParamIsDefault(load_attr)) load_attr = 1 endif dfref saveDF = GetDataFolderDFR() dfref dataDF = GetDataFolderDFR() string wavenames string attrnames psh5_load_scan_meta(fileID, scanpath) if (load_data) wavenames = psh5_load_scan_data(fileID, scanpath) endif if (load_attr) newdatafolder /s /o attr attrnames = psh5_load_scan_attrs(fileID, scanpath) endif if (load_data && load_attr) setdatafolder dataDF ps_scale_datasets() endif setdatafolder saveDF return wavenames end /// list scan groups of a PShell data file. /// /// the function returns a list of all top-level groups whose name starts with "scan". /// /// @param fileID ID of open HDF5 file from psh5_open_file(). /// /// @return semicolon-separated list of group paths. /// function /s psh5_list_scans(fileID) variable fileID HDF5ListGroup /F /TYPE=1 fileID, "/" variable ig variable ng = ItemsInList(S_HDF5ListGroup, ";") string sg string scans = "" for (ig = 0; ig < ng; ig += 1) sg = StringFromList(ig, S_HDF5ListGroup, ";") if (cmpstr(sg[1,4], "scan") == 0) scans = AddListItem(sg, scans, ";", inf) endif endfor return scans end /// list datasets of a PShell scan group. /// /// the function returns a list of all datasets of the selected scan. /// this does not include datasets from the attributes sub-group. /// /// @note in a future version, an option may be introduced to filter datasets by function (_Readable_ and/or _Writable_). /// /// @param fileID ID of open HDF5 file from psh5_open_file(). /// /// @param scanpath path to the scan group in the HDF5 file, e.g. "/scan 1". /// /// @return semicolon-separated list of dataset paths. /// function /s psh5_list_scan_datasets(fileID, scanpath) variable fileID string scanpath HDF5ListGroup /F /TYPE=2 /Z fileID, scanpath return S_HDF5ListGroup end /// load all datasets of a PShell scan group. /// /// data is loaded into the current data folder. /// /// @param fileID ID of open HDF5 file from psh5_open_file(). /// /// @param scanpath path to the scan group in the HDF5 file, e.g. "/scan 1". /// /// @return semicolon-separated list of the loaded waves. /// function /s psh5_load_scan_data(fileID, scanpath) variable fileID string scanpath string wavenames = "" HDF5ListGroup /F /TYPE=2 /Z fileID, scanpath if (!v_flag) variable ids variable nds = ItemsInList(S_HDF5ListGroup, ";") string sds string sw for (ids = 0; ids < nds; ids += 1) sds = StringFromList(ids, S_HDF5ListGroup, ";") sw = psh5_load_dataset(fileID, "", sds, set_scale=0) wavenames = AddListItem(sw, wavenames, ";", inf) endfor endif return wavenames end /// load attributes of a PShell scan group. /// /// "attributes" are the auxiliary data inside the attrs group. /// do not confuse with HDF5 attributes! /// HDF5 attributes are loaded by the psh5_load_scan_meta() function. /// /// data is loaded into the current data folder. /// this should normally be the `:attr` folder inside the respective scan folder. /// /// @param fileID ID of open HDF5 file from psh5_open_file(). /// /// @param scanpath path to the scan group in the HDF5 file, e.g. "/scan 1". /// /// @param attr_sets specify the attribute sets to be loaded. /// this value can be an arithmetic OR of the following constants. /// by default, all attributes are loaded. /// @arg 1 all datasets that are present in the file. /// @arg 2 datasets relevant for wave scaling of Scienta data. /// /// @return semicolon-separated list of the loaded waves. /// function /s psh5_load_scan_attrs(fileID, scanpath, [attr_sets]) variable fileID string scanpath variable attr_sets if (ParamIsDefault(attr_sets)) attr_sets = 1 endif string attr_path = ReplaceString("//", scanpath + "/attrs", "/") string attr_list = "" if (attr_sets & 1) HDF5ListGroup /TYPE=2 /Z fileID, attr_path if (!v_flag) attr_list = S_HDF5ListGroup endif endif if (attr_sets & 2) attr_list = AddListItem("LensMode", attr_list, ";", inf) attr_list = AddListItem("ScientaChannelBegin", attr_list, ";", inf) attr_list = AddListItem("ScientaChannelEnd", attr_list, ";", inf) attr_list = AddListItem("ScientaSliceBegin", attr_list, ";", inf) attr_list = AddListItem("ScientaSliceEnd", attr_list, ";", inf) endif variable ids variable nds = ItemsInList(attr_list, ";") string sds string wavenames = "" for (ids = 0; ids < nds; ids += 1) sds = StringFromList(ids, attr_list, ";") HDF5LoadData /O /Q /Z fileID, attr_path + "/" + sds if (!v_flag) wavenames = AddListItem(s_wavenames, wavenames, ";", inf) endif endfor return wavenames end /// load metadata of a PShell scan group. /// /// _metadata_ are the HDF5 attributes attached to the scan group. /// the following attributes are loaded. /// the respective wave names under Igor are given in parentheses. /// /// - Dimensions (ScanDimensions) /// - Writables (ScanWritables) /// - Readables (ScanReadables) /// - Steps (ScanSteps) /// /// if they are missing in the file, `ScanDimensions` and `ScanReadables` are set to default values /// assuming the file contains a single spectrum. /// /// data is loaded into the current data folder. /// /// @param fileID ID of open HDF5 file from psh5_open_file(). /// /// @param scanpath path to the scan group in the HDF5 file, e.g. "/scan 1". /// /// @return semicolon-separated list of the loaded waves. /// function /s psh5_load_scan_meta(fileID, scanpath) variable fileID string scanpath string wavenames = "" HDF5LoadData /O /Q /Z /A="Dimensions" /N=ScanDimensions /TYPE=1 fileID, scanpath if (!v_flag) wavenames = AddListItem(s_wavenames, wavenames, ";", inf) HDF5LoadData /O /Q /Z /A="Writables" /N=ScanWritables /TYPE=1 fileID, scanpath if (!v_flag) wavenames = AddListItem(s_wavenames, wavenames, ";", inf) endif HDF5LoadData /O /Q /Z /A="Readables" /N=ScanReadables /TYPE=1 fileID, scanpath if (!v_flag) wavenames = AddListItem(s_wavenames, wavenames, ";", inf) endif HDF5LoadData /O /Q /Z /A="Steps" /N=ScanSteps /TYPE=1 fileID, scanpath if (!v_flag) wavenames = AddListItem(s_wavenames, wavenames, ";", inf) endif else make /n=1 /o ScanDimensions ScanDimensions = 0 wavenames = AddListItem("ScanDimensions", wavenames, ";", inf) make /n=1 /o /t ScanReadables ScanReadables[0] = "ScientaSpectrum" wavenames = AddListItem("ScanReadables", wavenames, ";", inf) endif return wavenames end /// load a dataset from an open PShell HDF5 file. /// /// if the dataset has a maximum of two dimensions, the function loads it at once. /// if it has more than two dimension, the function calls psh5_load_dataset_slabs() to load the data slab by slab. /// /// - the metadata (HDF5 attributes) are loaded into the wave note, cf. psh5_load_dataset_meta(). /// - dimension labels are set according the dataset name, cf. ps_set_dimlabels(). /// - wave scaling is set if the necessary scan attributes have been loaded and the `set_scale` option is selected (default). /// the attributes must be loaded by psh5_load_scan_meta() and psh5_load_scan_attrs() (attr_sets=2). /// /// @param fileID ID of open HDF5 file from psh5_open_file(). /// /// @param scanpath path to the scan group in the HDF5 file, e.g. "/scan 1". /// /// @param dataset name of the dataset. /// the name of the loaded wave is a cleaned up version of the dataset name. /// /// @param set_scale by default, the function tries to set the wave scaling if the attributes have been loaded. /// if multiple datasets are loaded from a file, /// it is more efficient to set the scaling of all loaded datasets at the end by calling ps_scale_datasets(). /// @arg 1 (default) set the wave scaling. /// @arg 0 do not set the wave scaling. /// /// @return name of loaded wave if successful. empty string otherwise. /// function /s psh5_load_dataset(fileID, scanpath, datasetname, [set_scale]) variable fileID string scanpath string datasetname variable set_scale string datasetpath datasetpath = scanpath + "/" + datasetname datasetpath = ReplaceString("//", datasetpath, "/") STRUCT HDF5DataInfo di // Defined in HDF5 Browser.ipf. InitHDF5DataInfo(di) variable err = HDF5DatasetInfo(fileID, datasetpath, 0, di) if (err != 0) print "error accessing detector/data" return "" endif string dataname if (di.ndims < 3) HDF5LoadData /O /Q /Z fileID, datasetpath dataname = StringFromList(0, S_waveNames) else dataname = psh5_load_dataset_slabs(fileID, scanpath, datasetname) endif wave /z data = $dataname if (waveexists(data)) psh5_load_dataset_meta(fileID, scanpath, datasetname, data) ps_set_dimlabels(data) if (set_scale) ps_scale_dataset(data) endif else dataname = "" endif return dataname end /// load a preview dataset from an open PShell HDF5 file. /// /// if the dataset has a maximum of two dimensions, the function loads it at once. /// if it has more than two dimension, the function selects and loads one two-dimensional slab. /// /// /// @param fileID ID of open HDF5 file from psh5_open_file(). /// /// @param scanpath path to the scan group in the HDF5 file, e.g. "/scan 1". /// /// @param dataset name of the dataset. /// the name of the loaded wave is a cleaned up version of the dataset name. /// /// @param set_scale by default, the function tries to set the wave scaling if the attributes have been loaded. /// if multiple datasets are loaded from a file, /// it is more efficient to set the scaling of all loaded datasets at the end by calling ps_scale_datasets(). /// @arg 1 (default) set the wave scaling. /// @arg 0 do not set the wave scaling. /// /// @return name of loaded wave if successful. empty string otherwise. /// function /s psh5_load_scan_preview(fileID, scanpath, [set_scale]) variable fileID string scanpath variable set_scale dfref saveDF = GetDataFolderDFR() dfref dataDF = saveDF string datasets = psh5_list_scan_datasets(fileID, scanpath) string datasetname = "" // todo: this should be generalized if (strsearch(datasets, "ScientaImage", 0) >= 0) datasetname = "ScientaImage" elseif (strsearch(datasets, "ScientaSpectrum", 0) >= 0) datasetname = "ScientaSpectrum" elseif (strsearch(datasets, "ScientaEnergyDistribution", 0) >= 0) datasetname = "ScientaEnergyDistribution" elseif (strsearch(datasets, "Counts", 0) >= 0) datasetname = "Counts" elseif (strsearch(datasets, "SampleCurrent", 0) >= 0) datasetname = "SampleCurrent" else datasetname = StringFromList(0, datasets) endif string datasetpath datasetpath = scanpath + "/" + datasetname datasetpath = ReplaceString("//", datasetpath, "/") STRUCT HDF5DataInfo di // Defined in HDF5 Browser.ipf. InitHDF5DataInfo(di) variable err = HDF5DatasetInfo(fileID, datasetpath, 0, di) if (err != 0) print "error accessing detector/data" return "" endif string dataname if (di.ndims < 3) HDF5LoadData /O /Q /Z fileID, datasetpath dataname = StringFromList(0, S_waveNames) else variable dim2start = 0 variable dim2count = 1 variable dim3start = 0 variable dim3count = 1 if (di.ndims >= 3) dim2start = floor(di.dims[2] / 2) dim2count = 1 endif if (di.ndims >= 4) dim3start = floor(di.dims[3] / 2) dim3count = 1 endif dataname = psh5_load_dataset_slab(fileID, scanpath, datasetname, dim2start, dim2count, dim3start, dim3count) endif wave /z data = $dataname if (waveexists(data)) if (set_scale) setdatafolder dataDF newdatafolder /o/s attr killwaves /a/z psh5_load_scan_attrs(fileID, scanpath, attr_sets=2) setdatafolder dataDF ps_scale_dataset(data) endif else dataname = "" endif return dataname end /// load metadata of a PShell dataset. /// /// "metadata" are the HDF5 attributes attached to the scan dataset. /// /// data is added to the wave note. /// /// @param fileID ID of open HDF5 file from psh5_open_file(). /// /// @param datapath path to the containing group in the HDF5 file. /// path separator is the slash "/". /// /// @param dataset name of the dataset. /// /// @param datawave metadata is added to the wave note of this wave. /// /// @return 0 if successful, non-zero if an error occurred. /// function psh5_load_dataset_meta(fileID, datapath, datasetname, datawave) variable fileID string datapath string datasetname wave datawave dfref saveDF = GetDataFolderDFR() SetDataFolder NewFreeDataFolder() string datasetpath = datapath + "/" + datasetname datasetpath = ReplaceString("//", datasetpath, "/") string wnote HDF5LoadData /O /Q /Z /A="Writable Dimension" /N=WriteDim fileID, datasetpath if (!v_flag) wave WriteDim // scan dimension starts at 1 sprintf wnote, "ScanDimension=%u", WriteDim[0] Note datawave, wnote endif HDF5LoadData /O /Q /Z /A="Writable Index" /N=WriteIndex fileID, datasetpath if (!v_flag) wave WriteIndex sprintf wnote, "WriteableIndex=%u", WriteIndex[0] Note datawave, wnote endif HDF5LoadData /O /Q /Z /A="Readable Index" /N=ReadIndex fileID, datasetpath if (!v_flag) wave ReadIndex sprintf wnote, "ReadableIndex=%u", ReadIndex[0] Note datawave, wnote endif setdatafolder saveDF return 0 end /// load a dataset slab-wise from the open PShell HDF5 file. /// /// the function loads the dataset image by image using the hyperslab option. /// /// @param fileID ID of open HDF5 file from psh5_open_file(). /// /// @param datapath path to the containing group in the HDF5 file. /// path separator is the slash "/". /// /// @param dataset name of the dataset. /// also defines the name of the loaded wave. /// /// @param progress select whether a progress window is displayed during the process. /// @arg 1 (default) show progress window. /// @arg 0 do not show progress window. /// /// @return name of loaded wave if successful. empty string otherwise. /// function /s psh5_load_dataset_slabs(fileID, datapath, datasetname, [progress]) variable fileID string datapath string datasetname variable progress if (ParamIsDefault(progress)) progress = 1 endif variable result = 0 string datasetpath string datawavename datasetpath = datapath + "/" + datasetname datasetpath = ReplaceString("//", datasetpath, "/") datawavename = StringFromList(ItemsInList(datasetpath, "/") - 1, datasetpath, "/") STRUCT HDF5DataInfo di // Defined in HDF5 Browser.ipf. InitHDF5DataInfo(di) variable err = HDF5DatasetInfo(fileID, datasetpath, 0, di) if (err != 0) print "error accessing detector/data" return "" endif if (di.ndims < 2) print "error: rank of dataset < 2" return "" endif variable idx, idy, idz, idt, izt idx = 1 idy = 0 idz = 2 idt = 3 variable nx, ny, nz, nt, nzt nx = di.dims[idx] ny = di.dims[idy] nz = di.dims[idz] nt = di.dims[idt] make /n=(nx,ny,nz,nt) /o $datawavename wave data = $datawavename nz = max(nz, 1) nt = max(nt, 1) nzt = nz * nt izt = 0 if (progress) display_progress_panel("HDF5 Import", "Loading data...", nzt) endif // load data image by image HDF5MakeHyperslabWave(GetDataFolder(1) + "slab", max(di.ndims, 4)) wave slab slab[][%Start] = 0 slab[][%Stride] = 1 slab[][%Count] = 1 slab[][%Block] = 1 slab[idx][%Block] = nx slab[idy][%Block] = ny variable iz, it for (iz = 0; iz < nz; iz += 1) for (it = 0; it < nt; it += 1) slab[idz][%Start] = iz slab[idt][%Start] = it HDF5LoadData /O /Q /Z /SLAB=slab /N=slabdata fileID, datasetpath wave slabdata // 2D, 3D, or 4D with singletons data[][][iz][it] = slabdata[q][p][0][0] // progress window izt += 1 if (progress) if (update_progress_panel(izt)) result = -4 // user abort break endif endif endfor if (result < 0) break endif endfor if (progress) kill_progress_panel() endif killwaves /z slab, slabdata if (!result) ps_set_dimlabels(data) return datawavename else killwaves /z data return "" endif end /// load a single image from the open PShell data file. /// /// the function can average over a region in the extra dimensions. /// /// @param fileID ID of open HDF5 file from psh5_open_file(). /// /// @param datapath path to the containing group in the HDF5 file. /// path separator is the slash "/". /// /// @param dataset name of the dataset. /// also defines the name of the loaded wave. /// /// @param dim2start 2nd dimension coordinate of the first image /// set to 0 if dimension may not be present /// /// @param dim2count number of subsequent images to average /// set to 1 if dimension may not be present /// /// @param dim3start 3rd dimension coordinate of the first image /// set to 0 if dimension may not be present /// /// @param dim3count number of subsequent images to average /// set to 1 if dimension may not be present /// /// @return name of loaded wave if successful. empty string otherwise. /// function /s psh5_load_dataset_slab(fileID, datapath, datasetname, dim2start, dim2count, dim3start, dim3count) variable fileID string datapath string datasetname variable dim2start variable dim2count variable dim3start variable dim3count string datasetpath string datawavename datasetpath = datapath + "/" + datasetname datasetpath = ReplaceString("//", datasetpath, "/") datawavename = StringFromList(ItemsInList(datasetpath, "/") - 1, datasetpath, "/") STRUCT HDF5DataInfo di InitHDF5DataInfo(di) variable err = HDF5DatasetInfo(fileID, datasetpath, 0, di) if (err != 0) print "error accessing detector/data" return "" endif if (di.ndims < 2) print "error: rank of dataset < 2" return "" endif variable idx, idy, idz, idt idx = 1 idy = 0 idz = 2 idt = 3 variable nx, ny nx = di.dims[idx] ny = di.dims[idy] make /n=(nx,ny) /o $datawavename wave data = $datawavename data = 0 HDF5MakeHyperslabWave(GetDataFolder(1) + "slab", max(di.ndims, 4)) wave slab slab[][%Start] = 0 slab[][%Stride] = 1 slab[][%Count] = 1 slab[][%Block] = 1 slab[idx][%Block] = nx slab[idy][%Block] = ny variable iz, it variable navg = 0 variable dim2end = dim2start + dim2count - 1 variable dim3end = dim3start + dim3count - 1 for (iz = dim2start; iz <= dim2end; iz += 1) for (it = dim3start; it <= dim3end; it += 1) slab[idz][%Start] = iz slab[idt][%Start] = it HDF5LoadData /O /Q /Z /SLAB=slab /N=slabdata fileID, datasetpath if (!v_flag) wave slabdata data += slabdata[q][p][0][0] navg += 1 endif endfor endfor if (navg) data /= navg endif killwaves /z slab, slabdata ps_set_dimlabels(data) return datawavename end /// set dimension labels according to the axis type /// /// this function asserts a particular ordering of dimensions types /// based on the name of the wave for /// ScientaImage, ScientaSpectrum, ImageAngleDistribution, ImageEnergyDistribution. /// all other waves must be one-dimensional, and the dimension must be the scan dimension. /// /// dimension labels are required by scaling functions. /// function ps_set_dimlabels(data) wave data string name = NameOfWave(data) // intrinsic dimensions strswitch(name) case "ScientaImage": setdimlabel 0, -1, $kEnergyDimLabel, data setdimlabel 1, -1, $kAngleDimLabel, data if (WaveDims(data) >= 3) setdimlabel 2, -1, $kScanDimLabel, data endif break case "ScientaSpectrum": setdimlabel 0, -1, $kEnergyDimLabel, data break case "ImageAngleDistribution": setdimlabel 0, -1, $kScanDimLabel, data setdimlabel 1, -1, $kAngleDimLabel, data break case "ImageEnergyDistribution": setdimlabel 0, -1, $kScanDimLabel, data setdimlabel 1, -1, $kEnergyDimLabel, data break default: setdimlabel 0, -1, $kScanDimLabel, data endswitch end /// set the dimension scales of loaded PShell Scienta datasets according to attributes. /// /// the datasets must be in the current data folder. /// all datasets listed in the ScanReadables waves are scaled /// according to the attribute waves in the :attr folder. /// /// the dimension labels of the dataset waves must be set correctly, e.g. by ps_set_dimlabels(). /// this is implicitly done by the high-level load functions. /// function ps_scale_datasets() dfref dataDF = GetDataFolderDFR() dfref attrDF = :attr make /n=3 /free lo, hi make /n=3 /t /free un ps_detect_scale(lo, hi, un) wave /t /z /SDFR=dataDF ScanReadables if (WaveExists(ScanReadables)) variable isr variable nsr = numpnts(ScanReadables) string ssr for (isr = 0; isr < nsr; isr += 1) wave /z /SDFR=dataDF wsr = $ScanReadables[isr] if (WaveExists(wsr)) ps_scale_dataset_2(wsr, lo, hi, un) endif endfor endif end /// set the dimension scales of a loaded PShell Scienta dataset according to attributes. /// /// the attributes must be in the child folder `:attr` next to the dataset. /// /// the dimension labels of the dataset waves must be set correctly, cf. ps_set_dimlabels(). /// this is implicitly done by the high-level load functions. /// /// the function is useful if a single dataset is loaded and scaled. /// if multiple datasets are loaded, ps_scale_datasets() is slightly more efficient. /// /// @param data data wave to be scaled. /// dimension labels (index -1) must be set correctly, cf. ps_set_dimlabels(). /// function ps_scale_dataset(data) wave data dfref saveDF = GetDataFolderDFR() dfref dataDF = GetWavesDataFolderDFR(data) setdatafolder dataDF make /n=3 /free lo, hi make /n=3 /t /free un ps_detect_scale(lo, hi, un) ps_scale_dataset_2(data, lo, hi, un) setdatafolder saveDF end /// detect the dimension scales from attributes. /// /// the function checks the current data folder and the sub-folder `:attr` for scan parameters. /// the results are written to the provided waves. /// the function is normally called by ps_scale_datasets() but can also be used independently. /// /// the provided waves are redimensioned by the function, and dimension labels are set. /// the scale parameters can then be extracted by keyword, e.g., /// @arg `lo[%%energy]` analyser energy dimension. /// @arg `lo[%%angle]` analyser angle dimension. /// @arg `lo[%%scan]` scan dimension. /// @arg `lo[%%data]` data dimension (units). /// /// the function tries to read the following waves, /// and may fall back to more or less reasonable default values if they are not found. /// @arg `:attr:LensMode` /// @arg `:attr:ScientaChannelBegin` /// @arg `:attr:ScientaChannelEnd` /// @arg `:attr:ScientaSliceBegin` /// @arg `:attr:ScientaSliceEnd` /// @arg `ScanWritables` /// @arg wave referenced by `ScanWritables[0]` /// /// @param lo wave to receive the lower limits. /// /// @param hi wave to receive the upper limits. /// /// @param un text wave to receive the unit labels. /// /// @return the function results are written to the lo, hi, un waves. /// function ps_detect_scale(lo, hi, un) wave lo wave hi wave /t un dfref dataDF = GetDataFolderDFR() dfref attrDF = :attr redimension /n=4 lo, hi, un setdimlabel 0, 0, $kEnergyDimLabel, lo, hi, un setdimlabel 0, 1, $kAngleDimLabel, lo, hi, un setdimlabel 0, 2, $kScanDimLabel, lo, hi, un setdimlabel 0, 3, $kDataDimLabel, lo, hi, un // default values lo[%$kEnergyDimLabel] = 0 hi[%$kEnergyDimLabel] = 1 un[%$kEnergyDimLabel] = "eV" lo[%$kAngleDimLabel] = -1 hi[%$kAngleDimLabel] = 1 un[%$kAngleDimLabel] = "" lo[%$kScanDimLabel] = 0 hi[%$kScanDimLabel] = 1 un[%$kScanDimLabel] = "" lo[%$kDataDimLabel] = 0 hi[%$kDataDimLabel] = 0 un[%$kDataDimLabel] = "arb." wave /SDFR=attrDF /T /Z LensMode wave /SDFR=attrDF /Z ChannelBegin = ScientaChannelBegin wave /SDFR=attrDF /Z ChannelEnd = ScientaChannelEnd wave /SDFR=attrDF /Z SliceBegin = ScientaSliceBegin wave /SDFR=attrDF /Z SliceEnd = ScientaSliceEnd // lens mode can give more detail if (waveexists(LensMode) && (numpnts(LensMode) >= 1)) strswitch(LensMode[0]) case "Angular45": lo[%$kAngleDimLabel] = -45/2 hi[%$kAngleDimLabel] = +45/2 un[%$kAngleDimLabel] = "deg" break case "Angular60": lo[%$kAngleDimLabel] = -60/2 hi[%$kAngleDimLabel] = +60/2 un[%$kAngleDimLabel] = "deg" break case "Transmission": un[%$kAngleDimLabel] = "arb." break endswitch endif // best option if scales are explicit in separate waves if (waveexists(ChannelBegin) && waveexists(ChannelEnd) && (numpnts(ChannelBegin) >= 1) && (numpnts(ChannelEnd) >= 1)) lo[%$kEnergyDimLabel] = ChannelBegin[0] hi[%$kEnergyDimLabel] = ChannelEnd[0] endif if (waveexists(SliceBegin) && waveexists(SliceEnd) && (numpnts(SliceBegin) >= 1) && (numpnts(SliceEnd) >= 1)) lo[%$kAngleDimLabel] = SliceBegin[0] hi[%$kAngleDimLabel] = SliceEnd[0] endif wave /z /t /SDFR=dataDF ScanWritables if (WaveExists(ScanWritables)) wave /z /SDFR=dataDF scanner = $ScanWritables[0] if (!WaveExists(scanner)) wave /z /SDFR=attrDF scanner = $ScanWritables[0] endif if (WaveExists(scanner)) lo[%$kScanDimLabel] = scanner[0] hi[%$kScanDimLabel] = scanner[numpnts(scanner)-1] endif endif end /// set the dimension scales of a dataset. /// /// the function is normally called by ps_scale_datasets() but can also be used independently. /// the limits and units must be given as function arguments with proper dimension labels. /// /// the provided limit and unit waves must have dimension labels /// matching the -1 index dimension labels of the data wave, /// such as set by the ps_detect_scale() function. /// the scale parameters are extracted by keyword, e.g., /// @arg `lo[%%energy]` analyser energy dimension. /// @arg `lo[%%angle]` analyser angle dimension. /// @arg `lo[%%scan]` scan dimension. /// @arg `lo[%%data]` data dimension. /// /// @param data data wave to be scaled. /// dimension labels (index -1) must be set to match the limit waves. /// /// @param lo lower limits. /// /// @param hi upper limits. /// /// @param un unit labels. /// function ps_scale_dataset_2(data, lo, hi, un) wave data wave lo wave hi wave /t un string sdim sdim = GetDimLabel(data, 0, -1) if (strlen(sdim)) setscale /i x lo[%$sdim], hi[%$sdim], un[%$sdim], data endif sdim = GetDimLabel(data, 1, -1) if (strlen(sdim)) setscale /i y lo[%$sdim], hi[%$sdim], un[%$sdim], data endif sdim = GetDimLabel(data, 2, -1) if (strlen(sdim)) setscale /i z lo[%$sdim], hi[%$sdim], un[%$sdim], data endif setscale d 0, 0, un[%$kDataDimLabel], data end /// load and reduce the ScientaImage dataset of the first scan of a PShell data file. /// /// the resulting dataset is reduced in one image dimension by a user-defined reduction function, /// e.g. by region-of-interest integration, curve fitting, etc. /// /// the function loads the dataset image by image using the hyperslab option /// and applies a custom reduction function to each image. /// the results from the reduction function are composed into one result wave. /// the raw data are discarded. /// /// if the data is from the electron analyser driver and some special attributes are included, /// the function will set the scales of the image dimensions. /// /// @param ANickName destination folder name (top level under root). /// /// @param APathName igor symbolic path name. can be empty if the path is specified in FileName or a dialog box should be displayed. /// /// @param AFileName if empty a dialog box shows up. /// /// @param reduction_func custom reduction function /// (any user-defined function which has the same parameters as adh5_default_reduction()) /// /// @param reduction_param parameter string for the reduction function. /// /// @param progress progress window. /// @arg 1 (default) show progress window /// @arg 0 do not show progress window /// /// @return semicolon-separated list of the loaded waves, /// `ReducedData1` and `ReducedData2` if successful. /// empty string if an error occurred. /// error messages are printed to the history. /// /// @return global string s_filepath in new data folder contains the full file path on disk. /// /// @return global string s_scanpaths in new data folder contains a list of scan groups inside the file. /// /// @todo load scan positions. /// function /s psh5_load_reduced(ANickName, APathName, AFileName, reduction_func, reduction_param, [progress]) string ANickName string APathName string AFileName funcref adh5_default_reduction reduction_func string reduction_param variable progress if (ParamIsDefault(progress)) progress = 1 endif dfref saveDF = GetDataFolderDFR() // performance monitoring variable timerRefNum variable /g psh5_perf_secs timerRefNum = startMSTimer variable fileID = psh5_open_file(ANickName, APathName, AFileName) string wavenames = "" if (fileID) dfref fileDF = GetDataFolderDFR() svar s_filepath svar s_scanpaths AFileName = s_filepath print "loading " + s_filepath + "\r" variable ig = 0 variable ng = ItemsInList(s_scanpaths) string sg string folder sg = StringFromList(ig, s_scanpaths) folder = CleanupName(ReplaceString("/", sg, ""), 0) setdatafolder fileDF newdatafolder /s /o $folder dfref dataDF = GetDataFolderDFR() psh5_load_scan_meta(fileID, sg) newdatafolder /s /o attr psh5_load_scan_attrs(fileID, sg) setdatafolder dataDF wavenames = psh5_load_dataset_reduced(fileID, sg, "ScientaImage", reduction_func, reduction_param, progress=progress) psh5_close_file(fileID) endif if (timerRefNum >= 0) psh5_perf_secs = stopMSTimer(timerRefNum) / 1e6 endif setdatafolder saveDF return wavenames end /// load a reduced dataset from the open PShell HDF5 file. /// /// the function loads the dataset image by image using the hyperslab option /// and applies a custom reduction function to each image. /// the results from the reduction function are written to the `ReducedData1` and `ReducedData2` waves. /// the raw data are discarded. /// /// by default, the reduction function is called in separate threads to reduce the total loading time. /// (see the global variable psh5_perf_secs which reports the total run time of the function.) /// the effect varies depending on the balance between file loading (image size) /// and data processing (complexity of the reduction function). /// for debugging the reduction function, multi-threading can be disabled. /// /// if the reduction function requires the image waves to be scaled properly, /// the attributes must have been loaded by psh5_load_scan_attrs() before. /// in this case, the scales of the result waves are also set by the function. /// otherwise, the results can also be scaled by ps_scale_dataset() later. /// /// @param fileID ID of open HDF5 file from psh5_open_file(). /// /// @param scanpath path to scan group in the HDF5 file. /// /// @param dataset name of the dataset. /// /// @param reduction_func custom reduction function /// (any user-defined function which has the same parameters as adh5_default_reduction()). /// /// @param reduction_param parameter string for the reduction function. /// /// @param progress progress window. /// @arg 1 (default) show progress window /// @arg 0 do not show progress window /// /// @param nthreads /// @arg -1 (default) use as many threads as there are processor cores (in addition to main thread). /// @arg 0 use main thread only (e.g. for debugging the reduction function). /// @arg >= 1 use a fixed number of (additional) threads. /// /// @return semicolon-separated list of the loaded waves, /// `ReducedData1` and `ReducedData2` if successful. /// empty string if an error occurred. /// error messages are printed to the history. /// function /s psh5_load_dataset_reduced(fileID, scanpath, datasetname, reduction_func, reduction_param, [progress, nthreads]) variable fileID string scanpath string datasetname funcref adh5_default_reduction reduction_func string reduction_param variable progress variable nthreads if (ParamIsDefault(progress)) progress = 1 endif if (ParamIsDefault(nthreads)) nthreads = -1 endif variable result = 0 string datasetpath string datawavename string wavenames = "" datasetpath = scanpath + "/" + datasetname datasetpath = ReplaceString("//", datasetpath, "/") datawavename = StringFromList(ItemsInList(datasetpath, "/") - 1, datasetpath, "/") STRUCT HDF5DataInfo di // Defined in HDF5 Browser.ipf. InitHDF5DataInfo(di) variable err = HDF5DatasetInfo(fileID, datasetpath, 0, di) if (err != 0) print "error accessing detector/data" result = -1 return wavenames endif if (di.ndims < 2) print "error: rank of dataset < 2" result = -2 return wavenames endif variable idx, idy, idz, idt idx = 1 idy = 0 idz = 2 idt = 3 variable nx, ny, nz, nt, nzt nx = di.dims[idx] ny = di.dims[idy] nz = di.dims[idz] nt = di.dims[idt] make /n=(nx,ny,nz,nt) /o $datawavename /wave=data // adjust nz and nt *after* making the data wave nz = max(nz, 1) nt = max(nt, 1) nzt = nz * nt // load data image by image HDF5MakeHyperslabWave(GetDataFolder(1) + "slab", max(di.ndims, 4)) wave slab slab[][%Start] = 0 slab[][%Stride] = 1 slab[][%Count] = 1 slab[][%Block] = 1 slab[idx][%Block] = nx slab[idy][%Block] = ny // set up multi threading if (nthreads < 0) nthreads = ThreadProcessorCount endif if (nthreads > 0) variable threadGroupID = ThreadGroupCreate(nthreads) variable ithread for (ithread = 0; ithread < nthreads; ithread += 1) ThreadStart threadGroupID, ithread, reduce_slab_worker(reduction_func) endfor else make /n=(nzt) /df /free processing_folders endif if (progress) display_progress_panel("HDF5 Import", "Loading data (step 1 of 2)...", nzt) endif make /n=(nx,ny) /d /o image_template setdimlabel 0, -1, $kEnergyDimLabel, image_template setdimlabel 1, -1, $kAngleDimLabel, image_template ps_scale_dataset(image_template) variable iz, it, izt string dfname izt = 0 for (iz = 0; iz < nz; iz += 1) for (it = 0; it < nt; it += 1) // load hyperslab slab[idz][%Start] = iz slab[idt][%Start] = it dfname = "processing_" + num2str(izt) newdatafolder /s $dfname HDF5LoadData /O /Q /Z /SLAB=slab /N=slabdata fileID, datasetpath // send to processing queue duplicate image_template, image variable /g r_index = iz variable /g s_index = it string /g func_param = reduction_param if (nthreads > 0) WaveClear image ThreadGroupPutDF threadGroupID, : else processing_folders[izt] = GetDataFolderDFR() make /n=1/d profile1, profile2 wave slabdata variable /g func_result func_result = reduce_slab_image(slabdata, image, profile1, profile2, reduction_func, func_param) WaveClear slabdata, image, profile1, profile2 setdatafolder :: endif izt += 1 // progress window if (progress) if (update_progress_panel(izt)) print "user abort" result = -4 break endif endif endfor endfor killwaves /z slab, slabdata, image_template if (progress) update_progress_panel(0, message="Processing data (step 2 of 2)...") endif dfref dfr for (izt = 0; (izt < nzt) && (result == 0); izt += 1) if (nthreads > 0) do dfr = ThreadGroupGetDFR(threadGroupID, 1000) if (DatafolderRefStatus(dfr) != 0) break endif if (progress) if (update_progress_panel(izt)) print "user abort" result = -4 break endif endif while (1) else dfr = processing_folders[izt] if (progress) if (update_progress_panel(izt)) print "user abort" result = -4 break endif endif endif if (result != 0) break endif nvar rr = dfr:r_index nvar ss = dfr:s_index nvar func_result = dfr:func_result wave profile1 = dfr:profile1 wave profile2 = dfr:profile2 if (func_result == 0) if (izt == 0) make /n=(dimsize(profile1, 0), nz, nt) /d /o ReducedData1 make /n=(dimsize(profile2, 0), nz, nt) /d /o ReducedData2 setdimlabel 0, -1, $getdimlabel(profile1, 0, -1), ReducedData1 setdimlabel 0, -1, $getdimlabel(profile2, 0, -1), ReducedData2 setdimlabel 1, -1, $kScanDimLabel, ReducedData1 setdimlabel 1, -1, $kScanDimLabel, ReducedData2 setscale /p x dimoffset(profile1, 0), dimdelta(profile1, 0), waveunits(profile1, 0), ReducedData1 setscale /p x dimoffset(profile2, 0), dimdelta(profile2, 0), waveunits(profile2, 0), ReducedData2 setscale d 0, 0, waveunits(profile1, -1), ReducedData1 setscale d 0, 0, waveunits(profile2, -1), ReducedData2 endif ReducedData1[][rr][ss] = profile1[p] ReducedData2[][rr][ss] = profile2[p] else print "error during data reduction." result = -3 break endif endfor if (nthreads > 0) variable tstatus = ThreadGroupRelease(threadGroupID) if (tstatus == -2) print "error: thread did not terminate properly." result = -5 endif else for (izt = 0; izt < nzt; izt += 1) KillDataFolder /Z processing_folders[izt] endfor endif if (result == 0) if (nz == 1) redimension /n=(-1, 0, 0) ReducedData1 redimension /n=(-1, 0, 0) ReducedData2 elseif (nt == 1) redimension /n=(-1, nz, 0) ReducedData1 redimension /n=(-1, nz, 0) ReducedData2 endif wavenames = "ReducedData1;ReducedData2;" ps_scale_dataset(ReducedData1) ps_scale_dataset(ReducedData2) endif if (progress) kill_progress_panel() endif return wavenames end threadsafe static function reduce_slab_worker(reduction_func) funcref adh5_default_reduction reduction_func do // wait for job from main thread do dfref dfr = ThreadGroupGetDFR(0, 1000) if (DataFolderRefStatus(dfr) == 0) if (GetRTError(2)) return 0 // no more jobs endif else break endif while (1) // get input data wave slabdata = dfr:slabdata wave image = dfr:image svar func_param = dfr:func_param nvar rr = dfr:r_index nvar ss = dfr:s_index // do the work newdatafolder /s outDF make /n=1/d profile1, profile2 variable /g r_index = rr variable /g s_index = ss variable /g func_result func_result = reduce_slab_image(slabdata, image, profile1, profile2, reduction_func, func_param) // send output to queue and clean up WaveClear slabdata, image, profile1, profile2 ThreadGroupPutDF 0, : KillDataFolder dfr while (1) return 0 end threadsafe static function reduce_slab_image(slabdata, image, profile1, profile2, reduction_func, reduction_param) wave slabdata wave image wave profile1 wave profile2 funcref adh5_default_reduction reduction_func string reduction_param image = slabdata[q][p][0][0] return reduction_func(image, profile1, profile2, reduction_param) end /// load descriptive info from a PShell data file. /// /// the info string lists the following information for each scan contained in the file: /// - path of the scan group inside the file. /// - number of scan positions. /// - dataset names of scan positioners. /// - dataset names of detectors. /// /// @param APathName igor symbolic path name. can be empty if the path is specified in AFileName or a dialog box should be displayed /// /// @param AFileName if empty a dialog box shows up /// /// @return newline terminated string. /// function /s psh5_load_info(APathName, AFileName) string APathName string AFileName dfref saveDF = GetDataFolderDFR() dfref fileDF = NewFreeDataFolder() setdatafolder fileDF variable fileID string filepath string scanpaths variable nscans variable iscan string scanpath string info = "" HDF5OpenFile /P=$APathName /R fileID as AFileName if (v_flag == 0) filepath = s_path + s_filename scanpaths = psh5_list_scans(fileID) nscans = ItemsInList(scanpaths) for (iscan = 0; iscan < nscans; iscan += 1) scanpath = StringFromList(iscan, scanpaths) info = info + scanpath + "\r" info = info + psh5_load_scan_info(fileID, scanpath) endfor HDF5CloseFile fileID endif setdatafolder saveDF return info end /// load descriptive info from a PShell scan. /// /// the info string contains up to three lines which are made up of the following information: /// - number of scan positions. /// - dataset names of scan positioners. /// - dataset names of detectors. /// /// @param fileID ID of open HDF5 file from psh5_open_file(). /// /// @param scanpath path to scan group in the HDF5 file. /// /// @return newline terminated string. /// function /s psh5_load_scan_info(fileID, scanpath) variable fileID string scanpath string info = "" string positions = "" string positioners = "" string detectors = "" psh5_load_scan_meta(fileID, scanpath) wave /z ScanDimensions wave /t /z ScanWritables wave /t /z ScanReadables wave /z ScanSteps if (WaveExists(ScanSteps) && (numpnts(ScanSteps) >= 1)) ScanSteps += 1 positions = "positions = (" + wave2list(ScanSteps, "%u", ",") + ")" info = AddListItem(positions, info, "\r", inf) endif if (WaveExists(ScanWritables) && (numpnts(ScanWritables) >= 1)) positioners = "positioners = " + twave2list(ScanWritables, ",") info = AddListItem(positioners, info, "\r", inf) endif if (WaveExists(ScanReadables) && (numpnts(ScanReadables) >= 1)) detectors = "detectors = " + twave2list(ScanReadables, ",") info = AddListItem(detectors, info, "\r", inf) endif return info end /// convert text wave to list. /// /// static function /s twave2list(wt, sep) wave /t wt string sep string list = "" variable n = numpnts(wt) variable i for (i = 0; i < n; i += 1) list = AddListItem(wt[i], list, sep, inf) endfor return list end /// convert numeric wave to list. /// /// static function /s wave2list(w, format, sep) wave w string format string sep string list = "" variable n = numpnts(w) variable i string s for (i = 0; i < n; i += 1) sprintf s, format, w[i] list = AddListItem(s, list, sep, inf) endfor return list end