diff --git a/mm/mm-physconst.ipf b/mm/mm-physconst.ipf new file mode 100644 index 0000000..154ed38 --- /dev/null +++ b/mm/mm-physconst.ipf @@ -0,0 +1,97 @@ +#pragma rtGlobals=1 // Use modern global access method. +#pragma version = 1.05 + +// physical constants +// original version: 03-05-23 by mm +// $Id$ + +// source: CODATA 2002 [Rev. Mod. Phys. 77, 1 (2005)] + +// universal constants + +constant kSpeedOfLight = 2.99792458e8 // m/s +constant kMagnField = 1.25663706144e-6 // V s / A / m +constant kElField = 8.854187817620e-12 // A s / V / m + +constant kGravitation = 6.6742e-11 // m^3 / kg / s^2 + +constant kHBar = 6.58211915e-16 // eV s +constant kPlanckBar = 6.58211915e-16 // eV s +constant kPlanck = 4.13566743e-15 // eV s +constant kHBarC = 197.326968 // MeV fm +constant kHC = 1239.84190605 // eV nm +constant kHCicm = 1239.84190605e-7 // eV cm^-1 +constant kPlanckBarSI = 1.05457168e-34 // J s +constant kPlanckSI = 6.6260693e-34 // J s + +// electromagnetic constants + +constant kElCharge = 1.60217653e-19 // A s +constant kMagnFlux = 2.06783372e-15 // Wb +constant kConductance = 7.748091733e-5 // S + +constant kBohrMagneton = 5.788381804e-5 // eV T^-1 +constant kBohrMagnetonSI = 9.27400949e-24 // J T^-1 = A m^2 +constant kNuclearMagneton = 3.152451259e-8 // eV T^-1 +constant kNuclearMagnetonSI = 5.05078343e-27 // J T^-1 + +// atomic and nuclear constants + +constant kFineStruct = 7.297352568e-3 +constant kInvFineStruct = 137.03599911 + +constant kRydberg = 10973731.568525 // m^-1 +constant kRydbergEnergy = 13.6056923 // eV +constant kBohrRadius = 0.5291772108e-10 // m +constant kHartreeEnergy = 27.2113845 // eV +constant kHartreeEnergySI = 4.35974417 // J + +constant kElectronMass = 510.998918e3 // eV c^-2 +constant kMuonMass = 105.6583692e6 // eV c^-2 +constant kProtonMass = 938.272029e6 // eV c^-2 +constant kNeutronMass = 939.565360e6 // eV c^-2 +constant kElectronMassSI = 9.1093826e-31 // kg +constant kProtonMassSI = 1.67262171e-27 // kg + +constant 
kComptonWavelength = 2.426310238e-12 // m +constant kElectronRadius = 2.817940325e-15 // m +constant kThomsonCrossSection = 0.665245873e-28 // m^2 +constant kElectronGFactor = -2.0023193043718 + +// physico-chemical constants + +constant kAvogadro = 6.0221415e23 // 1 / mol + +constant kAtomicMassUnit = 931.494043e6 // eV / c^2 +constant kAtomicMassUnitSI = 1.66053886e-27 // kg + +constant kMolarGasSI = 8.314472 // J / K / mol +constant kBoltzmann = 8.617343e-5 // eV / K +constant kBoltzmannSI = 1.3806505e-23 // J /K +constant kWien = 2.8977685e-3 // m K +constant kStefanBoltzmann = 5.670400e-8 // W m^-2 K^-4 + +constant kJoulesPerEV = 1.60217653e-19 // J / eV +constant kEVPerHartree = 27.2113845 // eV / Eh + +// custom constants + +constant kFreeElectronDispersion = 3.79736 // eV Angstrom^2 + // = h_bar^2 * c^2 / (2 * m_e) + // for E = kFreeElectronDispersion * k^2 + +threadsafe function FreeElectronWavelength(ekin, [v0, meff]) + // Wavelength of a quasi-free electron in meters + variable ekin // kinetic energy of the electron in eV + variable v0 // inner potential (where applicable), default = 0 + variable meff // effective mass relative to free electron, default = 1 + + if (ParamIsDefault(v0)) + v0 = 0 + endif + if (ParamIsDefault(meff)) + meff = 1 + endif + + return khc * 1e-9 / sqrt(2 * kElectronMass * meff * (ekin + v0)) +end diff --git a/pearl/pearl-area-import.ipf b/pearl/pearl-area-import.ipf index 653df02..18dae4c 100644 --- a/pearl/pearl-area-import.ipf +++ b/pearl/pearl-area-import.ipf @@ -3,23 +3,35 @@ #pragma ModuleName = PearlAreaImport #pragma version = 1.06 #include -#include "pearl-gui-tools", version >= 1.01 +#include "pearl-gui-tools" -// HDF5 file import from EPICS area detectors -// such as CCD cameras, 2D electron analysers +// copyright (c) 2013-16 Paul Scherrer Institut +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// http://www.apache.org/licenses/LICENSE-2.0 -// restriction: as of version 6.3, Igor can open datasets of up to rank 4. -// i.e. the extra dimension Y of the file plugin cannot be used. -// the extra dimensions N and X are supported. +/// @file +/// @brief HDF5 file import from EPICS area detectors +/// @ingroup ArpesPackage +/// +/// +/// HDF5 file import from EPICS area detectors +/// such as CCD cameras, 2D electron analysers +/// +/// as of Igor 6.3, Igor can open datasets of up to rank 4. +/// i.e. the extra dimension Y of the file plugin cannot be used. +/// the extra dimensions N and X are supported. -// created: matthias.muntwiler@psi.ch, 2013-05-31 -// Copyright (c) 2013 Paul Scherrer Institut -// $Id$ +/// @namespace PearlAreaImport +/// @brief HDF5 file import from EPICS area detectors +/// +/// PearlAreaImport is declared in @ref pearl-area-import.ipf. -//------------------------------------------------------------------------------ +/// callback function for drag&drop of HDF5 files into Igor. +/// static function BeforeFileOpenHook(refNum,fileName,path,type,creator,kind) - // allows drag&drop of data files into an open igor window - // this works only with igor 5.02 or later variable refNum, kind string fileName, path, type, creator @@ -57,20 +69,37 @@ static function BeforeFileOpenHook(refNum,fileName,path,type,creator,kind) return handledOpen // 1 tells Igor not to open the file End +/// generate the name of a data folder based on a file name. +/// +/// if the file name follows the naming convention source-date-index.extension, +/// the function tries to generate the nick name as source_date_index. +/// otherwise it's just a cleaned up version of the file name. +/// +/// date must be in yyyymmdd or yymmdd format and is clipped to the short yymmdd format. +/// index should be a running numeric index of up to 6 digits, or the time encoded as hhmmss. 
+/// however, in the current version index can be any string that can be a valid Igor folder name. +/// +/// @param filename file name, including extension. can also include a folder path (which is ignored). +/// the extension is currently ignored, but may be used to select the parent folder in a later version. +/// @param ignoredate if non-zero, the nick name will not include the date part. +/// defaults to zero. +/// @param sourcename nick name of the data source. +/// by default, the function tries to detect the source from the file name. +/// this option can be used to override auto-detection. +/// the automatic source names are: +/// sci (scienta by area detector), +/// psh (pshell), +/// sl (optics slit camera by area detector), +/// es (end station camera by area detector), +/// xy (unidentified). +/// @param unique if non-zero, the resulting name is made a unique data folder name in the current data folder +/// defaults to zero. +/// function /s ad_suggest_foldername(filename, [ignoredate,sourcename,unique]) - // suggests the name of a data folder based on a file name - // if the file name follows the naming convention source-date-index.extension, - // the function tries to generate the nick name as source_date_index. - // otherwise it's just a cleaned up version of the file name. - string filename // file name, including extension. 
can also include a folder path (which is ignored) - // the extension is currently ignored, but may be used later to select the parent folder - variable ignoredate // if non-zero, the nick name will not include the date part - // defaults to zero - string sourcename // nick name of the data source - // the function tries to detect the source from the file name - // this option can be used to override auto-detection - variable unique // if non-zero, the resulting name is made a unique data folder name in the current data folder - // defaults to zero + string filename + variable ignoredate + string sourcename + variable unique if (ParamIsDefault(ignoredate)) ignoredate = 0 @@ -86,6 +115,8 @@ function /s ad_suggest_foldername(filename, [ignoredate,sourcename,unique]) string autosource if (strsearch(basename, "scienta", 0, 2) >= 0) autosource = "sci" + elseif (strsearch(basename, "pshell", 0, 2) >= 0) + autosource = "psh" elseif (strsearch(basename, "OP-SL", 0, 2) >= 0) autosource = "sl" elseif (strsearch(basename, "ES-PS", 0, 2) >= 0) @@ -99,8 +130,12 @@ function /s ad_suggest_foldername(filename, [ignoredate,sourcename,unique]) variable nparts = ItemsInList(basename, "-") if (nparts >= 3) - string datepart = StringFromList(nparts - 2, basename, "-") - string indexpart = StringFromList(nparts - 1, basename, "-") + string datepart = StringFromList(1, basename, "-") + variable l_datepart = strlen(datepart) + if (l_datepart == 8) + datepart = datepart[l_datepart-6, l_datepart-1] + endif + string indexpart = StringFromList(2, basename, "-") if (ignoredate) sprintf nickname, "%s_%s", sourcename, indexpart else @@ -117,10 +152,12 @@ function /s ad_suggest_foldername(filename, [ignoredate,sourcename,unique]) return nickname end +/// load area detector data files selected in a file dialog window +/// +/// @param APathName Igor symbolic path name. 
+/// if empty, Igor will choose a folder on its own function ad_load_dialog(APathName) - // loads data files selected in a file dialog window - string APathName // igor symbolic path name - // if empty, Igor will choose a folder on its own + string APathName variable refNum string message = "Select data files" @@ -148,17 +185,23 @@ function ad_load_dialog(APathName) setdatafolder saveDF end -//------------------------------------------------------------------------------ +/// import everything from a HDF5 file created by the Area Detector software. +/// +/// if the data is from the electron analyser driver and some special attributes are included, +/// the function will set the scales of the image dimensions. +/// +/// @param ANickName destination folder name (top level under root) +/// @param APathName igor symbolic path name. can be empty if the path is specified in FileName or a dialog box should be displayed +/// @param AFileName if empty a dialog box shows up +/// @param load_data 1 (default): load data; 0: do not load data +/// @param load_attr 1 (default): load attributes; 0: do not load attributes +/// for proper wave scaling, the attributes must be loaded function /s adh5_load_complete(ANickName, APathName, AFileName, [load_data, load_attr]) - // this function loads everything from a HDF5 file created by the Area Detector software. - // if the data is from the electron analyser driver and some special attributes are included, - // the function will set the scales of the image dimensions. - string ANickName // destination folder name (top level under root) - string APathName // igor symbolic path name. 
can be empty if the path is specified in FileName or a dialog box should be displayed - string AFileName // if empty a dialog box shows up - variable load_data // 1 (default): load data; 0: do not load data - variable load_attr // 1 (default): load attributes; 0: do not load attributes - // for proper wave scaling, the attributes must be loaded + string ANickName + string APathName + string AFileName + variable load_data + variable load_attr if (ParamIsDefault(load_data)) load_data = 1 @@ -222,30 +265,43 @@ function /s adh5_load_complete(ANickName, APathName, AFileName, [load_data, load return AFileName end +/// load and reduce a dataset from a HDF5 file created by the Area Detector software. +/// +/// the resulting dataset is reduced in one image dimension by a user-defined reduction function, +/// e.g. by region-of-interest integration, curve fitting, etc. +/// +/// the function loads the dataset image by image using the hyperslab option +/// and applies a custom reduction function to each image. +/// the results from the reduction function are composed into one result wave. +/// the raw data are discarded. +/// +/// if the data is from the electron analyser driver and some special attributes are included, +/// the function will set the scales of the image dimensions. +/// +/// @param ANickName destination folder name (top level under root) +/// @param APathName igor symbolic path name. 
can be empty if the path is specified in FileName or a dialog box should be displayed +/// @param AFileName if empty a dialog box shows up +/// +/// @param reduction_func custom reduction function +/// (any user-defined function which has the same parameters as adh5_default_reduction()) +/// @param reduction_param parameter string for the reduction function +/// +/// @param load_data 1 (default): load data; 0: do not load data +/// @param load_attr 1 (default): load attributes; 0: do not load attributes +/// for proper wave scaling, the attributes must be loaded +/// @param progress 1 (default): show progress window; 0: do not show progress window +/// function /s adh5_load_reduced(ANickName, APathName, AFileName, reduction_func, reduction_param, [load_data, load_attr, progress]) - // this function loads a reduced dataset from a HDF5 file created by the Area Detector software. - // the resulting dataset is reduced in one image dimension by a user-defined reduction function, - // e.g. by region-of-interest integration, curve fitting, etc. + string ANickName + string APathName + string AFileName - // the function loads the dataset image by image using the hyperslab option - // and applies a custom reduction function to each image. - // the results from the reduction function are composed into one result wave. - // the raw data are discarded. + funcref adh5_default_reduction reduction_func + string reduction_param - // if the data is from the electron analyser driver and some special attributes are included, - // the function will set the scales of the image dimensions. - string ANickName // destination folder name (top level under root) - string APathName // igor symbolic path name. 
can be empty if the path is specified in FileName or a dialog box should be displayed - string AFileName // if empty a dialog box shows up - - funcref adh5_default_reduction reduction_func // custom reduction function - // (any user-defined function which has the same parameters as adh5_default_reduction()) - string reduction_param // parameter string for the reduction function - - variable load_data // 1 (default): load data; 0: do not load data - variable load_attr // 1 (default): load attributes; 0: do not load attributes - // for proper wave scaling, the attributes must be loaded - variable progress // 1 (default): show progress window; 0: do not show progress window + variable load_data + variable load_attr + variable progress if (ParamIsDefault(load_data)) load_data = 1 @@ -306,18 +362,26 @@ function /s adh5_load_reduced(ANickName, APathName, AFileName, reduction_func, r return AFileName end +/// load a single image from a HDF5 file created by the Area Detector software. +/// +/// the data wave is loaded into the current data folder. +/// attributes are loaded into the attr subfolder. existing waves in attr are deleted. +/// +/// @warning EXPERIMENTAL +/// this function uses the root:pearl_area:preview data folder. existing data there may be deleted! +/// +/// @param ANickName destination wave name. the wave is created in the current data folder. +/// @param APathName igor symbolic path name. can be empty if the path is specified in FileName or a dialog box should be displayed +/// @param AFileName if empty a dialog box shows up +/// @param load_data 1 (default): load data; 0: do not load data +/// @param load_attr 1 (default): load attributes; 0: do not load attributes +/// note: for correct scaling of the image, the attributes need to be loaded function /s adh5_load_preview(ANickName, APathName, AFileName, [load_data, load_attr]) - // this function loads one image from a HDF5 file created by the Area Detector software. 
- // the data wave is loaded into the current data folder. - // attributes are loaded into the attr subfolder. existing waves in attr are deleted. - // EXPERIMENTAL - // this function uses the root:pearl_area:preview data folder. existing data there may be deleted! - string ANickName // destination wave name. the wave is created in the current data folder. - string APathName // igor symbolic path name. can be empty if the path is specified in FileName or a dialog box should be displayed - string AFileName // if empty a dialog box shows up - variable load_data // 1 (default): load data; 0: do not load data - variable load_attr // 1 (default): load attributes; 0: do not load attributes - // note: for correct scaling of the image, the attributes need to be loaded + string ANickName + string APathName + string AFileName + variable load_data + variable load_attr if (ParamIsDefault(load_data)) load_data = 1 @@ -412,13 +476,19 @@ function /s adh5_load_preview(ANickName, APathName, AFileName, [load_data, load_ return AFileName end +/// load descriptive info from a HDF5 file created by the Area Detector software. +/// +/// the information returned is the array size and active scans +/// +/// @attention EXPERIMENTAL +/// this function should be merged with adh5_load_preview +/// +/// @param APathName igor symbolic path name. can be empty if the path is specified in FileName or a dialog box should be displayed +/// @param AFileName if empty a dialog box shows up +/// function /s adh5_load_info(APathName, AFileName) - // this function loads descriptive info from a HDF5 file created by the Area Detector software. - // the information returned is the array size and active scans - // EXPERIMENTAL - // this function should be merged with adh5_load_preview - string APathName // igor symbolic path name. 
can be empty if the path is specified in FileName or a dialog box should be displayed - string AFileName // if empty a dialog box shows up + string APathName + string AFileName dfref saveDF = GetDataFolderDFR() @@ -492,12 +562,17 @@ function /s adh5_load_info(APathName, AFileName) return s_info end +/// load the detector dataset from the open HDF5 file. +/// +/// the function loads the whole dataset at once +/// and redimensions it so that the image dimensions are X and Y +/// +/// @param fileID ID of open HDF5 file from HDF5OpenFile +/// @param detectorpath path to detector group in the HDF5 file +/// function adh5_load_detector(fileID, detectorpath) - // loads the detector dataset from the open HDF5 file - // the function loads the whole dataset at once - // and redimensions it so that the image dimensions are X and Y - variable fileID // ID of open HDF5 file from HDF5OpenFile - string detectorpath // path to detector group in the HDF5 file + variable fileID + string detectorpath // avoid compilation error if HDF5 XOP has not been loaded #if Exists("HDF5LoadData") @@ -526,19 +601,22 @@ function adh5_load_detector(fileID, detectorpath) #endif end +/// redimension a multi-dimensional area detector array loaded from HDF5. 
+/// +/// so that the image dimensions are X and Y +/// singleton dimensions are removed (most common cases only) +/// +/// in the redimensioned array, the original dimension type is noted in the dimension label: +/// AD_Dim0 = first image dimension +/// AD_Dim1 = second image dimension +/// AD_DimN = frame sequence +/// AD_DimX = extra dimension X +/// AD_DimY = extra dimension Y (cannot be loaded in Igor) +/// +/// @param data area detector data loaded from HDF5 to be redimensioned +/// function adh5_redim(data) - // redimensions a multi-dimensional area detector array loaded from HDF5 - // so that the image dimensions are X and Y - // singleton dimensions are removed (most common cases only) - - // in the redimensioned array, the original dimension type is noted in the dimension label: - // AD_Dim0 = first image dimension - // AD_Dim1 = second image dimension - // AD_DimN = frame sequence - // AD_DimX = extra dimension X - // AD_DimY = extra dimension Y (cannot be loaded in Igor) - - wave data // area detector data loaded from HDF5 to be redimensioned + wave data duplicate /free data, tempdata variable nd = wavedims(tempdata) @@ -608,29 +686,36 @@ function adh5_redim(data) endswitch end +/// find the attributes data folder of an area detector dataset. +/// +/// since version 1.04 attributes should be stored in a subfolder named attr. +/// earlier versions had the attributes in the same data folder as the actual dataset. +/// +/// @param data wave containing the main dataset. +/// +/// @return data folder reference of the attributes folder. +/// the reference may be invalid (and default to root) if the folder cannot be found, +/// cf. built-in DataFolderRefStatus function. 
static function /DF GetAttrDataFolderDFR(data) - // returns a data folder reference to the ND attributes - // since version 1.04 attributes should be written in a subfolder named attr - // earlier versions had the attributes in the same data folder as the actual dataset wave data - dfref saveDF = GetDataFolderDFR() dfref dataDF = GetWavesDataFolderDFR(data) - setdatafolder dataDF - if (DataFolderExists(":attr")) - setdatafolder :attr + dfref attrDF = dataDF:attr + if (DataFolderRefStatus(attrDF) == 0) + attrDF = dataDF endif - dfref attrDF = GetDataFolderDFR() - setdatafolder saveDF + return attrDF end +/// set the dimension scales of an area detector dataset. +/// +/// the intrinsic dimensions 0 and 1 are scaled according to the data source +/// (currently supported: Prosilica cameras, Scienta electron analyser). +/// the extra dimensions are scaled according to the scan. +/// the latter requires that the positioner names and position values are available. +/// function adh5_scale(data,[source]) - // tries to set the dimension scales of an area detector dataset. - // the intrinsic dimensions 0 and 1 are scaled according to the data source - // (currently supported: Prosilica cameras, Scienta electron analyser). - // the extra dimensions are scaled according to the scan. - // the latter requires that the positioner names and position values are available. wave data string source @@ -671,14 +756,23 @@ function adh5_scale(data,[source]) setdatafolder saveDF end +/// load the detector dataset from the open HDF5 file. +/// +/// the function loads the dataset image by image using the hyperslab option. +/// this function gives the same result as adh5_load_detector. +/// it is about 5% slower, and it depends on HDF5 Browser code. +/// but it does not choke on large datasets (as long as the final wave fits into memory). +/// +/// @param fileID ID of open HDF5 file from HDF5OpenFile. +/// @param detectorpath path to detector group in the HDF5 file. 
+/// @param progress 1 (default): show progress window; 0: do not show progress window. +/// +/// @return 0 if successful, non-zero if an error occurred. +/// function adh5_load_detector_slabs(fileID, detectorpath, [progress]) - // loads the detector dataset from the open HDF5 file - // the function loads the dataset image by image using the hyperslab option - // this function gives the same result as adh5_load_detector - // it is about 5% slower, and it depends on HDF5 Browser code. - variable fileID // ID of open HDF5 file from HDF5OpenFile - string detectorpath // path to detector group in the HDF5 file - variable progress // 1 (default): show progress window; 0: do not show progress window + variable fileID + string detectorpath + variable progress if (ParamIsDefault(progress)) progress = 1 @@ -821,23 +915,32 @@ function adh5_load_detector_slabs(fileID, detectorpath, [progress]) return result end +/// load a single image from the detector dataset of the open HDF5 file +/// +/// the function can average over a region in the extra dimensions. 
+/// +/// @param fileID ID of open HDF5 file from HDF5OpenFile +/// @param detectorpath path to detector group in the HDF5 file +/// @param dim2start 2nd dimension coordinate of the first image +/// note that the order of dimensions is reversed in the file +/// 2nd dimension = N dimension in area detector = dimension 0 of the three-dimensional HDF dataset +/// set to 0 if dimension may not be present +/// @param dim2count number of subsequent images to average +/// set to 1 if dimension may not be present +/// @param dim3start 3rd dimension coordinate of the first image +/// note that the order of dimensions is reversed in the file +/// 3rd dimension = extra X dimension in area detector = dimension 0 of the four-dimensional HDF dataset +/// set to 0 if dimension may not be present +/// @param dim3count number of subsequent images to average +/// set to 1 if dimension may not be present +/// function adh5_load_detector_image(fileID, detectorpath, dim2start, dim2count, dim3start, dim3count) - // loads a single image from the detector dataset of the open HDF5 file - // the function can average over a region in the extra dimensions - variable fileID // ID of open HDF5 file from HDF5OpenFile - string detectorpath // path to detector group in the HDF5 file - variable dim2start // 2nd dimension coordinate of the first image - // note that the order of dimensions is reversed in the file - // 2nd dimension = N dimension in area detector = dimension 0 of the three-dimensional HDF dataset - // set to 0 if dimension may not be present - variable dim2count // number of subsequent images to average - // set to 1 if dimension may not be present - variable dim3start // 3rd dimension coordinate of the first image - // note that the order of dimensions is reversed in the file - // 3rd dimension = extra X dimension in area detector = dimension 0 of the four-dimensional HDF dataset - // set to 0 if dimension may not be present - variable dim3count // number of subsequent images to 
average - // set to 1 if dimension may not be present + variable fileID + string detectorpath + variable dim2start + variable dim2count + variable dim3start + variable dim3count // avoid compilation error if HDF5 XOP has not been loaded #if Exists("HDF5LoadData") @@ -922,10 +1025,12 @@ function adh5_load_detector_image(fileID, detectorpath, dim2start, dim2count, di #endif end +/// get a list of functions which can be used as reduction functions. +/// +/// the function evaluates only the function arguments, +/// it may thus include functions which are not suitable as reduction functions. +/// function /s adh5_list_reduction_funcs() - // returns a list of functions which can be used as reduction functions - // the function evaluates only the function arguments, - // it may thus include functions which are not suitable as reduction functions. string all_funcs = FunctionList("*", ";", "KIND:6,NPARAMS:4,VALTYPE:1") string result = "" @@ -960,26 +1065,35 @@ function /s adh5_list_reduction_funcs() return result end +/// function prototype for adh5_load_reduced_detector +/// +/// derived functions reduce a two-dimensional dataset to a one-dimensional dataset, +/// e.g. by ROI-integration, curve fitting, etc. +// the resulting wave must have the same size as either dimension of the source image. +/// +/// each destination wave is a one-dimensional intensity distribution. +/// the function must redimension each of these waves to one of the image dimensions +/// by calling the adh5_setup_profile() function. +/// this function will also copy the scale information and dimension labels, +/// which is important for the proper scaling of the result. +/// +/// the meaning of the data in dest1 and dest2 is up to the particular function, +/// e.g. dest1 could hold the mean value and dest2 the one-sigma error, +/// or dest1 could hold the X-profile, and dest2 the Y-profile. 
+/// +/// @param source source wave +/// two-dimensional intensity distribution (image) +/// @param dest1, dest2 destination waves +/// @param param string with optional parameters, shared between calls. +/// this is a pass-by-reference argument, +/// the function may modify the string +/// +/// @return zero if successful, non-zero if an error occurs. +/// threadsafe function adh5_default_reduction(source, dest1, dest2, param) - // function prototype for adh5_load_reduced_detector - // derived functions reduce a two-dimensional dataset to a one-dimensional dataset, - // e.g. by ROI-integration, curve fitting, etc. - // the resulting wave must have the same size as either dimension of the source image. - wave source // source wave - // two-dimensional intensity distribution (image) - wave dest1, dest2 // destination waves - // each wave is a one-dimensional intensity distribution - // the function must redimension each of these waves to one of the image dimensions - // by calling the adh5_setup_profile() function. - // this function will also copy the scale information and dimension labels, - // which is important for the proper scaling of the result. - - // the meaning of the data in dest1 and dest2 is up to the particular function, - // e.g. dest1 could hold the mean value and dest2 the one-sigma error, - // or dest1 could hold the X-profile, and dest2 the Y-profile. - string ¶m // string with optional parameters, shared between calls - // this is a pass-by-reference argument, - // the function may modify the string + wave source + wave dest1, dest2 + string ¶m // demo code // integrate along the dimensions @@ -988,13 +1102,15 @@ threadsafe function adh5_default_reduction(source, dest1, dest2, param) adh5_setup_profile(source, dest2, 1) ad_profile_y_w(source, 0, -1, dest2) - return 0 // return zero if successful, non-zero if an error occurs + return 0 end +/// set up a one-dimensional wave for a line profile based on a 2D original wave. 
+/// +/// redimensions the profile wave to the given dimension. +/// copies the scale and dimension label of the given dimension. +/// threadsafe function adh5_setup_profile(image, profile, dim) - // sets up a one-dimensional wave for a line profile based on a 2D original wave - // redimensions the profile wave to the given dimension - // copies the scale and dimension label of the given dimension wave image // prototype wave profile // destination wave variable dim // which dimension to keep: 0 = X, 1 = Y @@ -1005,10 +1121,11 @@ threadsafe function adh5_setup_profile(image, profile, dim) setdimlabel 0, -1, $getdimlabel(image, dim, -1), profile end +/// wrapper function for testing reduction functions from the command line. +/// +/// Igor does not allow global variables as pass-by-reference parameter for reduction_param. +/// function /s adh5_test_reduction_func(source, dest1, dest2, reduction_func, reduction_param) - // wrapper function for testing reduction functions from the command line. - // Igor does not allow global variables as pass-by-reference parameter for reduction_param. - wave source wave dest1 wave dest2 @@ -1020,28 +1137,35 @@ function /s adh5_test_reduction_func(source, dest1, dest2, reduction_func, reduc return reduction_param end +/// load a reduced detector dataset from the open HDF5 file. +/// +/// the function loads the dataset image by image using the hyperslab option +/// and applies a custom reduction function to each image. +/// the results from the reduction function are composed into one result wave. +/// the raw data are discarded. +/// +/// by default, the reduction function is called in separate threads to reduce the total loading time. +/// (see the global variable adh5_perf_secs which reports the total run time of the function.) +/// the effect varies depending on the balance between file loading (image size) +/// and data processing (complexity of the reduction function). 
+/// for debugging the reduction function, multi-threading can be disabled. +/// +/// @param fileID ID of open HDF5 file from HDF5OpenFile +/// @param detectorpath path to detector group in the HDF5 file +/// @param reduction_func custom reduction function +/// (any user-defined function which has the same parameters as adh5_default_reduction()) +/// @param reduction_param parameter string for the reduction function +/// @param progress 1 (default): show progress window; 0: do not show progress window +/// @param nthreads -1 (default): use as many threads as there are processor cores (in addition to main thread) +/// 0: use main thread only (e.g. for debugging the reduction function) +/// >= 1: use a fixed number of (additional) threads function adh5_load_reduced_detector(fileID, detectorpath, reduction_func, reduction_param, [progress, nthreads]) - // loads a reduced detector dataset from the open HDF5 file - // the function loads the dataset image by image using the hyperslab option - // and applies a custom reduction function to each image. - // the results from the reduction function are composed into one result wave. - // the raw data are discarded. - - // by default, the reduction function is called in separate threads to reduce the total loading time. - // (see the global variable adh5_perf_secs which reports the total run time of the function.) - // the effect varies depending on the balance between file loading (image size) - // and data processing (complexity of the reduction function). - // for debugging the reduction function, multi-threading can be disabled. 
- - variable fileID // ID of open HDF5 file from HDF5OpenFile - string detectorpath // path to detector group in the HDF5 file - funcref adh5_default_reduction reduction_func // custom reduction function - // (any user-defined function which has the same parameters as adh5_default_reduction()) - string reduction_param // parameter string for the reduction function - variable progress // 1 (default): show progress window; 0: do not show progress window - variable nthreads // -1 (default): use as many threads as there are processor cores (in addition to main thread) - // 0: use main thread only (e.g. for debugging the reduction function) - // >= 1: use a fixed number of (additional) threads + variable fileID + string detectorpath + funcref adh5_default_reduction reduction_func + string reduction_param + variable progress + variable nthreads if (ParamIsDefault(progress)) progress = 1 @@ -1340,17 +1464,21 @@ threadsafe static function reduce_slab_image(slabdata, image, profile1, profile2 return reduction_func(image, profile1, profile2, reduction_param) end +/// load an NDAttributes group from an open HDF5 file into the current data folder. +/// +/// datasets contained in the group are loaded as waves. +/// if a dataset contains only one data point, it is added to the IN, ID, IV, IU waves, +/// where IN = EPICS channel name, ID = attribute name, IV = value, IU = unit +/// (units are left empty as they are not saved in HDF5). +/// attributes of the NDAttributes group are added to the IN, ID, IV, IU waves, +/// however, IN and IU are left empty as this information is not saved in the HDF5 file. +/// +/// @param fileID ID of open HDF5 file from HDF5OpenFile +/// @param attributespath path to NDAttributes group in the HDF5 file +/// function adh5_loadattr_all(fileID, attributespath) - // loads an NDAttributes group from an open HDF5 file into the current data folder. - // datasets contained in the group are loaded as waves. 
- // if a dataset contains only one data point, it is added to the IN, ID, IV, IU waves, - // where IN = EPICS channel name, ID = attribute name, IV = value, IU = unit - // (units are left empty as they are not saved in HDF5). - // attributes of the NDAttributes group are added to the IN, ID, IV, IU waves, - // however, IN and IU are left empty as this information is not saved in the HDF5 file. - - variable fileID // ID of open HDF5 file from HDF5OpenFile - string attributespath // path to NDAttributes group in the HDF5 file + variable fileID + string attributespath string datasetname string datawavename @@ -1417,11 +1545,20 @@ function adh5_loadattr_all(fileID, attributespath) end +/// sub-function of adh5_loadattr_all. +/// +/// reads one attribute from a wave which was loaded from an HDF5 file into the info waves IN, ID, IV, IU. +/// the attribute is read only if the input wave contains exactly one item, +/// i.e. either the measurement is a single image, or the attribute has string type. +/// +/// @param datawavename name of the attribute wave in the current folder. +/// can be text or numeric. +/// @param source source identifier (EPICS name) of the attribute. +/// @param idest destination index in IN, ID, IV, IU where the results are written. +/// the variable is incremented if data was written, otherwise it is left unchanged. +/// make sure IN, ID, IV, IU have at least idest + 1 elements. +/// static function read_attribute_info(datawavename, source, idest) - // sub-function of adh5_loadattr_all. - // reads one attribute from a wave which was loaded from an HDF5 file into the info waves IN, ID, IV, IU. - // the attribute is read only if the input wave contains exactly one item, - // i.e. either the measurement is a single image, or the attribute has string type. string datawavename // name of the attribute wave in the current folder. // can be text or numeric. 
string source @@ -1467,12 +1604,14 @@ static function read_attribute_info(datawavename, source, idest) endif end +/// set the energy and angle scales of an area detector dataset from the Scienta analyser. +/// +/// the dimension labels of the energy and angle scales must be set correctly: +/// AD_Dim0 = energy dimension; AD_Dim1 = angle dimension. +/// these dimensions must be the first two dimensions of a multi-dimensional dataset. +/// normally, AD_Dim0 is the X dimension, and AD_Dim1 the Y dimension. +/// function adh5_scale_scienta(data) - // sets the energy and angle scales of an area detector dataset from the Scienta analyser - // the dimension labels of the energy and angle scales must be set correctly: - // AD_Dim0 = energy dimension; AD_Dim1 = angle dimension - // these dimensions must be the first two dimensions of a multi-dimensional dataset. - // normally, AD_Dim0 is the X dimension, and AD_Dim1 the Y dimension. wave data dfref saveDF = GetDataFolderDFR() @@ -1571,11 +1710,13 @@ function adh5_scale_scienta(data) setdatafolder saveDF end +/// scales the extra dimensions of an area detector dataset according to the EPICS scan +/// +/// the scan positioner name and its values must be available +/// +/// @todo incomplete +/// function adh5_scale_scan(data) - // scales the extra dimensions of an area detector dataset according to the EPICS scan - // the scan positioner name and its values must be available - - // TODO: incomplete wave data dfref saveDF = GetDataFolderDFR() diff --git a/pearl/pearl-arpes.ipf b/pearl/pearl-arpes.ipf index 95b1205..5942bfd 100644 --- a/pearl/pearl-arpes.ipf +++ b/pearl/pearl-arpes.ipf @@ -5,6 +5,7 @@ #include "pearl-area-display" // 2D and 3D data visualization #include "pearl-area-profiles" // data processing for multi-dimensional datasets #include "pearl-area-import" // import data files generated by area detector software +#include "pearl-pshell-import" #include "pearl-data-explorer" // preview and import panel for PEARL 
data #include "pearl-anglescan-process" #include "pearl-anglescan-tracker" // live preview of hemispherical angle scan diff --git a/pearl/pearl-data-explorer.ipf b/pearl/pearl-data-explorer.ipf index eafa88d..e891c92 100644 --- a/pearl/pearl-data-explorer.ipf +++ b/pearl/pearl-data-explorer.ipf @@ -1,36 +1,51 @@ #pragma rtGlobals=3 // Use modern global access method and strict wave access. #pragma IgorVersion = 6.1 #pragma ModuleName = PearlDataExplorer -#pragma version = 1.41 -#include "pearl-area-import", version >= 1.06 -#include "pearl-area-profiles", version >= 1.04 -#include "pearl-area-display", version >= 1.04 - -// preview and import panel for PEARL data: -// scienta analyser, prosilica cameras, s-scans, otf-scans - -// $Id$ -// author: matthias.muntwiler@psi.ch -// Copyright (c) 2013-14 Paul Scherrer Institut +#pragma version = 1.43 +#include "pearl-area-import" +#include "pearl-area-profiles" +#include "pearl-area-display" +#include "pearl-pshell-import" +// copyright (c) 2013-16 Paul Scherrer Institut +// // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at -// http://www.apache.org/licenses/LICENSE-2.0 +// http://www.apache.org/licenses/LICENSE-2.0 + +/// @file +/// @brief preview and import panel for PEARL data +/// @ingroup ArpesPackage +/// +/// +/// preview and import panel for PEARL data: +/// scienta analyser, prosilica cameras, s-scans, otf-scans + +/// @namespace PearlDataExplorer +/// @brief preview and import panel for PEARL data +/// +/// PearlDataExplorer is declared in @ref pearl-data-explorer.ipf. 
static strconstant package_name = "pearl_explorer" static strconstant package_path = "root:packages:pearl_explorer:" +static strconstant ks_filematch_adh5 = "*.h5" +static strconstant ks_filematch_pshell = "psh*.h5" +static strconstant ks_filematch_itx = "*.itx" + function pearl_data_explorer() init_package() load_prefs() execute /q/z "PearlDataExplorer()" end +/// initialize the global variables of the data explorer. +/// +/// initializes the global variables and data folder for this procedure file +/// must be called once before the panel is created +/// warning: this function overwrites previous values static function init_package() - // initializes the global variables and data folder for this procedure file - // must be called once before the panel is created - // warning: this function overwrites previous values dfref savefolder = GetDataFolderDFR() SetDataFolder root: @@ -64,7 +79,8 @@ static function init_package() string /g s_preview_source = "" // data source, e.g. EPICS channel name, of the current preview string /g s_profiles_graph = "" // window name of the current preview if the data is two-dimensional string /g s_preview_trace_graph = "" // window name of the current preview if the data is one-dimensional - + string /g s_file_info = "" // description of selected file + variable/g v_InitPanelDone = 1 SetDataFolder savefolder @@ -201,9 +217,11 @@ static function preview_file(filename) dfref saveDF = GetDataFolderDFR() - if (StringMatch(filename, "*.h5")) + if (StringMatch(filename, ks_filematch_pshell)) + wave /z image = preview_pshell_file(filename) + elseif (StringMatch(filename, ks_filematch_adh5)) wave /z image = preview_hdf_file(filename) - elseif (StringMatch(filename, "*.itx")) + elseif (StringMatch(filename, ks_filematch_itx)) wave /z image = preview_itx_file(filename) endif @@ -224,6 +242,49 @@ static function preview_file(filename) setdatafolder saveDF end +/// load the preview of a PShell HDF5 file. 
+/// +/// the preview is an arbitrary detector image extracted from the file, see adh5_load_preview(). +/// the preview is loaded to the preview_image wave in the pear_explorer data folder. +/// +/// the s_file_info string is updated with information about the scan dimensions. +/// +/// @param filename name of a file in the directory specified by the pearl_explorer_filepath path object. +/// +/// @return wave reference of the preview image +/// +static function /wave preview_pshell_file(filename) + string filename + + dfref saveDF = GetDataFolderDFR() + setdatafolder $package_path + svar s_preview_file + svar s_preview_source + psh5_load_preview("preview_image", "pearl_explorer_filepath", filename) + s_preview_file = filename + s_preview_source = "" + wave /z preview_image + + svar /z s_file_info + if (! svar_exists(s_file_info)) + string /g s_file_info + endif + if (strlen(s_preview_file) > 0) + s_file_info = psh5_load_info("pearl_explorer_filepath", filename) + else + s_file_info = "" + endif + + if (DataFolderExists("attr")) + setdatafolder attr + preview_attributes(GetDataFolderDFR()) + setdatafolder :: + endif + + setdatafolder saveDF + return preview_image +end + /// load the preview of a PEARL HDF5 file. /// /// the preview is an arbitrary detector image extracted from the file, see adh5_load_preview(). 
@@ -777,13 +838,19 @@ static function load_file(filename, [options]) dfref saveDF = GetDataFolderDFR() - if (StringMatch(filename, "*.h5")) + if (StringMatch(filename, ks_filematch_pshell)) + if (ParamIsDefault(options)) + load_pshell_file(filename) + else + load_pshell_file(filename, options=options) + endif + elseif (StringMatch(filename, ks_filematch_adh5)) if (ParamIsDefault(options)) load_hdf_file(filename) else load_hdf_file(filename, options=options) endif - elseif (StringMatch(filename, "*.itx")) + elseif (StringMatch(filename, ks_filematch_itx)) load_itx_file(filename) endif @@ -796,7 +863,7 @@ static function prompt_hdf_options(options) string mode = StringByKey("mode", options, ":", ";") string reduction_func = StringByKey("reduction_func", options, ":", ";") - string modes = "adh5_load_reduced" + string modes = "load_reduced" string reduction_functions = adh5_list_reduction_funcs() if (strlen(mode) == 0) @@ -817,17 +884,19 @@ static function prompt_hdf_options(options) return v_flag // 0 = OK, 1 = cancel end +/// prototype for prompting for processing function parameters. +/// +/// the function should prompt the user for function parameters, +/// and update the param argument if the user clicked OK. +/// returns 0 if the user clicked OK, 1 if the user cancelled. +/// +/// prompt functions must have the same name as the corresponding reduction function +/// with the prefix "prompt_". +/// be aware of the limited length of function names in Igor. +/// +/// this function is a prototype. it does nothing but returns OK. +/// function prompt_default_process(param) - // prototype for prompting for processing function parameters. - // the function should prompt the user for function parameters, - // and update the param argument if the user clicked OK. - // returns 0 if the user clicked OK, 1 if the user cancelled. - - // prompt functions must have the same name as the corresponding reduction function - // with the prefix "prompt_". 
- // be aware of the limited length of function names in Igor. - - this function is a prototype. it does nothing but returns OK. string param return 0 @@ -847,6 +916,57 @@ function prompt_func_params(func_name, func_param) endif end +static function /df load_pshell_file(filename, [options]) + string filename + string options + + dfref saveDF = GetDataFolderDFR() + string nickname = ad_suggest_foldername(filename) + string loaded_filename = "" + + if (ParamIsDefault(options)) + loaded_filename = psh5_load_complete(nickname, "pearl_explorer_filepath", filename) + else + if (strlen(options) == 0) + svar pref_options = $(package_path + "s_hdf_options") + options = pref_options + if (prompt_hdf_options(options) == 0) + // OK + pref_options = options + else + // cancel + options = "" + endif + endif + + string mode = StringByKey("mode", options, ":", ";") + + strswitch(mode) + case "load_reduced": + string reduction_func = StringByKey("reduction_func", options, ":", ";") + svar pref_params = $(package_path + "s_reduction_params") + string reduction_params = pref_params + if (prompt_func_params(reduction_func, reduction_params) == 0) + pref_params = reduction_params + psh5_load_reduced(nickname, "pearl_explorer_filepath", filename, $reduction_func, reduction_params) + svar s_filepath + loaded_filename = s_filepath + endif + break + endswitch + endif + + dfref dataDF + if (strlen(loaded_filename) > 0) + setdatafolder $("root:" + nickname) + dataDF = GetDataFolderDFR() + string /g pearl_explorer_import = "load_pshell_file" + endif + + setdatafolder saveDF + return dataDF +end + static function /df load_hdf_file(filename, [options]) string filename string options + + dfref saveDF = GetDataFolderDFR() + string mode = StringByKey("mode", options, ":", ";") strswitch(mode) - case "adh5_load_reduced": + case "load_reduced": string reduction_func = StringByKey("reduction_func", options, ":", ";") svar pref_params = $(package_path + 
"s_reduction_params") string reduction_params = pref_params diff --git a/pearl/pearl-pshell-import.ipf b/pearl/pearl-pshell-import.ipf new file mode 100644 index 0000000..04720bf --- /dev/null +++ b/pearl/pearl-pshell-import.ipf @@ -0,0 +1,1776 @@ +#pragma rtGlobals=3 // Use modern global access method and strict wave access. +#pragma IgorVersion = 6.2 +#pragma ModuleName = PearlPShellImport +#pragma version = 1.02 +#include <HDF5 Browser> +#include "pearl-gui-tools" +#include "pearl-area-import" + +// copyright (c) 2013-16 Paul Scherrer Institut +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// http://www.apache.org/licenses/LICENSE-2.0 + +/// @file +/// @brief import data from PShell +/// @ingroup ArpesPackage +/// +/// +/// HDF5 file import from the PShell data acquisition program. +/// the main import functions are: +/// +/// - psh5_load_complete() +/// load all scans and datasets from a file. +/// +/// - psh5_load_reduced() +/// load the ScientaImage dataset of the first scan and reduce its dimensionality. +/// +/// - psh5_load_scan_complete() +/// load all datasets of a selected scan. +/// +/// - psh5_load_scan_preview() +/// load a preview of a selected scan. +/// +/// - psh5_load_dataset() +/// load a selected dataset. +/// +/// - psh5_load_dataset_reduced() +/// load a selected dataset and reduce its dimensionality. +/// +/// the following helper functions are also needed: +/// +/// - psh5_open_file() +/// - psh5_close_file() +/// - psh5_list_scans() +/// - psh5_list_scan_datasets() +/// - psh5_load_scan_meta() +/// - psh5_load_scan_attrs() + +/// @namespace PearlPShellImport +/// @brief import data from PShell +/// +/// PearlPShellImport is declared in @ref pearl-pshell-import.ipf. 
+ +/// Dimension label for the energy dispersive dimension of multi-dimensional datasets +strconstant kEnergyDimLabel = "energy" + +/// Dimension label for the angle dispersive dimension of multi-dimensional datasets +strconstant kAngleDimLabel = "angle" + +/// Dimension label for the scan dimension of multi-dimensional datasets +strconstant kScanDimLabel = "scan" + +/// Dimension label for the data dimension. +/// This label may be used to store the parameters for the `setscale d` operation. +strconstant kDataDimLabel = "data" + +/// open a HDF5 file created by the PShell data acquisition program and prepare the data folder. +/// +/// the function opens a specified or interactively selected HDF5 file, +/// creates a data folder `$ANickName` under root, +/// and changes to the new data folder. +/// +/// the file must be closed by psh5_close_file() after use. +/// +/// @param ANickName destination folder name (top level under root). +/// +/// @param APathName igor symbolic path name. can be empty if the path is specified in FileName or a dialog box should be displayed +/// +/// @param AFileName if empty a dialog box shows up +/// +/// @return ID of open HDF5 file from HDF5OpenFile. +/// zero if an error occurred. +/// +/// @return global string s_filepath in new data folder contains the full file path on disk. +/// +/// @return global string s_scanpaths in new data folder contains a list of scan groups inside the file. +/// +function psh5_open_file(ANickName, APathName, AFileName) + string ANickName + string APathName + string AFileName + + setdatafolder root: + newdatafolder /s /o $("root:" + ANickName) + dfref fileDF = GetDataFolderDFR() + + variable fileID + HDF5OpenFile /P=$APathName /R fileID as AFileName + if (v_flag == 0) + string /g s_filepath + string /g s_scanpaths + s_filepath = s_path + s_filename + s_scanpaths = psh5_list_scans(fileID) + else + fileID = 0 + endif + + return fileID +end + +/// close a HDF5 file opened by psh5_open_file. 
+/// +/// this function just closes the HDF5 file. +/// no change is made to the loaded data. +/// +/// @param fileID ID of open HDF5 file from psh5_open_file(). +/// +function psh5_close_file(fileID) + variable fileID + + HDF5CloseFile fileID +end + +/// load everything from a PShell data file. +/// +/// @param ANickName destination folder name (top level under root) +/// +/// @param APathName igor symbolic path name. can be empty if the path is specified in FileName or a dialog box should be displayed +/// +/// @param AFileName if empty a dialog box shows up +/// +/// @param load_data select whether datasets (positioners and detectors) are loaded. +/// @arg 1 (default) load data. +/// @arg 0 do not load data. +/// +/// @param load_attr select whether attributes (auxiliary device readbacks) are loaded. +/// for proper wave scaling, the attributes must be loaded. +/// @arg 1 (default) load attributes. +/// @arg 0 do not load attributes. +/// +/// @return complete path of the loaded file if successful. +/// empty string otherwise. +/// +/// @return global string s_filepath in new data folder contains the full file path on disk. +/// +/// @return global string s_scanpaths in new data folder contains a list of scan groups inside the file. 
+/// +function /s psh5_load_complete(ANickName, APathName, AFileName, [load_data, load_attr]) + string ANickName + string APathName + string AFileName + variable load_data + variable load_attr + + if (ParamIsDefault(load_data)) + load_data = 1 + endif + if (ParamIsDefault(load_attr)) + load_attr = 1 + endif + + dfref saveDF = GetDataFolderDFR() + + // performance monitoring + variable timerRefNum + variable /g psh5_perf_secs + timerRefNum = startMSTimer + + variable fileID = psh5_open_file(ANickName, APathName, AFileName) + if (fileID) + dfref fileDF = GetDataFolderDFR() + svar s_filepath + svar s_scanpaths + AFileName = s_filepath + print "loading " + s_filepath + "\r" + + variable ig + variable ng = ItemsInList(s_scanpaths, ";") + string sg + string folder + + for (ig = 0; ig < ng; ig += 1) + sg = StringFromList(ig, s_scanpaths, ";") + folder = CleanupName(ReplaceString("/", sg, ""), 0) + setdatafolder fileDF + newdatafolder /s /o $folder + psh5_load_scan_complete(fileID, sg, load_data=load_data, load_attr=load_attr) + endfor + + psh5_close_file(fileID) + else + AFileName = "" + endif + + psh5_perf_secs = stopMSTimer(timerRefNum) / 1e6 + + setdatafolder saveDF + return AFileName +end + +/// load a preview image from a PShell data file. +/// +/// the data wave is loaded into the current data folder. +/// attributes are loaded into the attr subfolder. existing waves in attr are deleted. +/// +/// @warning EXPERIMENTAL +/// this function uses the root:pearl_area:preview data folder. existing data there may be deleted! +/// +/// @param ANickName destination wave name. the wave is created in the current data folder. +/// +/// @param APathName igor symbolic path name. 
can be empty if the path is specified in FileName or a dialog box should be displayed +/// +/// @param AFileName if empty a dialog box shows up +/// +/// @param load_data 1 (default): load data; 0: do not load data +/// +/// @param load_attr 1 (default): load attributes; 0: do not load attributes +/// note: for correct scaling of the image, the attributes need to be loaded +/// +/// @return name of loaded preview wave. +/// +function /s psh5_load_preview(ANickName, APathName, AFileName, [load_data, load_attr]) + string ANickName + string APathName + string AFileName + variable load_data + variable load_attr + + if (ParamIsDefault(load_data)) + load_data = 1 + endif + if (ParamIsDefault(load_attr)) + load_attr = 1 + endif + + dfref saveDF = GetDataFolderDFR() + setdatafolder root: + newdatafolder /o/s pearl_area + newdatafolder /o/s preview + + variable fileID + string scanpaths = "" + string dataname = "" + + // performance monitoring + variable timerRefNum + variable /g adh5_perf_secs + timerRefNum = startMSTimer + + HDF5OpenFile /P=$APathName /R /Z fileID as AFileName + if (v_flag == 0) + AFileName = s_path + s_filename + dfref fileDF = GetDataFolderDFR() + scanpaths = psh5_list_scans(fileID) + variable ig + string sg + ig = 0 + sg = StringFromList(ig, scanpaths, ";") + dataname = psh5_load_scan_preview(fileID, sg, set_scale=load_attr) + + wave /z data = $dataname + string destpath = GetDataFolder(1, saveDF) + ANickName + if (waveexists(data)) + duplicate /o data, $destpath + wave /z data = $destpath + endif + + if (load_attr) + setdatafolder saveDF + newdatafolder /o/s attr + killwaves /a/z + psh5_load_scan_attrs(fileID, sg) + setdatafolder :: + endif + + HDF5CloseFile fileID + endif + + if (timerRefNum >= 0) + adh5_perf_secs = stopMSTimer(timerRefNum) / 1e6 + endif + + setdatafolder saveDF + return dataname +end + +/// load all data of a selected scan from a PShell data file. +/// +/// data is loaded into the current data folder. 
+/// attribute datasets are loaded into sub-folder `attr`. +/// existing data, if present, is overwritten. +/// +/// @param fileID ID of open HDF5 file from psh5_open_file(). +/// +/// @param scanpath path to the scan group in the HDF5 file, e.g. "/scan 1". +/// +/// @param load_data select whether datasets (positioners and detectors) are loaded. +/// @arg 1 (default) load data. +/// @arg 0 do not load data. +/// +/// @param load_attr select whether attributes (auxiliary device readbacks) are loaded. +/// for proper wave scaling, the attributes must be loaded. +/// @arg 1 (default) load attributes. +/// @arg 0 do not load attributes. +/// +/// @return semicolon-separated list of the loaded data waves (excluding attributes). +/// +function /s psh5_load_scan_complete(fileID, scanpath, [load_data, load_attr]) + variable fileID + string scanpath + variable load_data + variable load_attr + + if (ParamIsDefault(load_data)) + load_data = 1 + endif + if (ParamIsDefault(load_attr)) + load_attr = 1 + endif + + dfref saveDF = GetDataFolderDFR() + + dfref dataDF = GetDataFolderDFR() + string wavenames + string attrnames + psh5_load_scan_meta(fileID, scanpath) + if (load_data) + wavenames = psh5_load_scan_data(fileID, scanpath) + endif + if (load_attr) + newdatafolder /s /o attr + attrnames = psh5_load_scan_attrs(fileID, scanpath) + endif + if (load_data && load_attr) + setdatafolder dataDF + ps_scale_datasets() + endif + + setdatafolder saveDF + return wavenames +end + +/// list scan groups of a PShell data file. +/// +/// the function returns a list of all top-level groups whose name starts with "scan". +/// +/// @param fileID ID of open HDF5 file from psh5_open_file(). +/// +/// @return semicolon-separated list of group paths. 
+/// +function /s psh5_list_scans(fileID) + variable fileID + + HDF5ListGroup /F /TYPE=1 fileID, "/" + + variable ig + variable ng = ItemsInList(S_HDF5ListGroup, ";") + string sg + string scans = "" + + for (ig = 0; ig < ng; ig += 1) + sg = StringFromList(ig, S_HDF5ListGroup, ";") + if (cmpstr(sg[1,4], "scan") == 0) + scans = AddListItem(sg, scans, ";", inf) + endif + endfor + + return scans +end + +/// list datasets of a PShell scan group. +/// +/// the function returns a list of all datasets of the selected scan. +/// this does not include datasets from the attributes sub-group. +/// +/// @note in a future version, an option may be introduced to filter datasets by function (_Readable_ and/or _Writable_). +/// +/// @param fileID ID of open HDF5 file from psh5_open_file(). +/// +/// @param scanpath path to the scan group in the HDF5 file, e.g. "/scan 1". +/// +/// @return semicolon-separated list of dataset paths. +/// +function /s psh5_list_scan_datasets(fileID, scanpath) + variable fileID + string scanpath + + HDF5ListGroup /F /TYPE=2 /Z fileID, scanpath + + return S_HDF5ListGroup +end + +/// load all datasets of a PShell scan group. +/// +/// data is loaded into the current data folder. +/// +/// @param fileID ID of open HDF5 file from psh5_open_file(). +/// +/// @param scanpath path to the scan group in the HDF5 file, e.g. "/scan 1". +/// +/// @return semicolon-separated list of the loaded waves. +/// +function /s psh5_load_scan_data(fileID, scanpath) + variable fileID + string scanpath + string wavenames = "" + + HDF5ListGroup /F /TYPE=2 /Z fileID, scanpath + + if (!v_flag) + variable ids + variable nds = ItemsInList(S_HDF5ListGroup, ";") + string sds + string sw + + for (ids = 0; ids < nds; ids += 1) + sds = StringFromList(ids, S_HDF5ListGroup, ";") + sw = psh5_load_dataset(fileID, "", sds, set_scale=0) + wavenames = AddListItem(sw, wavenames, ";", inf) + endfor + endif + + return wavenames +end + +/// load attributes of a PShell scan group. 
+/// +/// "attributes" are the auxiliary data inside the attrs group. +/// do not confuse with HDF5 attributes! +/// HDF5 attributes are loaded by the psh5_load_scan_meta() function. +/// +/// data is loaded into the current data folder. +/// this should normally be the `:attr` folder inside the respective scan folder. +/// +/// @param fileID ID of open HDF5 file from psh5_open_file(). +/// +/// @param scanpath path to the scan group in the HDF5 file, e.g. "/scan 1". +/// +/// @param attr_sets specify the attribute sets to be loaded. +/// this value can be an arithmetic OR of the following constants. +/// by default, all attributes are loaded. +/// @arg 1 all datasets that are present in the file. +/// @arg 2 datasets relevant for wave scaling of Scienta data. +/// +/// @return semicolon-separated list of the loaded waves. +/// +function /s psh5_load_scan_attrs(fileID, scanpath, [attr_sets]) + variable fileID + string scanpath + variable attr_sets + + if (ParamIsDefault(attr_sets)) + attr_sets = 1 + endif + + string attr_path = ReplaceString("//", scanpath + "/attrs", "/") + string attr_list = "" + if (attr_sets & 1) + HDF5ListGroup /TYPE=2 /Z fileID, attr_path + if (!v_flag) + attr_list = S_HDF5ListGroup + endif + endif + + if (attr_sets & 2) + attr_list = AddListItem("LensMode", attr_list, ";", inf) + attr_list = AddListItem("ScientaChannelBegin", attr_list, ";", inf) + attr_list = AddListItem("ScientaChannelEnd", attr_list, ";", inf) + attr_list = AddListItem("ScientaSliceBegin", attr_list, ";", inf) + attr_list = AddListItem("ScientaSliceEnd", attr_list, ";", inf) + endif + + variable ids + variable nds = ItemsInList(attr_list, ";") + string sds + string wavenames = "" + for (ids = 0; ids < nds; ids += 1) + sds = StringFromList(ids, attr_list, ";") + HDF5LoadData /O /Q /Z fileID, attr_path + "/" + sds + if (!v_flag) + wavenames = AddListItem(s_wavenames, wavenames, ";", inf) + endif + endfor + + return wavenames +end + +/// load metadata of a PShell scan group. 
+/// +/// _metadata_ are the HDF5 attributes attached to the scan group. +/// the following attributes are loaded. +/// the respective wave names under Igor are given in parentheses. +/// +/// - Dimensions (ScanDimensions) +/// - Writables (ScanWritables) +/// - Readables (ScanReadables) +/// - Steps (ScanSteps) +/// +/// if they are missing in the file, `ScanDimensions` and `ScanReadables` are set to default values +/// assuming the file contains a single spectrum. +/// +/// data is loaded into the current data folder. +/// +/// @param fileID ID of open HDF5 file from psh5_open_file(). +/// +/// @param scanpath path to the scan group in the HDF5 file, e.g. "/scan 1". +/// +/// @return semicolon-separated list of the loaded waves. +/// +function /s psh5_load_scan_meta(fileID, scanpath) + variable fileID + string scanpath + string wavenames = "" + + HDF5LoadData /O /Q /Z /A="Dimensions" /N=ScanDimensions /TYPE=1 fileID, scanpath + if (!v_flag) + wavenames = AddListItem(s_wavenames, wavenames, ";", inf) + HDF5LoadData /O /Q /Z /A="Writables" /N=ScanWritables /TYPE=1 fileID, scanpath + if (!v_flag) + wavenames = AddListItem(s_wavenames, wavenames, ";", inf) + endif + HDF5LoadData /O /Q /Z /A="Readables" /N=ScanReadables /TYPE=1 fileID, scanpath + if (!v_flag) + wavenames = AddListItem(s_wavenames, wavenames, ";", inf) + endif + HDF5LoadData /O /Q /Z /A="Steps" /N=ScanSteps /TYPE=1 fileID, scanpath + if (!v_flag) + wavenames = AddListItem(s_wavenames, wavenames, ";", inf) + endif + else + make /n=1 /o ScanDimensions + ScanDimensions = 0 + wavenames = AddListItem("ScanDimensions", wavenames, ";", inf) + make /n=1 /o /t ScanReadables + ScanReadables[0] = "ScientaSpectrum" + wavenames = AddListItem("ScanReadables", wavenames, ";", inf) + endif + + return wavenames +end + +/// load a dataset from an open PShell HDF5 file. +/// +/// if the dataset has a maximum of two dimensions, the function loads it at once. 
+/// if it has more than two dimension, the function calls psh5_load_dataset_slabs() to load the data slab by slab.
+///
+/// - the metadata (HDF5 attributes) are loaded into the wave note, cf. psh5_load_dataset_meta().
+/// - dimension labels are set according the dataset name, cf. ps_set_dimlabels().
+/// - wave scaling is set if the necessary scan attributes have been loaded and the `set_scale` option is selected (default).
+///   the attributes must be loaded by psh5_load_scan_meta() and psh5_load_scan_attrs() (attr_sets=2).
+///
+/// @param fileID	ID of open HDF5 file from psh5_open_file().
+///
+/// @param scanpath	path to the scan group in the HDF5 file, e.g. "/scan 1".
+///
+/// @param dataset	name of the dataset.
+///			the name of the loaded wave is a cleaned up version of the dataset name.
+///
+/// @param set_scale	by default, the function tries to set the wave scaling if the attributes have been loaded.
+///			if multiple datasets are loaded from a file,
+///			it is more efficient to set the scaling of all loaded datasets at the end by calling ps_scale_datasets().
+///			@arg 1 (default) set the wave scaling.
+///			@arg 0 do not set the wave scaling.
+///
+/// @return	name of loaded wave if successful. empty string otherwise.
+///
+function /s psh5_load_dataset(fileID, scanpath, datasetname, [set_scale])
+	variable fileID
+	string scanpath
+	string datasetname
+	variable set_scale
+
+	// apply the documented default (scaling enabled) when the optional parameter is omitted.
+	// without this check an omitted parameter would silently default to 0.
+	if (ParamIsDefault(set_scale))
+		set_scale = 1
+	endif
+
+	string datasetpath
+	datasetpath = scanpath + "/" + datasetname
+	datasetpath = ReplaceString("//", datasetpath, "/")
+
+	STRUCT HDF5DataInfo di	// Defined in HDF5 Browser.ipf.
+	InitHDF5DataInfo(di)
+	variable err = HDF5DatasetInfo(fileID, datasetpath, 0, di)
+	if (err != 0)
+		print "error accessing " + datasetpath
+		return ""
+	endif
+
+	string dataname
+	if (di.ndims < 3)
+		// small dataset: load in one go
+		HDF5LoadData /O /Q /Z fileID, datasetpath
+		dataname = StringFromList(0, S_waveNames)
+	else
+		// 3D or 4D dataset: load image by image to limit memory use
+		dataname = psh5_load_dataset_slabs(fileID, scanpath, datasetname)
+	endif
+
+	wave /z data = $dataname
+	if (waveexists(data))
+		psh5_load_dataset_meta(fileID, scanpath, datasetname, data)
+		ps_set_dimlabels(data)
+		if (set_scale)
+			ps_scale_dataset(data)
+		endif
+	else
+		dataname = ""
+	endif
+
+	return dataname
+end
+
+/// load a preview dataset from an open PShell HDF5 file.
+///
+/// if the dataset has a maximum of two dimensions, the function loads it at once.
+/// if it has more than two dimension, the function selects and loads one two-dimensional slab.
+///
+///
+/// @param fileID	ID of open HDF5 file from psh5_open_file().
+///
+/// @param scanpath	path to the scan group in the HDF5 file, e.g. "/scan 1".
+///
+/// @param dataset	name of the dataset.
+///			the name of the loaded wave is a cleaned up version of the dataset name.
+///
+/// @param set_scale	by default, the function tries to set the wave scaling if the attributes have been loaded.
+///			if multiple datasets are loaded from a file,
+///			it is more efficient to set the scaling of all loaded datasets at the end by calling ps_scale_datasets().
+///			@arg 1 (default) set the wave scaling.
+///			@arg 0 do not set the wave scaling.
+///
+/// @return	name of loaded wave if successful. empty string otherwise.
+///
+function /s psh5_load_scan_preview(fileID, scanpath, [set_scale])
+	variable fileID
+	string scanpath
+	variable set_scale
+
+	// apply the documented default (scaling enabled) when the optional parameter is omitted.
+	// without this check an omitted parameter would silently default to 0.
+	if (ParamIsDefault(set_scale))
+		set_scale = 1
+	endif
+
+	dfref saveDF = GetDataFolderDFR()
+	dfref dataDF = saveDF
+
+	// pick the most informative dataset available, in order of preference.
+	// todo: this should be generalized
+	string datasets = psh5_list_scan_datasets(fileID, scanpath)
+	string datasetname = ""
+	if (strsearch(datasets, "ScientaImage", 0) >= 0)
+		datasetname = "ScientaImage"
+	elseif (strsearch(datasets, "ScientaSpectrum", 0) >= 0)
+		datasetname = "ScientaSpectrum"
+	elseif (strsearch(datasets, "ScientaEnergyDistribution", 0) >= 0)
+		datasetname = "ScientaEnergyDistribution"
+	elseif (strsearch(datasets, "Counts", 0) >= 0)
+		datasetname = "Counts"
+	elseif (strsearch(datasets, "SampleCurrent", 0) >= 0)
+		datasetname = "SampleCurrent"
+	else
+		datasetname = StringFromList(0, datasets)
+	endif
+	string datasetpath
+	datasetpath = scanpath + "/" + datasetname
+	datasetpath = ReplaceString("//", datasetpath, "/")
+
+	STRUCT HDF5DataInfo di	// Defined in HDF5 Browser.ipf.
+	InitHDF5DataInfo(di)
+	variable err = HDF5DatasetInfo(fileID, datasetpath, 0, di)
+	if (err != 0)
+		print "error accessing " + datasetpath
+		return ""
+	endif
+
+	string dataname
+	if (di.ndims < 3)
+		// small dataset: load in one go
+		HDF5LoadData /O /Q /Z fileID, datasetpath
+		dataname = StringFromList(0, S_waveNames)
+	else
+		// pick the middle slab of each extra dimension for the preview
+		variable dim2start = 0
+		variable dim2count = 1
+		variable dim3start = 0
+		variable dim3count = 1
+		if (di.ndims >= 3)
+			dim2start = floor(di.dims[2] / 2)
+			dim2count = 1
+		endif
+		if (di.ndims >= 4)
+			dim3start = floor(di.dims[3] / 2)
+			dim3count = 1
+		endif
+
+		dataname = psh5_load_dataset_slab(fileID, scanpath, datasetname, dim2start, dim2count, dim3start, dim3count)
+	endif
+
+	wave /z data = $dataname
+	if (waveexists(data))
+		if (set_scale)
+			// scaling requires the scan attributes; load them into the attr sub-folder
+			setdatafolder dataDF
+			newdatafolder /o/s attr
+			killwaves /a/z
+			psh5_load_scan_attrs(fileID, scanpath, attr_sets=2)
+			setdatafolder dataDF
+			ps_scale_dataset(data)
+		endif
+	else
+		dataname = ""
+	endif
+
+	return dataname
+end
+
+/// load metadata of a PShell dataset.
+///
+/// "metadata" are the HDF5 attributes attached to the scan dataset.
+///
+/// data is added to the wave note.
+///
+/// @param fileID	ID of open HDF5 file from psh5_open_file().
+///
+/// @param datapath	path to the containing group in the HDF5 file.
+///			path separator is the slash "/".
+///
+/// @param dataset	name of the dataset.
+///
+/// @param datawave	metadata is added to the wave note of this wave.
+///
+/// @return	0 if successful, non-zero if an error occurred.
+///
+function psh5_load_dataset_meta(fileID, datapath, datasetname, datawave)
+	variable fileID
+	string datapath
+	string datasetname
+	wave datawave
+
+	// work in a throw-away free folder so the temporary attribute waves
+	// never touch the experiment's data folders
+	dfref saveDF = GetDataFolderDFR()
+	SetDataFolder NewFreeDataFolder()
+
+	string datasetpath = datapath + "/" + datasetname
+	datasetpath = ReplaceString("//", datasetpath, "/")
+	string entry
+
+	// each attribute that is present in the file is appended
+	// to the wave note as a key=value line
+	HDF5LoadData /O /Q /Z /A="Writable Dimension" /N=WriteDim fileID, datasetpath
+	if (!v_flag)
+		wave WriteDim
+		// scan dimension starts at 1
+		sprintf entry, "ScanDimension=%u", WriteDim[0]
+		Note datawave, entry
+	endif
+
+	HDF5LoadData /O /Q /Z /A="Writable Index" /N=WriteIndex fileID, datasetpath
+	if (!v_flag)
+		wave WriteIndex
+		sprintf entry, "WriteableIndex=%u", WriteIndex[0]
+		Note datawave, entry
+	endif
+
+	HDF5LoadData /O /Q /Z /A="Readable Index" /N=ReadIndex fileID, datasetpath
+	if (!v_flag)
+		wave ReadIndex
+		sprintf entry, "ReadableIndex=%u", ReadIndex[0]
+		Note datawave, entry
+	endif
+
+	setdatafolder saveDF
+	return 0
+end
+
+/// load a dataset slab-wise from the open PShell HDF5 file.
+///
+/// the function loads the dataset image by image using the hyperslab option.
+///
+/// @param fileID	ID of open HDF5 file from psh5_open_file().
+///
+/// @param datapath	path to the containing group in the HDF5 file.
+///			path separator is the slash "/".
+///
+/// @param dataset	name of the dataset.
+///			also defines the name of the loaded wave.
+///
+/// @param progress	select whether a progress window is displayed during the process.
+///			@arg 1 (default) show progress window.
+///			@arg 0 do not show progress window.
+///
+/// @return	name of loaded wave if successful. empty string otherwise.
+///
+function /s psh5_load_dataset_slabs(fileID, datapath, datasetname, [progress])
+	variable fileID
+	string datapath
+	string datasetname
+	variable progress
+
+	if (ParamIsDefault(progress))
+		progress = 1
+	endif
+
+	variable result = 0
+	string datasetpath
+	string datawavename
+	datasetpath = datapath + "/" + datasetname
+	datasetpath = ReplaceString("//", datasetpath, "/")
+	// the loaded wave is named after the last path component
+	datawavename = StringFromList(ItemsInList(datasetpath, "/") - 1, datasetpath, "/")
+
+	STRUCT HDF5DataInfo di	// Defined in HDF5 Browser.ipf.
+	InitHDF5DataInfo(di)
+	variable err = HDF5DatasetInfo(fileID, datasetpath, 0, di)
+	if (err != 0)
+		print "error accessing detector/data"
+		return ""
+	endif
+	if (di.ndims < 2)
+		print "error: rank of dataset < 2"
+		return ""
+	endif
+
+	// index assignments into di.dims:
+	// the first two HDF5 dimensions map to Igor y (idy=0) and x (idx=1),
+	// the remaining ones (idz, idt) are iterated slab by slab
+	variable idx, idy, idz, idt, izt
+	idx = 1
+	idy = 0
+	idz = 2
+	idt = 3
+
+	variable nx, ny, nz, nt, nzt
+	nx = di.dims[idx]
+	ny = di.dims[idy]
+	nz = di.dims[idz]
+	nt = di.dims[idt]
+	make /n=(nx,ny,nz,nt) /o $datawavename
+	wave data = $datawavename
+
+	// clamp the loop counts *after* making the wave,
+	// so that singleton dimensions are dropped from the result
+	nz = max(nz, 1)
+	nt = max(nt, 1)
+	nzt = nz * nt
+	izt = 0
+	if (progress)
+		display_progress_panel("HDF5 Import", "Loading data...", nzt)
+	endif
+
+	// load data image by image
+	HDF5MakeHyperslabWave(GetDataFolder(1) + "slab", max(di.ndims, 4))
+	wave slab
+	slab[][%Start] = 0
+	slab[][%Stride] = 1
+	slab[][%Count] = 1
+	slab[][%Block] = 1
+	slab[idx][%Block] = nx
+	slab[idy][%Block] = ny
+
+	variable iz, it
+	for (iz = 0; iz < nz; iz += 1)
+		for (it = 0; it < nt; it += 1)
+			slab[idz][%Start] = iz
+			slab[idt][%Start] = it
+			HDF5LoadData /O /Q /Z /SLAB=slab /N=slabdata fileID, datasetpath
+			wave slabdata	// 2D, 3D, or 4D with singletons
+			// note the transposition: slabdata row/column order is swapped vs. data
+			data[][][iz][it] = slabdata[q][p][0][0]
+
+			// progress window
+			izt += 1
+			if (progress)
+				if (update_progress_panel(izt))
+					result = -4	// user abort
+					break
+				endif
+			endif
+		endfor
+		if (result < 0)
+			break
+		endif
+	endfor
+
+	if (progress)
+		kill_progress_panel()
+	endif
+
+	killwaves /z slab, slabdata
+	if (!result)
+		ps_set_dimlabels(data)
+		return datawavename
+	else
+		// aborted: discard the partially loaded wave
+		killwaves /z data
+		return ""
+	endif
+end
+
+/// load a single image from the open PShell data file.
+///
+/// the function can average over a region in the extra dimensions.
+///
+/// @param fileID	ID of open HDF5 file from psh5_open_file().
+///
+/// @param datapath	path to the containing group in the HDF5 file.
+///			path separator is the slash "/".
+///
+/// @param dataset	name of the dataset.
+///			also defines the name of the loaded wave.
+///
+/// @param dim2start	2nd dimension coordinate of the first image
+///			set to 0 if dimension may not be present
+///
+/// @param dim2count	number of subsequent images to average
+///			set to 1 if dimension may not be present
+///
+/// @param dim3start	3rd dimension coordinate of the first image
+///			set to 0 if dimension may not be present
+///
+/// @param dim3count	number of subsequent images to average
+///			set to 1 if dimension may not be present
+///
+/// @return	name of loaded wave if successful. empty string otherwise.
+///
+function /s psh5_load_dataset_slab(fileID, datapath, datasetname, dim2start, dim2count, dim3start, dim3count)
+	variable fileID
+	string datapath
+	string datasetname
+	variable dim2start
+	variable dim2count
+	variable dim3start
+	variable dim3count
+
+	string datasetpath
+	string datawavename
+	datasetpath = datapath + "/" + datasetname
+	datasetpath = ReplaceString("//", datasetpath, "/")
+	// the loaded wave is named after the last path component
+	datawavename = StringFromList(ItemsInList(datasetpath, "/") - 1, datasetpath, "/")
+
+	STRUCT HDF5DataInfo di
+	InitHDF5DataInfo(di)
+	variable err = HDF5DatasetInfo(fileID, datasetpath, 0, di)
+	if (err != 0)
+		print "error accessing detector/data"
+		return ""
+	endif
+	if (di.ndims < 2)
+		print "error: rank of dataset < 2"
+		return ""
+	endif
+
+	// the first two HDF5 dimensions map to Igor y (idy=0) and x (idx=1)
+	variable idx, idy, idz, idt
+	idx = 1
+	idy = 0
+	idz = 2
+	idt = 3
+
+	variable nx, ny
+	nx = di.dims[idx]
+	ny = di.dims[idy]
+	make /n=(nx,ny) /o $datawavename
+	wave data = $datawavename
+	data = 0
+
+	HDF5MakeHyperslabWave(GetDataFolder(1) + "slab", max(di.ndims, 4))
+	wave slab
+	slab[][%Start] = 0
+	slab[][%Stride] = 1
+	slab[][%Count] = 1
+	slab[][%Block] = 1
+	slab[idx][%Block] = nx
+	slab[idy][%Block] = ny
+
+	// accumulate the requested slab range; navg counts successful loads
+	variable iz, it
+	variable navg = 0
+	variable dim2end = dim2start + dim2count - 1
+	variable dim3end = dim3start + dim3count - 1
+	for (iz = dim2start; iz <= dim2end; iz += 1)
+		for (it = dim3start; it <= dim3end; it += 1)
+			slab[idz][%Start] = iz
+			slab[idt][%Start] = it
+			HDF5LoadData /O /Q /Z /SLAB=slab /N=slabdata fileID, datasetpath
+			if (!v_flag)
+				wave slabdata
+				// note the transposition: slabdata row/column order is swapped vs. data
+				data += slabdata[q][p][0][0]
+				navg += 1
+			endif
+		endfor
+	endfor
+	// average over the images that actually loaded
+	if (navg)
+		data /= navg
+	endif
+
+	killwaves /z slab, slabdata
+	ps_set_dimlabels(data)
+	return datawavename
+end
+
+/// set dimension labels according to the axis type
+///
+/// this function asserts a particular ordering of dimensions types
+/// based on the name of the wave for
+/// ScientaImage, ScientaSpectrum, ImageAngleDistribution, ImageEnergyDistribution.
+/// all other waves must be one-dimensional, and the dimension must be the scan dimension.
+///
+/// dimension labels are required by scaling functions.
+///
+function ps_set_dimlabels(data)
+	wave data
+
+	string name = NameOfWave(data)
+
+	// intrinsic dimensions
+	strswitch(name)
+		case "ScientaImage":
+			setdimlabel 0, -1, $kEnergyDimLabel, data
+			setdimlabel 1, -1, $kAngleDimLabel, data
+			if (WaveDims(data) >= 3)
+				setdimlabel 2, -1, $kScanDimLabel, data
+			endif
+			break
+		case "ScientaSpectrum":
+			setdimlabel 0, -1, $kEnergyDimLabel, data
+			break
+		case "ImageAngleDistribution":
+			setdimlabel 0, -1, $kScanDimLabel, data
+			setdimlabel 1, -1, $kAngleDimLabel, data
+			break
+		case "ImageEnergyDistribution":
+			setdimlabel 0, -1, $kScanDimLabel, data
+			setdimlabel 1, -1, $kEnergyDimLabel, data
+			break
+		default:
+			// any other wave is assumed to be a scan over dimension 0
+			setdimlabel 0, -1, $kScanDimLabel, data
+	endswitch
+end
+
+/// set the dimension scales of loaded PShell Scienta datasets according to attributes.
+///
+/// the datasets must be in the current data folder.
+/// all datasets listed in the ScanReadables waves are scaled
+/// according to the attribute waves in the :attr folder.
+///
+/// the dimension labels of the dataset waves must be set correctly, e.g. by ps_set_dimlabels().
+/// this is implicitly done by the high-level load functions.
+///
+function ps_scale_datasets()
+	dfref dataDF = GetDataFolderDFR()
+
+	// ps_detect_scale redimensions these waves and sets their dimension labels
+	make /n=3 /free lo, hi
+	make /n=3 /t /free un
+	ps_detect_scale(lo, hi, un)
+
+	// apply the detected scales to every wave listed in ScanReadables
+	wave /t /z /SDFR=dataDF ScanReadables
+	if (WaveExists(ScanReadables))
+		variable isr
+		variable nsr = numpnts(ScanReadables)
+		for (isr = 0; isr < nsr; isr += 1)
+			wave /z /SDFR=dataDF wsr = $ScanReadables[isr]
+			if (WaveExists(wsr))
+				ps_scale_dataset_2(wsr, lo, hi, un)
+			endif
+		endfor
+	endif
+end
+
+/// set the dimension scales of a loaded PShell Scienta dataset according to attributes.
+///
+/// the attributes must be in the child folder `:attr` next to the dataset.
+///
+/// the dimension labels of the dataset waves must be set correctly, cf. ps_set_dimlabels().
+/// this is implicitly done by the high-level load functions.
+///
+/// the function is useful if a single dataset is loaded and scaled.
+/// if multiple datasets are loaded, ps_scale_datasets() is slightly more efficient.
+///
+/// @param data	data wave to be scaled.
+///		dimension labels (index -1) must be set correctly, cf. ps_set_dimlabels().
+///
+function ps_scale_dataset(data)
+	wave data
+
+	// the attribute waves live next to the data wave,
+	// so switch to its folder while detecting the scales
+	dfref prevDF = GetDataFolderDFR()
+	setdatafolder GetWavesDataFolderDFR(data)
+
+	make /n=3 /free lo, hi
+	make /n=3 /t /free un
+	ps_detect_scale(lo, hi, un)
+	ps_scale_dataset_2(data, lo, hi, un)
+
+	setdatafolder prevDF
+end
+
+/// detect the dimension scales from attributes.
+///
+/// the function checks the current data folder and the sub-folder `:attr` for scan parameters.
+/// the results are written to the provided waves.
+/// the function is normally called by ps_scale_datasets() but can also be used independently.
+///
+/// the provided waves are redimensioned by the function, and dimension labels are set.
+/// the scale parameters can then be extracted by keyword, e.g.,
+/// @arg	`lo[%%energy]`	analyser energy dimension.
+/// @arg	`lo[%%angle]`	analyser angle dimension.
+/// @arg	`lo[%%scan]`	scan dimension.
+/// @arg	`lo[%%data]`	data dimension (units).
+///
+/// the function tries to read the following waves,
+/// and may fall back to more or less reasonable default values if they are not found.
+/// @arg	`:attr:LensMode`
+/// @arg	`:attr:ScientaChannelBegin`
+/// @arg	`:attr:ScientaChannelEnd`
+/// @arg	`:attr:ScientaSliceBegin`
+/// @arg	`:attr:ScientaSliceEnd`
+/// @arg	`ScanWritables`
+/// @arg	wave referenced by `ScanWritables[0]`
+///
+/// @param lo	wave to receive the lower limits.
+///
+/// @param hi	wave to receive the upper limits.
+///
+/// @param un	text wave to receive the unit labels.
+///
+/// @return	the function results are written to the lo, hi, un waves.
+///
+function ps_detect_scale(lo, hi, un)
+	wave lo
+	wave hi
+	wave /t un
+
+	dfref dataDF = GetDataFolderDFR()
+	dfref attrDF = :attr
+
+	// one element per dimension kind: energy, angle, scan, data
+	redimension /n=4 lo, hi, un
+	setdimlabel 0, 0, $kEnergyDimLabel, lo, hi, un
+	setdimlabel 0, 1, $kAngleDimLabel, lo, hi, un
+	setdimlabel 0, 2, $kScanDimLabel, lo, hi, un
+	setdimlabel 0, 3, $kDataDimLabel, lo, hi, un
+
+	// default values, used when the corresponding attributes are missing
+	lo[%$kEnergyDimLabel] = 0
+	hi[%$kEnergyDimLabel] = 1
+	un[%$kEnergyDimLabel] = "eV"
+
+	lo[%$kAngleDimLabel] = -1
+	hi[%$kAngleDimLabel] = 1
+	un[%$kAngleDimLabel] = ""
+
+	lo[%$kScanDimLabel] = 0
+	hi[%$kScanDimLabel] = 1
+	un[%$kScanDimLabel] = ""
+
+	lo[%$kDataDimLabel] = 0
+	hi[%$kDataDimLabel] = 0
+	un[%$kDataDimLabel] = "arb."
+
+	wave /SDFR=attrDF /T /Z LensMode
+	wave /SDFR=attrDF /Z ChannelBegin = ScientaChannelBegin
+	wave /SDFR=attrDF /Z ChannelEnd = ScientaChannelEnd
+	wave /SDFR=attrDF /Z SliceBegin = ScientaSliceBegin
+	wave /SDFR=attrDF /Z SliceEnd = ScientaSliceEnd
+
+	// lens mode can give more detail
+	if (waveexists(LensMode) && (numpnts(LensMode) >= 1))
+		strswitch(LensMode[0])
+			case "Angular45":
+				lo[%$kAngleDimLabel] = -45/2
+				hi[%$kAngleDimLabel] = +45/2
+				un[%$kAngleDimLabel] = "deg"
+				break
+			case "Angular60":
+				lo[%$kAngleDimLabel] = -60/2
+				hi[%$kAngleDimLabel] = +60/2
+				un[%$kAngleDimLabel] = "deg"
+				break
+			case "Transmission":
+				un[%$kAngleDimLabel] = "arb."
+				break
+		endswitch
+	endif
+
+	// best option if scales are explicit in separate waves
+	if (waveexists(ChannelBegin) && waveexists(ChannelEnd) && (numpnts(ChannelBegin) >= 1) && (numpnts(ChannelEnd) >= 1))
+		lo[%$kEnergyDimLabel] = ChannelBegin[0]
+		hi[%$kEnergyDimLabel] = ChannelEnd[0]
+	endif
+	if (waveexists(SliceBegin) && waveexists(SliceEnd) && (numpnts(SliceBegin) >= 1) && (numpnts(SliceEnd) >= 1))
+		lo[%$kAngleDimLabel] = SliceBegin[0]
+		hi[%$kAngleDimLabel] = SliceEnd[0]
+	endif
+	// the scan scale runs from the first to the last value of the scanner wave;
+	// the scanner wave is looked up in the data folder first, then in :attr
+	wave /z /t /SDFR=dataDF ScanWritables
+	if (WaveExists(ScanWritables))
+		wave /z /SDFR=dataDF scanner = $ScanWritables[0]
+		if (!WaveExists(scanner))
+			wave /z /SDFR=attrDF scanner = $ScanWritables[0]
+		endif
+		if (WaveExists(scanner))
+			lo[%$kScanDimLabel] = scanner[0]
+			hi[%$kScanDimLabel] = scanner[numpnts(scanner)-1]
+		endif
+	endif
+end
+
+/// set the dimension scales of a dataset.
+///
+/// the function is normally called by ps_scale_datasets() but can also be used independently.
+/// the limits and units must be given as function arguments with proper dimension labels.
+///
+/// the provided limit and unit waves must have dimension labels
+/// matching the -1 index dimension labels of the data wave,
+/// such as set by the ps_detect_scale() function.
+/// the scale parameters are extracted by keyword, e.g.,
+/// @arg	`lo[%%energy]`	analyser energy dimension.
+/// @arg	`lo[%%angle]`	analyser angle dimension.
+/// @arg	`lo[%%scan]`	scan dimension.
+/// @arg	`lo[%%data]`	data dimension.
+///
+/// @param data	data wave to be scaled.
+///		dimension labels (index -1) must be set to match the limit waves.
+///
+/// @param lo	lower limits.
+///
+/// @param hi	upper limits.
+///
+/// @param un	unit labels.
+///
+function ps_scale_dataset_2(data, lo, hi, un)
+	wave data
+	wave lo
+	wave hi
+	wave /t un
+
+	// for each of the first three dimensions, look up the wave's -1 dimension
+	// label and apply the matching limits and units from the lo/hi/un waves.
+	// dimensions without a label are left untouched.
+	string dim_label
+
+	dim_label = GetDimLabel(data, 0, -1)
+	if (strlen(dim_label))
+		setscale /i x lo[%$dim_label], hi[%$dim_label], un[%$dim_label], data
+	endif
+
+	dim_label = GetDimLabel(data, 1, -1)
+	if (strlen(dim_label))
+		setscale /i y lo[%$dim_label], hi[%$dim_label], un[%$dim_label], data
+	endif
+
+	dim_label = GetDimLabel(data, 2, -1)
+	if (strlen(dim_label))
+		setscale /i z lo[%$dim_label], hi[%$dim_label], un[%$dim_label], data
+	endif
+
+	// data (value) units
+	setscale d 0, 0, un[%$kDataDimLabel], data
+end
+
+/// load and reduce the ScientaImage dataset of the first scan of a PShell data file.
+///
+/// the resulting dataset is reduced in one image dimension by a user-defined reduction function,
+/// e.g. by region-of-interest integration, curve fitting, etc.
+///
+/// the function loads the dataset image by image using the hyperslab option
+/// and applies a custom reduction function to each image.
+/// the results from the reduction function are composed into one result wave.
+/// the raw data are discarded.
+///
+/// if the data is from the electron analyser driver and some special attributes are included,
+/// the function will set the scales of the image dimensions.
+///
+/// @param ANickName	destination folder name (top level under root).
+///
+/// @param APathName	igor symbolic path name. can be empty if the path is specified in FileName or a dialog box should be displayed.
+///
+/// @param AFileName	if empty a dialog box shows up.
+///
+/// @param reduction_func	custom reduction function
+///				(any user-defined function which has the same parameters as adh5_default_reduction())
+///
+/// @param reduction_param	parameter string for the reduction function.
+///
+/// @param progress	progress window.
+///			@arg 1 (default)	show progress window
+///			@arg 0	do not show progress window
+///
+/// @return	semicolon-separated list of the loaded waves,
+///		`ReducedData1` and `ReducedData2` if successful.
+///		empty string if an error occurred.
+/// error messages are printed to the history.
+///
+/// @return	global string s_filepath in new data folder contains the full file path on disk.
+///
+/// @return	global string s_scanpaths in new data folder contains a list of scan groups inside the file.
+///
+/// @todo	load scan positions.
+///
+function /s psh5_load_reduced(ANickName, APathName, AFileName, reduction_func, reduction_param, [progress])
+	string ANickName
+	string APathName
+	string AFileName
+	funcref adh5_default_reduction reduction_func
+	string reduction_param
+	variable progress
+
+	if (ParamIsDefault(progress))
+		progress = 1
+	endif
+
+	dfref saveDF = GetDataFolderDFR()
+
+	// performance monitoring
+	variable timerRefNum
+	variable /g psh5_perf_secs
+	timerRefNum = startMSTimer
+
+	variable fileID = psh5_open_file(ANickName, APathName, AFileName)
+	string wavenames = ""
+	if (fileID)
+		dfref fileDF = GetDataFolderDFR()
+		svar s_filepath
+		svar s_scanpaths
+		AFileName = s_filepath
+		print "loading " + s_filepath + "\r"
+
+		// note: only the first scan group (ig = 0) is processed
+		variable ig = 0
+		variable ng = ItemsInList(s_scanpaths)
+		string sg
+		string folder
+
+		sg = StringFromList(ig, s_scanpaths)
+		// create a clean sub-folder named after the scan group
+		folder = CleanupName(ReplaceString("/", sg, ""), 0)
+		setdatafolder fileDF
+		newdatafolder /s /o $folder
+		dfref dataDF = GetDataFolderDFR()
+		psh5_load_scan_meta(fileID, sg)
+		newdatafolder /s /o attr
+		psh5_load_scan_attrs(fileID, sg)
+		setdatafolder dataDF
+		wavenames = psh5_load_dataset_reduced(fileID, sg, "ScientaImage", reduction_func, reduction_param, progress=progress)
+
+		psh5_close_file(fileID)
+	endif
+
+	if (timerRefNum >= 0)
+		psh5_perf_secs = stopMSTimer(timerRefNum) / 1e6
+	endif
+
+	setdatafolder saveDF
+	return wavenames
+end
+
+
+/// load a reduced dataset from the open PShell HDF5 file.
+///
+/// the function loads the dataset image by image using the hyperslab option
+/// and applies a custom reduction function to each image.
+/// the results from the reduction function are written to the `ReducedData1` and `ReducedData2` waves. +/// the raw data are discarded. +/// +/// by default, the reduction function is called in separate threads to reduce the total loading time. +/// (see the global variable psh5_perf_secs which reports the total run time of the function.) +/// the effect varies depending on the balance between file loading (image size) +/// and data processing (complexity of the reduction function). +/// for debugging the reduction function, multi-threading can be disabled. +/// +/// if the reduction function requires the image waves to be scaled properly, +/// the attributes must have been loaded by psh5_load_scan_attrs() before. +/// in this case, the scales of the result waves are also set by the function. +/// otherwise, the results can also be scaled by ps_scale_dataset() later. +/// +/// @param fileID ID of open HDF5 file from psh5_open_file(). +/// +/// @param scanpath path to scan group in the HDF5 file. +/// +/// @param dataset name of the dataset. +/// +/// @param reduction_func custom reduction function +/// (any user-defined function which has the same parameters as adh5_default_reduction()). +/// +/// @param reduction_param parameter string for the reduction function. +/// +/// @param progress progress window. +/// @arg 1 (default) show progress window +/// @arg 0 do not show progress window +/// +/// @param nthreads +/// @arg -1 (default) use as many threads as there are processor cores (in addition to main thread). +/// @arg 0 use main thread only (e.g. for debugging the reduction function). +/// @arg >= 1 use a fixed number of (additional) threads. +/// +/// @return semicolon-separated list of the loaded waves, +/// `ReducedData1` and `ReducedData2` if successful. +/// empty string if an error occurred. +/// error messages are printed to the history. 
+///
+function /s psh5_load_dataset_reduced(fileID, scanpath, datasetname, reduction_func, reduction_param, [progress, nthreads])
+	variable fileID
+	string scanpath
+	string datasetname
+	funcref adh5_default_reduction reduction_func
+	string reduction_param
+	variable progress
+	variable nthreads
+
+	if (ParamIsDefault(progress))
+		progress = 1
+	endif
+	if (ParamIsDefault(nthreads))
+		nthreads = -1
+	endif
+
+	variable result = 0
+	string datasetpath
+	string datawavename
+	string wavenames = ""
+
+	datasetpath = scanpath + "/" + datasetname
+	datasetpath = ReplaceString("//", datasetpath, "/")
+	datawavename = StringFromList(ItemsInList(datasetpath, "/") - 1, datasetpath, "/")
+
+	STRUCT HDF5DataInfo di	// Defined in HDF5 Browser.ipf.
+	InitHDF5DataInfo(di)
+	variable err = HDF5DatasetInfo(fileID, datasetpath, 0, di)
+	if (err != 0)
+		print "error accessing detector/data"
+		result = -1
+		return wavenames
+	endif
+	if (di.ndims < 2)
+		print "error: rank of dataset < 2"
+		result = -2
+		return wavenames
+	endif
+
+	// the first two HDF5 dimensions map to Igor y (idy=0) and x (idx=1),
+	// the remaining ones (idz, idt) are iterated slab by slab
+	variable idx, idy, idz, idt
+	idx = 1
+	idy = 0
+	idz = 2
+	idt = 3
+
+	variable nx, ny, nz, nt, nzt
+	nx = di.dims[idx]
+	ny = di.dims[idy]
+	nz = di.dims[idz]
+	nt = di.dims[idt]
+	make /n=(nx,ny,nz,nt) /o $datawavename /wave=data
+
+	// adjust nz and nt *after* making the data wave
+	nz = max(nz, 1)
+	nt = max(nt, 1)
+	nzt = nz * nt
+
+	// load data image by image
+	HDF5MakeHyperslabWave(GetDataFolder(1) + "slab", max(di.ndims, 4))
+	wave slab
+	slab[][%Start] = 0
+	slab[][%Stride] = 1
+	slab[][%Count] = 1
+	slab[][%Block] = 1
+	slab[idx][%Block] = nx
+	slab[idy][%Block] = ny
+
+	// set up multi threading
+	// nthreads < 0: one worker per processor; nthreads == 0: process in the main thread
+	if (nthreads < 0)
+		nthreads = ThreadProcessorCount
+	endif
+	if (nthreads > 0)
+		variable threadGroupID = ThreadGroupCreate(nthreads)
+		variable ithread
+		for (ithread = 0; ithread < nthreads; ithread += 1)
+			ThreadStart threadGroupID, ithread, reduce_slab_worker(reduction_func)
+		endfor
+	else
+		// single-threaded fallback: remember each job folder for the collection loop
+		make /n=(nzt) /df /free processing_folders
+	endif
+
+	if (progress)
+		display_progress_panel("HDF5 Import", "Loading data (step 1 of 2)...", nzt)
+	endif
+
+	// pre-scaled template image that is duplicated for each job
+	make /n=(nx,ny) /d /o image_template
+	setdimlabel 0, -1, $kEnergyDimLabel, image_template
+	setdimlabel 1, -1, $kAngleDimLabel, image_template
+	ps_scale_dataset(image_template)
+
+	// step 1: load each image into its own job folder and queue it for reduction
+	variable iz, it, izt
+	string dfname
+	izt = 0
+	for (iz = 0; iz < nz; iz += 1)
+		for (it = 0; it < nt; it += 1)
+			// load hyperslab
+			slab[idz][%Start] = iz
+			slab[idt][%Start] = it
+			dfname = "processing_" + num2str(izt)
+			newdatafolder /s $dfname
+			HDF5LoadData /O /Q /Z /SLAB=slab /N=slabdata fileID, datasetpath
+
+			// send to processing queue
+			duplicate image_template, image
+			variable /g r_index = iz
+			variable /g s_index = it
+			string /g func_param = reduction_param
+
+			if (nthreads > 0)
+				WaveClear image
+				ThreadGroupPutDF threadGroupID, :
+			else
+				processing_folders[izt] = GetDataFolderDFR()
+				make /n=1/d profile1, profile2
+				wave slabdata
+				variable /g func_result
+				func_result = reduce_slab_image(slabdata, image, profile1, profile2, reduction_func, func_param)
+				WaveClear slabdata, image, profile1, profile2
+				setdatafolder ::
+			endif
+
+			izt += 1
+			// progress window
+			if (progress)
+				if (update_progress_panel(izt))
+					print "user abort"
+					result = -4
+					break
+				endif
+			endif
+		endfor
+	endfor
+
+	killwaves /z slab, slabdata, image_template
+	if (progress)
+		update_progress_panel(0, message="Processing data (step 2 of 2)...")
+	endif
+
+	// step 2: collect the results, in job order, and compose the output waves
+	dfref dfr
+	for (izt = 0; (izt < nzt) && (result == 0); izt += 1)
+		if (nthreads > 0)
+			// poll the thread group output queue, allowing the user to abort while waiting
+			do
+				dfr = ThreadGroupGetDFR(threadGroupID, 1000)
+				if (DatafolderRefStatus(dfr) != 0)
+					break
+				endif
+				if (progress)
+					if (update_progress_panel(izt))
+						print "user abort"
+						result = -4
+						break
+					endif
+				endif
+			while (1)
+		else
+			dfr = processing_folders[izt]
+			if (progress)
+				if (update_progress_panel(izt))
+					print "user abort"
+					result = -4
+					break
+				endif
+			endif
+		endif
+
+		if (result != 0)
+			break
+		endif
+
+		nvar rr = dfr:r_index
+		nvar ss = dfr:s_index
+		nvar func_result = dfr:func_result
+		wave profile1 = dfr:profile1
+		wave profile2 = dfr:profile2
+
+		if (func_result == 0)
+			if (izt == 0)
+				// first result: create the output waves and copy scales from the profiles
+				make /n=(dimsize(profile1, 0), nz, nt) /d /o ReducedData1
+				make /n=(dimsize(profile2, 0), nz, nt) /d /o ReducedData2
+				setdimlabel 0, -1, $getdimlabel(profile1, 0, -1), ReducedData1
+				setdimlabel 0, -1, $getdimlabel(profile2, 0, -1), ReducedData2
+				setdimlabel 1, -1, $kScanDimLabel, ReducedData1
+				setdimlabel 1, -1, $kScanDimLabel, ReducedData2
+				setscale /p x dimoffset(profile1, 0), dimdelta(profile1, 0), waveunits(profile1, 0), ReducedData1
+				setscale /p x dimoffset(profile2, 0), dimdelta(profile2, 0), waveunits(profile2, 0), ReducedData2
+				setscale d 0, 0, waveunits(profile1, -1), ReducedData1
+				setscale d 0, 0, waveunits(profile2, -1), ReducedData2
+			endif
+			ReducedData1[][rr][ss] = profile1[p]
+			ReducedData2[][rr][ss] = profile2[p]
+		else
+			print "error during data reduction."
+			result = -3
+			break
+		endif
+	endfor
+
+	if (nthreads > 0)
+		variable tstatus = ThreadGroupRelease(threadGroupID)
+		if (tstatus == -2)
+			print "error: thread did not terminate properly."
+			result = -5
+		endif
+	else
+		for (izt = 0; izt < nzt; izt += 1)
+			KillDataFolder /Z processing_folders[izt]
+		endfor
+	endif
+
+	if (result == 0)
+		// drop singleton trailing dimensions from the results
+		if (nz == 1)
+			redimension /n=(-1, 0, 0) ReducedData1
+			redimension /n=(-1, 0, 0) ReducedData2
+		elseif (nt == 1)
+			redimension /n=(-1, nz, 0) ReducedData1
+			redimension /n=(-1, nz, 0) ReducedData2
+		endif
+		wavenames = "ReducedData1;ReducedData2;"
+		ps_scale_dataset(ReducedData1)
+		ps_scale_dataset(ReducedData2)
+	endif
+	if (progress)
+		kill_progress_panel()
+	endif
+
+	return wavenames
+end
+
+/// worker thread: take job folders from the input queue, reduce the contained image,
+/// and put the result folder on the output queue. terminates when the queue closes.
+threadsafe static function reduce_slab_worker(reduction_func)
+	funcref adh5_default_reduction reduction_func
+	do
+		// wait for job from main thread
+		do
+			dfref dfr = ThreadGroupGetDFR(0, 1000)
+			if (DataFolderRefStatus(dfr) == 0)
+				if (GetRTError(2))
+					return 0 // no more jobs
+				endif
+			else
+				break
+			endif
+		while (1)
+
+		// get input data
+		wave slabdata = dfr:slabdata
+		wave image = dfr:image
+		svar func_param = dfr:func_param
+		nvar rr = dfr:r_index
+		nvar ss = dfr:s_index
+
+		// do the work
+		newdatafolder /s outDF
+		make /n=1/d profile1, profile2
+		variable /g r_index = rr
+		variable /g s_index = ss
+		variable /g func_result
+		func_result = reduce_slab_image(slabdata, image, profile1, profile2, reduction_func, func_param)
+
+		// send output to queue and clean up
+		WaveClear slabdata, image, profile1, profile2
+		ThreadGroupPutDF 0, :
+		KillDataFolder dfr
+	while (1)
+
+	return 0
+end
+
+/// apply the reduction function to one loaded slab.
+/// the slab is transposed into the pre-scaled image wave first.
+threadsafe static function reduce_slab_image(slabdata, image, profile1, profile2, reduction_func, reduction_param)
+	wave slabdata
+	wave image
+	wave profile1
+	wave profile2
+	funcref adh5_default_reduction reduction_func
+	string reduction_param
+
+	image = slabdata[q][p][0][0]
+
+	return reduction_func(image, profile1, profile2, reduction_param)
+end
+
+/// load descriptive info from a PShell data file.
+///
+/// the info string lists the following information for each scan contained in the file:
+/// - path of the scan group inside the file.
+/// - number of scan positions.
+/// - dataset names of scan positioners.
+/// - dataset names of detectors.
+///
+/// @param APathName	igor symbolic path name. can be empty if the path is specified in AFileName or a dialog box should be displayed
+///
+/// @param AFileName	if empty a dialog box shows up
+///
+/// @return	newline terminated string.
+///
+function /s psh5_load_info(APathName, AFileName)
+	string APathName
+	string AFileName
+
+	// work in a free folder so the metadata waves loaded per scan are discarded
+	dfref saveDF = GetDataFolderDFR()
+	dfref fileDF = NewFreeDataFolder()
+	setdatafolder fileDF
+
+	variable fileID
+	string filepath
+	string scanpaths
+	variable nscans
+	variable iscan
+	string scanpath
+	string info = ""
+
+	HDF5OpenFile /P=$APathName /R fileID as AFileName
+	if (v_flag == 0)
+		filepath = s_path + s_filename
+		scanpaths = psh5_list_scans(fileID)
+		nscans = ItemsInList(scanpaths)
+		for (iscan = 0; iscan < nscans; iscan += 1)
+			scanpath = StringFromList(iscan, scanpaths)
+			info = info + scanpath + "\r"
+			info = info + psh5_load_scan_info(fileID, scanpath)
+		endfor
+		HDF5CloseFile fileID
+	endif
+
+	setdatafolder saveDF
+	return info
+end
+
+/// load descriptive info from a PShell scan.
+///
+/// the info string contains up to three lines which are made up of the following information:
+/// - number of scan positions.
+/// - dataset names of scan positioners.
+/// - dataset names of detectors.
+///
+/// @param fileID	ID of open HDF5 file from psh5_open_file().
+///
+/// @param scanpath	path to scan group in the HDF5 file.
+///
+/// @return	newline terminated string.
+///
+function /s psh5_load_scan_info(fileID, scanpath)
+	variable fileID
+	string scanpath
+
+	string info = ""
+	string positions = ""
+	string positioners = ""
+	string detectors = ""
+
+	// (re)load the scan metadata waves into the current folder
+	psh5_load_scan_meta(fileID, scanpath)
+	wave /t /z ScanWritables
+	wave /t /z ScanReadables
+	wave /z ScanSteps
+
+	if (WaveExists(ScanSteps) && (numpnts(ScanSteps) >= 1))
+		// report positions = steps + 1, working on a free copy
+		// so that the loaded ScanSteps wave keeps the file's values
+		duplicate /free ScanSteps, ScanPositions
+		ScanPositions += 1
+		positions = "positions = (" + wave2list(ScanPositions, "%u", ",") + ")"
+		info = AddListItem(positions, info, "\r", inf)
+	endif
+	if (WaveExists(ScanWritables) && (numpnts(ScanWritables) >= 1))
+		positioners = "positioners = " + twave2list(ScanWritables, ",")
+		info = AddListItem(positioners, info, "\r", inf)
+	endif
+	if (WaveExists(ScanReadables) && (numpnts(ScanReadables) >= 1))
+		detectors = "detectors = " + twave2list(ScanReadables, ",")
+		info = AddListItem(detectors, info, "\r", inf)
+	endif
+
+	return info
+end
+
+/// convert text wave to list.
+///
+/// @param wt	text wave to convert.
+///
+/// @param sep	item separator, e.g. ",".
+///
+/// @return	list of all wave elements in wave order.
+///		note: the list ends with a trailing separator.
+///
+static function /s twave2list(wt, sep)
+	wave /t wt
+	string sep
+
+	string list = ""
+	variable n = numpnts(wt)
+	variable i
+	for (i = 0; i < n; i += 1)
+		list = AddListItem(wt[i], list, sep, inf)
+	endfor
+
+	return list
+end
+
+/// convert numeric wave to list.
+///
+/// @param w	numeric wave to convert.
+///
+/// @param format	sprintf format for one element, e.g. "%u".
+///
+/// @param sep	item separator, e.g. ",".
+///
+/// @return	list of all formatted wave elements in wave order.
+///		note: the list ends with a trailing separator.
+///
+static function /s wave2list(w, format, sep)
+	wave w
+	string format
+	string sep
+
+	string list = ""
+	variable n = numpnts(w)
+	variable i
+	string s
+	for (i = 0; i < n; i += 1)
+		sprintf s, format, w[i]
+		list = AddListItem(s, list, sep, inf)
+	endfor
+
+	return list
+end