From fa24916aa6d89718f556f9e15c0def42c8ee2a15 Mon Sep 17 00:00:00 2001 From: matthias muntwiler Date: Tue, 1 Mar 2022 15:28:19 +0100 Subject: [PATCH] code changes for release 3.0.0: new PShell import --- LICENSE | 2 +- README.md | 25 +- doc/src/mainpage.dox | 11 +- mm/mm-physconst.ipf | 1 + pearl/pearl-area-import.ipf | 244 +- pearl/pearl-data-explorer.ipf | 2396 +++++++++++------- pearl/pearl-elog.ipf | 6 +- pearl/pearl-pshell-import.ipf | 3690 ++++++++++++++-------------- pearl/pearl-scienta-preprocess.ipf | 142 ++ 9 files changed, 3592 insertions(+), 2925 deletions(-) diff --git a/LICENSE b/LICENSE index ca2703d..50e0268 100644 --- a/LICENSE +++ b/LICENSE @@ -186,7 +186,7 @@ same "printed page" as the copyright notice for easier identification within third-party archives. - Copyright 2009-2019 Paul Scherrer Institut + Copyright 2009-2022 Paul Scherrer Institut Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. diff --git a/README.md b/README.md index 0f22b1a..330f2a9 100644 --- a/README.md +++ b/README.md @@ -1,7 +1,7 @@ Introduction ============ -PEARL Procedures is a suite of Igor Pro procedures developed for data acquisition and data processing at the PEARL beamline at the Swiss Light Source. +PEARL Procedures is a suite of Igor Pro procedures developed for data acquisition and data processing at the PEARL beamline at the Swiss Light Source. PEARL Procedures requires Igor Pro 8 or newer. Installation @@ -11,17 +11,14 @@ PEARL Procedures should be installed according to the regular Igor Pro guideline - Make a `pearl-procs` directory in your private or shared `User Procedures` folder, and copy the PEARL Procedures distribution there. - Create shortcuts of the `pearl-arpes.ipf` and `pearl-menu.ipf` files, and move them to the `Igor Procedures` folder next to your `User Procedures` folder. 
-- Find the `HDF5.XOP` (`HDF5-64.xop` for Igor 7 64-bit) extension in the `Igor Pro Folder` under `More Extensions/File Loaders` (`More Extensions (64-bit)/File Loaders`), create a shortcut, and move the shortcut to the `Igor Extensions` folder next to your `User Procedures` folder. + +Igor Pro 9 imports the HDF5 library by default. For earlier versions: + +- Find the `HDF5.XOP` (`HDF5-64.xop` for 64-bit) extension in the `Igor Pro Folder` under `More Extensions/File Loaders` (`More Extensions (64-bit)/File Loaders`), create a shortcut, and move the shortcut to the `Igor Extensions` folder next to your `User Procedures` folder. - Find the `HDF5 Help.ihf` next to `HDF5.XOP`, create a shortcut, and move the shortcut to the `Igor Help Files` folder next to your `User Procedures` folder. -PEARL Procedures are tested on Igor 8.04, 64-bit. -Please make sure to use the latest release version. - -While most of the code remains compatible with Igor 6.37, it is not tested and not supported. -Importing recent PShell data files may requires Igor 8 due to changes in the HDF5 library. -Igor 7 contains some bugs which affect PEARL Procedures and should not be used. - -As long as no Igor 8 specific features are used (long object names), the produced experiment files remain compatible with Igor 6. +PEARL Procedures are tested on Igor Pro 8.04, 64-bit. +Please make sure to use the latest release version of Igor Pro. License @@ -39,12 +36,18 @@ Matthias Muntwiler, Copyright --------- -Copyright 2009-2021 by [Paul Scherrer Institut](http://www.psi.ch) +Copyright 2009-2022 by [Paul Scherrer Institut](http://www.psi.ch) Release Notes ============= +## rev-distro-3.0.0 + +- New panel and procedure interface for PShell data file import. +- Support for latest PShell file structure. +- Igor Pro 8.04 or later is required. + ## rev-distro-2.2.0 - Updates, bugfixes and performance improvements in angle scan processing. 
diff --git a/doc/src/mainpage.dox b/doc/src/mainpage.dox index 6ebe118..735f9f4 100644 --- a/doc/src/mainpage.dox +++ b/doc/src/mainpage.dox @@ -9,11 +9,18 @@ PEARL Procedures is a suite of Igor Pro procedures developed for data acquisitio \section sec_install Installation +PEARL Procedures are tested on Igor Pro 8.04, 64-bit. +Compatibility with earlier versions of Igor has been dropped. +Please make sure to use the latest release version of Igor Pro. + PEARL Procedures should be installed according to the regular Igor Pro guidelines. Please read the Igor help `About Igor Pro User Files` for details. - Make a `pearl-procs` directory in your private or shared `User Procedures` folder, and copy the PEARL Procedures distribution there. - Create shortcuts of the `pearl-arpes.ipf` and `pearl-menu.ipf` files, and move them to the `Igor Procedures` folder next to your `User Procedures` folder. -- Find the `HDF5.XOP` extension in the `Igor Pro Folder` under `More Extensions/File Loaders`, create a shortcut, and move the shortcut to the `Igor Extensions` folder next to your `User Procedures` folder. + +Igor Pro 9 imports the HDF5 library by default. For earlier versions: + +- Find the `HDF5.XOP` (`HDF5-64.xop` for 64-bit) extension in the `Igor Pro Folder` under `More Extensions/File Loaders` (`More Extensions (64-bit)/File Loaders`), create a shortcut, and move the shortcut to the `Igor Extensions` folder next to your `User Procedures` folder. - Find the `HDF5 Help.ihf` next to `HDF5.XOP`, create a shortcut, and move the shortcut to the `Igor Help Files` folder next to your `User Procedures` folder. @@ -25,6 +32,6 @@ Please read and respect the respective license agreements. \author Matthias Muntwiler, \version This documentation is compiled from version $(REVISION). 
-\copyright 2009-2016 by [Paul Scherrer Institut](http://www.psi.ch) +\copyright 2009-2022 by [Paul Scherrer Institut](http://www.psi.ch) \copyright Licensed under the [Apache License, Version 2.0](http://www.apache.org/licenses/LICENSE-2.0) */ diff --git a/mm/mm-physconst.ipf b/mm/mm-physconst.ipf index 154ed38..d0aed6c 100644 --- a/mm/mm-physconst.ipf +++ b/mm/mm-physconst.ipf @@ -1,3 +1,4 @@ +#pragma TextEncoding = "UTF-8" #pragma rtGlobals=1 // Use modern global access method. #pragma version = 1.05 diff --git a/pearl/pearl-area-import.ipf b/pearl/pearl-area-import.ipf index 58719ff..ade1234 100644 --- a/pearl/pearl-area-import.ipf +++ b/pearl/pearl-area-import.ipf @@ -1,12 +1,15 @@ -#pragma TextEncoding = "Windows-1252" +#pragma TextEncoding = "UTF-8" #pragma rtGlobals=3 // Use modern global access method and strict wave access. -#pragma IgorVersion = 6.2 +#pragma IgorVersion = 6.36 #pragma ModuleName = PearlAreaImport +#pragma version = 1.13 +#if IgorVersion() < 9.00 #include +#endif #include "pearl-compat" #include "pearl-gui-tools" -// copyright (c) 2013-20 Paul Scherrer Institut +// copyright (c) 2013-21 Paul Scherrer Institut // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. @@ -1195,6 +1198,7 @@ end /// a numeric index is appended to distinguish the results. /// the index starts at 1. existing waves are overwritten. 
/// +/// @return result code: 0 for success, < 0 for error /// function adh5_reduce_brick(source, reduction_func, reduction_param, result_prefix, [progress, nthreads]) wave source @@ -1211,7 +1215,10 @@ function adh5_reduce_brick(source, reduction_func, reduction_param, result_prefi if (ParamIsDefault(nthreads)) nthreads = -1 endif + + dfref base_df = GetDataFolderDFR() variable result = 0 + string wavenames = "" // nx and nz are the image dimensions variable nx, ny, nz, nt @@ -1219,10 +1226,9 @@ function adh5_reduce_brick(source, reduction_func, reduction_param, result_prefi ny = dimsize(source, 1) nz = dimsize(source, 2) // force 4th dimension to singleton (ad_extract_slab handles 3 dimensions only) - nt = 0 + nt = 1 variable nzt = max(nz, 1) * max(nt, 1) - variable izt // set up multi threading if (nthreads < 0) @@ -1239,133 +1245,159 @@ function adh5_reduce_brick(source, reduction_func, reduction_param, result_prefi endif if (progress) - display_progress_panel("data reduction", "extracting data (step 1 of 2)...", nzt) + display_progress_panel("Reduction", "Processing data...", nzt) endif variable iz, it + variable n_sent = 0 + variable n_recvd = 0 + variable tmo = 0 string dfname + dfref dfr variable iw, nw string sw make /n=0 /free /wave result_waves + + iz = 0 + it = 0 - izt = 0 - for (iz = 0; iz < max(nz, 1); iz += 1) - for (it = 0; it < max(nt, 1); it += 1) - dfname = "processing_" + num2str(izt) - newdatafolder /s $dfname - ad_extract_slab(source, nan, nan, nan, nan, iz, iz, "image", pscale=1) - wave image + do + // fill the processing queue up to a maximum number of folders + if (n_sent < max(1, nthreads) * 10 + n_recvd) + if (iz < nz) + if (it < nt) + // load a slab into a temporary folder + dfname = "processing_" + num2str(n_sent) + NewDataFolder /s $dfname + ad_extract_slab(source, nan, nan, nan, nan, iz, iz, "image", pscale=1) + wave image + variable /g r_index = iz + variable /g s_index = it + string /g func_param = reduction_param - // send to 
processing queue - variable /g r_index = iz - variable /g s_index = it - string /g func_param = reduction_param - - if (nthreads > 0) - WaveClear image - ThreadGroupPutDF threadGroupID, : - else - processing_folders[izt] = GetDataFolderDFR() - string param = reduction_param - wave /wave reduced_waves = reduction_func(image, param) - variable /g func_result = numpnts(reduced_waves) - adh5_get_result_waves(reduced_waves, "redw_", 0) - WaveClear image, reduced_waves - setdatafolder :: - endif - - izt += 1 - // progress window - if (progress) - if (update_progress_panel(izt)) - result = -4 // user abort - break - endif - endif - endfor - endfor - - if (progress) - update_progress_panel(0, message="processing data (step 2 of 2)...") - endif - - dfref dfr - for (izt = 0; (izt < nzt) && (result == 0); izt += 1) - if (nthreads > 0) - do - dfr = ThreadGroupGetDFR(threadGroupID, 1000) - if (DatafolderRefStatus(dfr) != 0) - break - endif - if (progress) - if (update_progress_panel(izt)) - result = -4 // user abort - break + if (nthreads > 0) + // send to thread group + WaveClear image + ThreadGroupPutDF threadGroupID, : + else + // process immediately in single-thread mode + processing_folders[n_sent] = GetDataFolderDFR() + string param = func_param + wave /wave reduced_waves = reduction_func(image, param) + variable /g func_result = numpnts(reduced_waves) + adh5_get_result_waves(reduced_waves, "redw_", 0) + WaveClear image, reduced_waves + setdatafolder :: endif - endif - while (1) - else - dfr = processing_folders[izt] - if (progress) - if (update_progress_panel(izt)) - result = -4 // user abort - break + + iz += 1 + n_sent += 1 + tmo = 0 + else + iz += 1 + it = 0 endif endif + else + // throttle the loop if processing is slow + tmo = min(100, tmo + 10) endif - - if (result != 0) - break - endif - - nvar rr = dfr:r_index - nvar ss = dfr:s_index - nvar func_result = dfr:func_result - if (func_result < 1) - result = -3 // dimension reduction error + // receive a slab from the 
processing queue + if (n_recvd < nzt) + if (nthreads > 0) + dfr = ThreadGroupGetDFR(threadGroupID, tmo) + else + dfr = processing_folders[n_recvd] + processing_folders[n_recvd] = $"" + endif + + if (DatafolderRefStatus(dfr) != 0) + // access results folder + nvar rr = dfr:r_index + nvar ss = dfr:s_index + nvar func_result = dfr:func_result + + if (func_result < 1) + print "error during data reduction." + result = -3 + break + endif + + // initialize result waves just once + if (numpnts(result_waves) == 0) + redimension /n=(func_result) result_waves + for (iw = 0; iw < func_result; iw += 1) + sw = "redw_" + num2str(iw) + wave profile = dfr:$sw + sw = "ReducedData" + num2str(iw+1) + make /n=(dimsize(profile, 0), nz, nt) /d /o $sw + wave data = $sw + setdimlabel 0, -1, $getdimlabel(profile, 0, -1), data + setscale /p x dimoffset(profile, 0), dimdelta(profile, 0), waveunits(profile, 0), data + setscale /p y dimoffset(source, 2), dimdelta(source, 2), waveunits(source, 2), data + setscale /p z dimoffset(source, 3), dimdelta(source, 3), waveunits(source, 3), data + setscale d 0, 0, waveunits(profile, -1), data + note data, note(profile) + result_waves[iw] = data + endfor + endif + + // copy results + for (iw = 0; iw < func_result; iw += 1) + sw = "redw_" + num2str(iw) + wave profile = dfr:$sw + wave data = result_waves[iw] + data[][rr][ss] = profile[p] + endfor + + n_recvd += 1 + KillDataFolder /Z dfr + endif + else + // processing complete break endif - if (numpnts(result_waves) == 0) - redimension /n=(func_result) result_waves - for (iw = 0; iw < func_result; iw += 1) - sw = "redw_" + num2str(iw) - wave profile = dfr:$sw - sw = result_prefix + num2str(iw+1) - make /n=(dimsize(profile, 0), nz, nt) /d /o $sw - wave data = $sw - setdimlabel 0, -1, $getdimlabel(profile, 0, -1), data - setscale /p x dimoffset(profile, 0), dimdelta(profile, 0), waveunits(profile, 0), data - setscale /p y dimoffset(source, 2), dimdelta(source, 2), waveunits(source, 2), data - setscale /p z 
dimoffset(source, 3), dimdelta(source, 3), waveunits(source, 3), data - setscale d 0, 0, waveunits(profile, -1), data - result_waves[iw] = data - endfor + // update progress window + if (progress) + if (update_progress_panel(n_recvd)) + print "user abort" + result = -4 + break + endif endif - for (iw = 0; iw < func_result; iw += 1) - sw = "redw_" + num2str(iw) - wave profile = dfr:$sw - wave data = result_waves[iw] - data[][rr][ss] = profile[p] - endfor - endfor - + while ((n_recvd < nzt) && (result == 0)) + + // clean up if (nthreads > 0) variable tstatus = ThreadGroupRelease(threadGroupID) if (tstatus == -2) - result = -5 // thread did not terminate properly + print "error: thread did not terminate properly." + result = -5 endif - else - for (izt = 0; izt < nzt; izt += 1) - KillDataFolder /Z processing_folders[izt] - endfor endif + // finalize results + nw = numpnts(result_waves) + wavenames = "" + for (iw = 0; iw < nw; iw += 1) + wave /z data = result_waves[iw] + if (WaveExists(data)) + if (nz == 1) + redimension /n=(-1, 0, 0) data + elseif (nt == 1) + redimension /n=(-1, nz, 0) data + endif + wavenames += nameofwave(data) + ";" + endif + endfor + if (progress) kill_progress_panel() endif + setdatafolder base_df return result end @@ -1979,12 +2011,12 @@ function adh5_scale_scienta(data) case 1: // Angular45 ALow = -45/2 AHigh = +45/2 - AUnit = "°" + AUnit = "°" break case 2: // Angular60 ALow = -60/2 AHigh = +60/2 - AUnit = "°" + AUnit = "°" break endswitch endif diff --git a/pearl/pearl-data-explorer.ipf b/pearl/pearl-data-explorer.ipf index f2ed939..69ff26a 100644 --- a/pearl/pearl-data-explorer.ipf +++ b/pearl/pearl-data-explorer.ipf @@ -2,17 +2,15 @@ #pragma rtGlobals=3 // Use modern global access method and strict wave access. 
#pragma IgorVersion = 6.36 #pragma ModuleName = PearlDataExplorer -#pragma version = 1.60 +#pragma version = 2.1 +#include , version >= 1.14 #include "pearl-area-import" #include "pearl-area-profiles" #include "pearl-area-display" #include "pearl-compat" #include "pearl-pshell-import" -#if exists("MFR_OpenResultFile") -#include "pearl-matrix-import" -#endif -// copyright (c) 2013-20 Paul Scherrer Institut +// copyright (c) 2013-22 Paul Scherrer Institut // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. @@ -28,7 +26,6 @@ /// @arg area detector (HDF5) files from scienta analyser and prosilica cameras (if HDF5.xop is installed). /// @arg igor text files from s-scans and otf-scans. /// @arg pshell (HDF5) data files (if HDF5.xop is installed). -/// @arg matrix STM files (if MatrixFileReader.xop is installed). /// @namespace PearlDataExplorer /// @brief preview and import panel for PEARL data @@ -41,27 +38,33 @@ static strconstant package_path = "root:packages:pearl_explorer:" static strconstant ks_filematch_adh5 = "*.h5" static strconstant ks_filematch_pshell = "psh*.h5" static strconstant ks_filematch_itx = "*.itx" -static strconstant ks_filematch_mtrx = "*_mtrx" +/// show the pearl data explorer window +/// +/// create a pearl data explorer window or bring the existing one to the front. +/// if a new window is created, also initialize all package variables and load package preferences. 
+/// function pearl_data_explorer() - init_package() - load_prefs() - execute /q/z "PearlDataExplorer()" + DoWindow /HIDE=0 /F PearlDataExplorer + if (v_flag == 0) + init_package() + load_prefs() + execute /q/z "PearlDataExplorer()" + MakeListIntoHierarchicalList("PearlDataExplorer", "lb_contents", "PearlDataExplorer#hlp_contents_open", selectionMode=WMHL_SelectionNonContinguous, pathSeparator="/") + WMHL_AddColumns("PearlDataExplorer", "lb_contents", 1) + WMHL_SetNotificationProc("PearlDataExplorer", "lb_contents", "PearlDataExplorer#hlp_contents_selection", WMHL_SetSelectNotificationProc) + ListBox lb_contents win=PearlDataExplorer, widths={6,60,20} + update_controls() + endif end -/// initialize the global variables of the data explorer. -/// -/// initializes the global variables and data folder for this procedure file -/// must be called once before the panel is created -/// warning: this function overwrites previous values static function init_package() - - dfref savefolder = GetDataFolderDFR() + dfref save_df = GetDataFolderDFR() SetDataFolder root: newdatafolder /o/s packages newdatafolder /o/s $package_name if (exists("v_InitPanelDone") == 2) - SetDataFolder savefolder + SetDataFolder save_df return 0 endif @@ -83,16 +86,49 @@ static function init_package() // the list items can contain wildcards for StringMatch s_preview_pvs = "*OP:CURRENT*;*Stats*Total*;*KEITHLEY*READOUT;*CADC*" + redimension /n=26 attr_filter_summary + attr_filter_summary[0] = "MonoEnergy" + attr_filter_summary[1] = "MonoGrating" + attr_filter_summary[2] = "ExitSlit" + attr_filter_summary[3] = "FrontendHSize" + attr_filter_summary[4] = "FrontendVSize" + attr_filter_summary[5] = "ManipulatorPhi" + attr_filter_summary[6] = "ManipulatorTheta" + attr_filter_summary[7] = "ManipulatorTilt" + attr_filter_summary[8] = "ManipulatorX" + attr_filter_summary[9] = "ManipulatorY" + attr_filter_summary[10] = "ManipulatorZ" + attr_filter_summary[11] = "PassEnergy" + attr_filter_summary[12] = "LensMode" 
+ attr_filter_summary[13] = "ScientaDwellTime" + attr_filter_summary[14] = "ScientaCenterEnergy" + attr_filter_summary[15] = "ScientaChannelBegin" + attr_filter_summary[16] = "ScientaChannelEnd" + attr_filter_summary[17] = "ScientaSliceBegin" + attr_filter_summary[18] = "ScientaSliceEnd" + attr_filter_summary[19] = "ScientaNumChannels" + attr_filter_summary[20] = "StepSize" + attr_filter_summary[21] = "ScientaNumSlices" + attr_filter_summary[22] = "ManipulatorTempA" + attr_filter_summary[23] = "ManipulatorTempB" + attr_filter_summary[24] = "RefCurrent" + attr_filter_summary[25] = "SampleCurrent" + // non-persistent strings and variables + string /g s_short_filepath = "" // abbreviated directory path + string /g s_selected_file = "" + string /g s_selected_dataset = "" string /g s_preview_file = "" // file or folder name of the current preview string /g s_preview_source = "" // data source, e.g. EPICS channel name, of the current preview string /g s_profiles_graph = "" // window name of the current preview if the data is two-dimensional string /g s_preview_trace_graph = "" // window name of the current preview if the data is one-dimensional + string /g s_preview_graph = "" // window name of the most recent preview graph string /g s_file_info = "" // description of selected file + string /g s_result = "" // result of last operation variable/g v_InitPanelDone = 1 - SetDataFolder savefolder + SetDataFolder save_df end /// save persistent package data to the preferences file. 
@@ -101,7 +137,7 @@ end /// the data saved in the file are: data file path, attributes filter /// static function save_prefs() - dfref saveDF = GetDataFolderDFR() + dfref save_df = GetDataFolderDFR() dfref df = $package_path if (DataFolderRefStatus(df) == 1) string fullPath = SpecialDirPath("Packages", 0, 0, 0) @@ -113,19 +149,19 @@ static function save_prefs() SaveData /O /Q /J=objects fullPath KillPath/Z tempPackagePrefsPath endif - SetDataFolder saveDF + SetDataFolder save_df end static function load_prefs() // loads persistent package data from the preferences file // the preferences file is an Igor packed experiment file in a special preferences folder - dfref saveDF = GetDataFolderDFR() + dfref save_df = GetDataFolderDFR() variable result = -1 setdatafolder root: NewDataFolder /O/S packages NewDataFolder /O/S $package_name - dfref packageDF = GetDataFolderDFR() + dfref package_df = GetDataFolderDFR() string fullPath = SpecialDirPath("Packages", 0, 0, 0) fullPath += package_name @@ -140,13 +176,13 @@ static function load_prefs() endif if (result == 0) - svar /sdfr=packageDF filepath = s_filepath + svar /sdfr=package_df filepath = s_filepath NewPath /O/Z pearl_explorer_filepath, filepath + update_filepath() update_filelist() - update_datasets() endif - SetDataFolder saveDF + SetDataFolder save_df return result end @@ -159,7 +195,6 @@ end /// @arg 1 PShell file (HDF5, name starts with psh_) /// @arg 2 area detector HDF5 file /// @arg 3 Igor text (itx) file -/// @arg 4 Matrix STM file (*_mtrx) /// static function pearl_file_type(filename) string filename @@ -170,22 +205,31 @@ static function pearl_file_type(filename) return 2 elseif (StringMatch(filename, ks_filematch_itx)) return 3 -#if exists("MFR_OpenResultFile") - elseif (StringMatch(filename, ks_filematch_mtrx)) - return 4 -#endif else return 0 endif end +/// update the file path after path change +/// +/// read the path info from pearl_explorer_filepath +/// and update the path control +/// +static function 
update_filepath() + PathInfo /S pearl_explorer_filepath + svar filepath = $(package_path + "s_filepath") + svar shortpath = $(package_path + "s_short_filepath") + filepath = s_path + shortpath = shorten_filepath(filepath, 40) +end + /// read a list of PEARL files from the file system /// /// wtFiles and wSelectedFiles in the package data folder are updated. /// only files for which pearl_file_type() returns non-zero are listed. /// static function update_filelist() - dfref saveDF = GetDataFolderDFR() + dfref save_df = GetDataFolderDFR() string all_files wave /t wtFiles = $(package_path + "wtFiles") @@ -209,500 +253,112 @@ static function update_filelist() redimension /n=(numpnts(wtFiles)) wSelectedFiles wSelectedFiles = 0 - setdatafolder saveDF + setdatafolder save_df end -static function update_datasets() - // updates the list of imported datasets. - // a dataset means any top-level data folder - // which includes a string variable named pearl_explorer_import. - dfref saveDF = GetDataFolderDFR() +// ====== metadata ====== - setdatafolder root: - dfref rootdf = GetDataFolderDFR() - setdatafolder $package_path - dfref privatedf = GetDataFolderDFR() - - wave /t wtDatasets - wave wSelectedDatasets - variable maxdf = CountObjectsDFR(rootdf, 4) - redimension /n=(maxdf) wtDatasets - - variable idf = 0 - variable ndf = 0 - string sdf - - do - sdf = GetIndexedObjNameDFR(rootdf, 4, idf) - if (strlen(sdf) >= 1) - setdatafolder rootdf - setdatafolder $sdf - svar /z importer = pearl_explorer_import - if (svar_exists(importer)) - wtDatasets[ndf] = sdf - ndf += 1 - endif - else - break - endif - idf += 1 - while(1) - - redimension /n=(ndf) wtDatasets, wSelectedDatasets - wSelectedDatasets = 0 - sort wtDatasets, wtDatasets - - setdatafolder saveDF -end - -static function preview_file(filename) +/// load the internal structure of a file +/// +/// this loads metadata for updating the panels. 
+/// +/// for a pshell file, metadata includes: +/// - list of all datasets with types and dimensions +/// - general group +/// +/// @return 0 if successful +/// -1 if no data was loaded because the file was not recognized, +/// -2 if no data is found in file +/// +static function get_file_info(filename) string filename - dfref saveDF = GetDataFolderDFR() - dfref previewDF = $package_path - - killStrings /z authors, pgroup, proposal, proposer, sample + dfref save_df = GetDataFolderDFR() + dfref package_df = $package_path variable ft = pearl_file_type(filename) + variable result = 0 + switch(ft) case 1: - wave /z image = preview_pshell_file(filename) - break case 2: - wave /z image = preview_hdf_file(filename) - break - case 3: - wave /z image = preview_itx_file(filename) - break - case 4: - wave /z image = preview_mtrx_file(filename) + dfref file_df = get_pshell_info("pearl_explorer_filepath", filename) + result = hl_contents_update(file_df) + result = result >= 3 ? 0 : -2 break default: - wave /z image = $"" + hl_contents_clear() + dfref file_df = package_df:file_info + KillDataFolder /z file_df + result = -1 endswitch - if (WaveExists(image)) - string graphname = show_preview_graph(image) - // preset ELOG panel - if available - if (exists("PearlElog#set_panel_attributes") == 6) - string cmd - sprintf cmd, "PearlElog#set_panel_attributes(\"\", \"File=%s\")", ParseFilePath(0, filename, ":", 1, 0) - execute /Q/Z cmd - if (strlen(graphname) > 0) - sprintf cmd, "PearlElog#set_panel_graphs(\"\", \"%s\")", graphname - execute /Q/Z cmd - endif - svar /sdfr=previewDF /z authors - if (svar_Exists(authors)) - if (strlen(authors)>=1) - sprintf cmd, "PearlElog#set_panel_attributes(\"\", \"author=%s\")", authors - execute /Q/Z cmd - endif - endif - svar /sdfr=previewDF /z pgroup - if (svar_Exists(pgroup)) - if (strlen(pgroup)>=1) - sprintf cmd, "PearlElog#set_panel_attributes(\"\", \"p-group=%s\")", pgroup - execute /Q/Z cmd - endif - endif - svar /sdfr=previewDF /z proposal - 
if (svar_Exists(proposal)) - if (strlen(proposal)>=1) - sprintf cmd, "PearlElog#set_panel_attributes(\"\", \"project=%s\")", proposal - execute /Q/Z cmd - endif - endif - svar /sdfr=previewDF /z proposer - svar /sdfr=previewDF /z sample - if (svar_Exists(sample)) - if (strlen(sample)>=1) - sprintf cmd, "PearlElog#set_panel_attributes(\"\", \"sample=%s\")", sample - execute /Q/Z cmd - endif - endif + setdatafolder save_df + return result +end + +/// load attributes +static function attributes_notebook(filename) + string filename + + dfref save_df = GetDataFolderDFR() + dfref temp_df = NewFreeDataFolder() + + load_file(filename, options="mode:load_diags", dest_df=temp_df, quiet=1) + svar /sdfr=temp_df /z s_loaded_datasets + string scan + dfref scan_df + if (SVAR_Exists(s_loaded_datasets) && (strlen(s_loaded_datasets) >= 4)) + scan = StringFromList(0, psh5_extract_scan_paths(s_loaded_datasets), ";") + scan_df = psh5_dataset_to_folder(temp_df, scan) + else + scan_df = temp_df + endif + + dfref attr_df = ps_find_attr_folder(scan_df) + if (DataFolderRefStatus(attr_df)) + extract_attributes(attr_df, dest_df=temp_df) + wave /t /sdfr=temp_df /z attr_names + wave /t /sdfr=temp_df /z attr_values + if (WaveExists(attr_names) && WaveExists(attr_values)) + create_attributes_notebook(attr_names, attr_values, filename) endif endif - setdatafolder saveDF - return 0 + setdatafolder save_df end -/// load the preview of a PShell HDF5 file. +/// extract summary from attribute waves /// -/// the preview is an arbitrary detector image extracted from the file, see adh5_load_preview(). -/// the preview is loaded to the preview_image wave in the pear_explorer data folder. +/// by default, all existing attributes are copied. +/// if a text wave attr_filter exists in the pear_explorer folder, only the attributes referenced therein are copied. +/// to set up a filter, duplicate the attr_names wave of a template dataset, and remove unwanted items. 
/// -/// the s_file_info string is updated with information about the scan dimensions. +/// @param attr_df data folder which contains the original data, e.g. the attr, diags or snaps folder in pshell files. +/// @param dest_df destination folder. the output is written to the attr_names and attr_values waves. +/// default = package folder. +/// @param attr_filter (text wave) list of attributes allowed in the output. +/// default = use attr_filter of package folder. +/// @param include_datawaves @arg 1 (default) include data waves (any numeric wave which has a PV=name note). +/// @arg 0 don't include attributes from data waves. +/// @param include_infowaves @arg 1 (default) include attributes from info waves (IN, ID, IV, IU). +/// @arg 0 don't include attributes from info waves. /// -/// @param filename name of a file in the directory specified by the pearl_explorer_filepath path object. -/// -/// @return wave reference of the preview image -/// -static function /wave preview_pshell_file(filename) - string filename - - dfref saveDF = GetDataFolderDFR() - - setdatafolder $package_path - dfref previewDF = GetDataFolderDFR() - svar s_preview_file - svar s_preview_source - svar /z s_file_info - if (! 
svar_exists(s_file_info)) - string /g s_file_info +static function extract_attributes(attr_df, [dest_df, attr_filter, include_datawaves, include_infowaves]) + dfref attr_df + dfref dest_df + wave /t attr_filter + variable include_datawaves + variable include_infowaves + + dfref save_df = GetDataFolderDFR() + dfref package_df = $package_path + + if (ParamIsDefault(dest_df)) + dest_df = GetDataFolderDFR() endif - - dfref tempDF = NewFreeDataFolder() - setdatafolder tempDF - string dataname - dataname = psh5_load_preview("pearl_explorer_filepath", filename) - - s_preview_file = filename - s_preview_source = "" - - wave /z data = $dataname - if (waveexists(data)) - duplicate /o data, previewDF:preview_image - else - print "no data found in file " + filename - endif - - if (strlen(s_preview_file) > 0) - s_file_info = psh5_load_info("pearl_explorer_filepath", filename) - setdatafolder previewDF - psh5_load_general_group("pearl_explorer_filepath", filename) - else - s_file_info = "" - endif - - dfref attrDF = tempDF:attr - if (DataFolderRefStatus(attrDF)) - preview_attributes(attrDF) - endif - - setdatafolder saveDF - wave /z /sdfr=previewDF preview_image - return preview_image -end - -/// load the preview of a PEARL HDF5 file. -/// -/// the preview is an arbitrary detector image extracted from the file, see adh5_load_preview(). -/// the preview is loaded to the preview_image wave in the pear_explorer data folder. -/// -/// the s_file_info string is updated with information about the scan dimensions. -/// -/// @param filename name of a file in the directory specified by the pearl_explorer_filepath path object. 
-/// -/// @return wave reference of the preview image -/// -static function /wave preview_hdf_file(filename) - string filename - - dfref saveDF = GetDataFolderDFR() - setdatafolder $package_path - svar s_preview_file - svar s_preview_source - adh5_load_preview("preview_image", "pearl_explorer_filepath", filename) - s_preview_file = filename - s_preview_source = "" - wave /z preview_image - - svar /z s_file_info - if (! svar_exists(s_file_info)) - string /g s_file_info - endif - if (strlen(s_preview_file) > 0) - s_file_info = adh5_load_info("pearl_explorer_filepath", filename) - else - s_file_info = "" - endif - - if (DataFolderExists("attr")) - setdatafolder attr - preview_attributes(GetDataFolderDFR()) - setdatafolder :: - endif - - setdatafolder saveDF - return preview_image -end - -/// load the preview of a general ITX file. -/// -/// the function is designed for PEARL OTF and EPICS scan data converted from MDA files. -/// the function picks the first wave whose PV note matches one from the global string s_preview_pvs -/// (see @ref preview_datafolder and @ref init_package). -/// -/// the preview is loaded to the preview_image wave in the pearl_explorer data folder. -/// the s_file_info string is updated with information about the scan dimensions. -/// -/// @note: the ITX files should load their waves into the current data folder (a "free" data folder). -/// some early versions of PEARL ITX data files created a data folder of their own. -/// both ways are allowed, while the first one is preferred. -/// on return, the current data folder must point to either the original free folder or the newly created one. -/// -/// @param filename name of a file in the directory specified by the pearl_explorer_filepath path object. -/// -/// @return wave reference of the preview trace. -/// empty wave reference if the function failed. 
-/// -static function /wave preview_itx_file(filename) - string filename - - dfref saveDF = GetDataFolderDFR() - setdatafolder $package_path - svar s_preview_file - svar s_preview_source - wave preview_image - - dfref dataDF = newfreedatafolder() - setdatafolder dataDF - LoadWave /t/p=pearl_explorer_filepath/q filename - s_preview_file = s_filename - s_preview_source = "" - - preview_datafolder() - preview_attributes(dataDF, include_datawaves=0) - - setdatafolder saveDF - return preview_image -end - -/// load the preview of a Matrix STM file. -/// -/// the preview is loaded to the preview_image wave in the pearl_explorer data folder. -/// -/// the s_file_info string is updated with information about the scan dimensions. -/// -/// this function requires the MatrixFileReader.xop and pearl-matrix-import.ipf to be loaded. -/// otherwise it will return an empty wave reference. -/// -/// @param filename name of a file in the directory specified by the pearl_explorer_filepath path object. -/// -/// @return wave reference of the preview image. -/// empty wave reference if the function failed. 
-/// -static function /wave preview_mtrx_file(filename) - string filename - -#if exists("MFR_OpenResultFile") - dfref saveDF = GetDataFolderDFR() - setdatafolder $package_path - variable /g V_MatrixFileReaderOverwrite = 1 - variable /g V_MatrixFileReaderFolder = 0 - variable /g V_MatrixFileReaderDouble = 0 - svar s_preview_file - svar s_preview_source - string datanames - string dataname - datanames = mtrx_load_preview("preview", "pearl_explorer_filepath", filename) - if (strlen(datanames) > 0) - s_preview_file = filename - - dataname = StringFromList(0, datanames) - wave data = $dataname - duplicate /o $dataname, preview_image - s_preview_source = StringByKey("Dataset", note(data), "=", "\r") - - svar /z s_file_info - if (svar_exists(s_file_info)) - s_file_info = "" - endif - - variable i - variable n = ItemsInList(datanames) - string s - for (i = 0; i < n; i += 1) - s = StringFromList(i, datanames) - killwaves /z $s - endfor - endif - wave /z preview_image - setdatafolder saveDF -#else - wave /z preview_image = $"" -#endif - return preview_image -end - -static function extract_preview_image(data, preview) - // extracts a preview image from a wave of arbitrary dimension - wave data - wave preview - - variable z1, z2 - - // extract image - switch (WaveDims(data)) - case 1: - redimension /n=(numpnts(data)) preview - preview = data[p] - break - case 2: - redimension /n=(dimsize(data, 0), dimsize(data, 1)) preview - preview = data - break - case 3: - redimension /n=(dimsize(data, 0), dimsize(data, 1)) preview - z1 = floor(DimSize(data, 2) / 2) - z2 = z1 - wave slab = ad_extract_slab(data, nan, nan, nan, nan, z1, z2, "", pscale=1) - preview = slab - break - case 4: - // not implemented - endswitch - - switch (WaveDims(data)) - case 4: - case 3: - case 2: - setscale /p y dimoffset(data, 1), dimdelta(data, 1), waveunits(data, 1), preview - case 1: - setscale /p x dimoffset(data, 0), dimdelta(data, 0), waveunits(data, 0), preview - setscale d 0, 0, waveunits(data, -1), 
preview - endswitch -end - -static function preview_dataset(datasetname) - string datasetname // name of a data folder under root - - dfref saveDF = GetDataFolderDFR() - - if (!DataFolderExists("root:" + datasetname)) - return -1 - endif - setdatafolder root: - setdatafolder $datasetname - dfref datadf = GetDataFolderDFR() - wave /z data - - setdatafolder $package_path - svar s_preview_file - svar s_preview_source - wave preview_image - if (WaveExists(data)) - s_preview_file = datasetname - s_preview_source = "" - extract_preview_image(data, preview_image) - show_preview_graph(preview_image) - else - preview_image = nan - s_preview_file = datasetname - setdatafolder datadf - preview_datafolder() - show_preview_graph(preview_image) - endif - - // attributes - setdatafolder datadf - if (DataFolderExists("attr")) - setdatafolder attr - preview_attributes(GetDataFolderDFR()) - else - preview_attributes(GetDataFolderDFR(), include_datawaves=0) - endif - - setdatafolder saveDF - return 0 -end - -static function preview_datafolder() - // preview data in the current data folder - dfref saveDF = GetDataFolderDFR() - - setdatafolder $package_path - svar s_preview_file - svar s_preview_source - svar s_preview_pvs - wave preview_image - - setdatafolder saveDF - - // select a wave to display - // consider only double-precision waves, i.e. 
ignore text and other special waves - // filter by matching PV name to s_preview_pvs - string d_names = WaveList("*", ";", "DP:1") - variable nw = ItemsInList(d_names, ";") - variable npv = ItemsInList(s_preview_pvs, ";") - variable iw, ipv - string wname, wnote, pv_name, pv_match - for (iw = 0; iw < nw; iw += 1) - wname = StringFromList(iw, d_names, ";") - wnote = note($wname) - pv_name = StringByKey("PV", wnote, "=", "\r") - // find matching data wave by PV name - for (ipv = 0; ipv < npv; ipv += 1) - pv_match = StringFromList(ipv, s_preview_pvs) - if (StringMatch(pv_name, pv_match)) - wave data = $wname - s_preview_source = pv_name - extract_preview_image(data, preview_image) - preview_setscale_x(data, preview_image) - npv = 0 - nw = 0 - endif - endfor - endfor - - setdatafolder saveDF -end - -static function preview_setscale_x(data, preview) - // sets the approximate x scale of OTF data. - // requires an Axis1 tag with name of x wave in the wave note. - // if any of these conditions is true, the function does not change the scaling: - // 1) Axis1 tag or referenced wave is missing. - // 2) preview wave is not set to point scaling. - // 3) x wave is not monotonic (90% of the steps in the same direction). 
- wave data - wave preview - - if ((DimOffset(preview, 0) == 0) && (DimDelta(preview, 0) == 1)) - string xname = StringByKey("Axis1", note(data), "=", "\r") - wave /z xwave = $xname - if (WaveExists(xwave)) - // check for monotonicity - variable monotonic = 0 - duplicate /free xwave, xdiff - differentiate /p xwave /D=xdiff - duplicate /free xdiff, xflag - xflag = xdiff > 0 - monotonic = sum(xflag) > numpnts(xwave) * 0.9 - xflag = xdiff < 0 - monotonic = monotonic || (sum(xflag) > numpnts(xwave) * 0.9) - if (monotonic) - setscale /i x xwave[0], xwave[numpnts(xwave)-1], waveunits(xwave, -1), preview - endif - endif - endif -end - -static function preview_attributes(attr_folder, [dest_folder, attr_filter, include_datawaves, include_infowaves]) - // copies the first elements of attributes in the specified folder to the preview waves - // by default, all existing attributes are copied - // if a text wave attr_filter exists in the pear_explorer folder, only the attributes referenced therein are copied - // to set up a filter, duplicate the attr_names wave of a template dataset, and remove unwanted items - dfref attr_folder // data folder which contains the attribute waves - dfref dest_folder // destination folder. 
the output is written to the attr_names and attr_values waves - // default = package folder - wave /t attr_filter // list of attributes allowed in the output - // default = use attr_filter of package folder - variable include_datawaves // 1 (default) = include data waves (any numeric wave which has a PV=name note) - // 0 = don't include attributes from data waves - variable include_infowaves // 1 (default) = include attributes from info waves (IN, ID, IV, IU) - // 0 = don't include attributes from info waves - - dfref saveDF = GetDataFolderDFR() - setdatafolder $package_path - - if (ParamIsDefault(dest_folder)) - dest_folder = GetDataFolderDFR() // package folder - endif - if (ParamIsDefault(attr_filter)) - wave /t /z attr_filter + if (ParamIsDefault(attr_filter) || !WaveExists(attr_filter)) + wave /t /sdfr=package_df /z attr_filter endif if (ParamIsDefault(include_datawaves)) include_datawaves = 1 @@ -711,7 +367,7 @@ static function preview_attributes(attr_folder, [dest_folder, attr_filter, inclu include_infowaves = 1 endif - setdatafolder dest_folder + setdatafolder dest_df wave /t /z attr_names, attr_values if (!WaveExists(attr_names) || !WaveExists(attr_values)) make /n=(1) /o /t attr_names, attr_values @@ -719,7 +375,7 @@ static function preview_attributes(attr_folder, [dest_folder, attr_filter, inclu attr_names = "" attr_values = "" - setdatafolder attr_folder + setdatafolder attr_df wave /t /z IN wave /t /z ID wave /t /z IV @@ -811,55 +467,22 @@ static function preview_attributes(attr_folder, [dest_folder, attr_filter, inclu endif endfor - setdatafolder saveDF -end - -static function display_dataset(datasetname) - // displays the graph of a loaded dataset in its own window - string datasetname // name of a data folder under root - - dfref saveDF = GetDataFolderDFR() - - if (!DataFolderExists("root:" + datasetname)) - return -1 - endif - setdatafolder root: - setdatafolder $datasetname - dfref datadf = GetDataFolderDFR() - wave /z data - if 
(!WaveExists(data)) - wave /z data = data1 - endif - - if (WaveExists(data)) - switch(WaveDims(data)) - case 2: - ad_display_profiles(data) - break - case 3: - ad_display_slice(data) - ad_brick_slicer(data) - break - endswitch - endif - - setdatafolder saveDF - return 0 + setdatafolder save_df end function test_attributes_notebook() dfref df = GetDataFolderDFR() wave /t /sdfr=df attr_names wave /t /sdfr=df attr_values - attributes_notebook(attr_names, attr_values, GetDataFolder(0)) + create_attributes_notebook(attr_names, attr_values, GetDataFolder(0)) end -static function attributes_notebook(attr_names, attr_values, title) +static function create_attributes_notebook(attr_names, attr_values, title) wave /t attr_names wave /t attr_values string title - dfref saveDF = GetDataFolderDFR() + dfref save_df = GetDataFolderDFR() setdatafolder $package_path wave /t/z attr_filter, attr_filter_summary @@ -890,7 +513,7 @@ static function attributes_notebook(attr_names, attr_values, title) notebook_add_attributes(name, $"", attr_names, attr_values) notebook $name selection={startOfFile,startOfFile}, findText={"",1} - setdatafolder saveDF + setdatafolder save_df end static function notebook_add_attributes(notebook_name, attr_filter, attr_names, attr_values) @@ -920,18 +543,398 @@ static function notebook_add_attributes(notebook_name, attr_filter, attr_names, endfor end +/// send general metadata to ELOG panel - if available +/// +/// the following metatdata are sent. +/// they must be present as strings in the specified data folder: +/// +/// | ELOG parameter | global string | function argument | +/// | --- | --- | --- | +/// | file | s_filepath | filename | +/// | graph attachment | | graphname | +/// | author | authors | | +/// | p-group | pgroup | | +/// | project | proposal | | +/// | sample | sample | | +/// +/// @param file_df data folder that contains the metadata. +/// +/// @param filename override file path read from s_filepath global string variable. 
+/// if neither is declared, the file name is reset to empty field. +/// +/// @param graphname select this graph window for attaching. +/// default: do not change the selection. +/// +static function set_elog_attributes(file_df, [filename, graphname]) + dfref file_df + string filename + string graphname + + if (ParamIsDefault(filename)) + svar /sdfr=file_df /z loaded_file=s_filepath + if (svar_Exists(loaded_file)) + filename = loaded_file + else + filename = "" + endif + endif + + if (ParamIsDefault(graphname)) + graphname = "" + endif + + string cmd + + if (exists("PearlElog#set_panel_attributes") == 6) + sprintf cmd, "PearlElog#set_panel_attributes(\"\", \"File=%s\")", ParseFilePath(0, filename, ":", 1, 0) + execute /Q/Z cmd + if ((strlen(graphname) > 0) && (WinType(graphname) == 1)) + sprintf cmd, "PearlElog#set_panel_graphs(\"\", \"%s\")", graphname + execute /Q/Z cmd + endif + svar /sdfr=file_df /z authors + if (svar_Exists(authors)) + if (strlen(authors)>=1) + sprintf cmd, "PearlElog#set_panel_attributes(\"\", \"author=%s\")", authors + execute /Q/Z cmd + endif + endif + svar /sdfr=file_df /z pgroup + if (svar_Exists(pgroup)) + if (strlen(pgroup)>=1) + sprintf cmd, "PearlElog#set_panel_attributes(\"\", \"p-group=%s\")", pgroup + execute /Q/Z cmd + endif + endif + svar /sdfr=file_df /z proposal + if (svar_Exists(proposal)) + if (strlen(proposal)>=1) + sprintf cmd, "PearlElog#set_panel_attributes(\"\", \"project=%s\")", proposal + execute /Q/Z cmd + endif + endif + svar /sdfr=file_df /z proposer + svar /sdfr=file_df /z sample + if (svar_Exists(sample)) + if (strlen(sample)>=1) + sprintf cmd, "PearlElog#set_panel_attributes(\"\", \"sample=%s\")", sample + execute /Q/Z cmd + endif + endif + endif +end + +// ====== preview ====== + +static function preview_file(filename) + string filename + + dfref save_df = GetDataFolderDFR() + dfref preview_df = $package_path + + killStrings /z authors, pgroup, proposal, proposer, sample + + variable ft = 
pearl_file_type(filename) + switch(ft) + case 1: + wave /z image = preview_pshell_file(filename) + break + case 2: + wave /z image = preview_hdf_file(filename) + break + case 3: + wave /z image = preview_itx_file(filename) + break + default: + wave /z image = $"" + endswitch + + if (WaveExists(image)) + show_preview_graph(image) + endif + + setdatafolder save_df + return 0 +end + +/// load the preview of a PShell HDF5 file. +/// +/// the preview is an arbitrary detector image extracted from the file, see adh5_load_preview(). +/// the preview is loaded to the preview_image wave in the pear_explorer data folder. +/// +/// the s_file_info string is updated with information about the scan dimensions. +/// +/// @param filename name of a file in the directory specified by the pearl_explorer_filepath path object. +/// +/// @return wave reference of the preview image +/// +static function /wave preview_pshell_file(filename) + string filename + + dfref save_df = GetDataFolderDFR() + + setdatafolder $package_path + dfref preview_df = GetDataFolderDFR() + svar s_preview_file + svar s_preview_source + svar /z s_file_info + if (! svar_exists(s_file_info)) + string /g s_file_info + endif + + dfref temp_df = NewFreeDataFolder() + dfref file_df = psh5_preview("pearl_explorer_filepath", filename, dest_df=temp_df) + svar /z /sdfr=temp_df dataname=s_preview_wave + + s_preview_file = filename + s_preview_source = "" + + wave /z /sdfr=temp_df data = $dataname + if (waveexists(data)) + duplicate /o data, preview_df:preview_image + else + print "no data found in file " + filename + endif + + s_file_info = "" + + setdatafolder save_df + wave /z /sdfr=preview_df preview_image + return preview_image +end + +/// load the preview of a PEARL HDF5 file. +/// +/// the preview is an arbitrary detector image extracted from the file, see adh5_load_preview(). +/// the preview is loaded to the preview_image wave in the pear_explorer data folder. 
+/// +/// the s_file_info string is updated with information about the scan dimensions. +/// +/// @param filename name of a file in the directory specified by the pearl_explorer_filepath path object. +/// +/// @return wave reference of the preview image +/// +static function /wave preview_hdf_file(filename) + string filename + + dfref save_df = GetDataFolderDFR() + setdatafolder $package_path + svar s_preview_file + svar s_preview_source + adh5_load_preview("preview_image", "pearl_explorer_filepath", filename) + s_preview_file = filename + s_preview_source = "" + wave /z preview_image + + svar /z s_file_info + if (! svar_exists(s_file_info)) + string /g s_file_info + endif + if (strlen(s_preview_file) > 0) + s_file_info = adh5_load_info("pearl_explorer_filepath", filename) + else + s_file_info = "" + endif + + setdatafolder save_df + return preview_image +end + +/// load the preview of a general ITX file. +/// +/// the function is designed for PEARL OTF and EPICS scan data converted from MDA files. +/// the function picks the first wave whose PV note matches one from the global string s_preview_pvs +/// (see @ref preview_datafolder and @ref init_package). +/// +/// the preview is loaded to the preview_image wave in the pearl_explorer data folder. +/// the s_file_info string is updated with information about the scan dimensions. +/// +/// @note: the ITX files should load their waves into the current data folder (a "free" data folder). +/// some early versions of PEARL ITX data files created a data folder of their own. +/// both ways are allowed, while the first one is preferred. +/// on return, the current data folder must point to either the original free folder or the newly created one. +/// +/// @param filename name of a file in the directory specified by the pearl_explorer_filepath path object. +/// +/// @return wave reference of the preview trace. +/// empty wave reference if the function failed. 
+/// +static function /wave preview_itx_file(filename) + string filename + + dfref save_df = GetDataFolderDFR() + setdatafolder $package_path + svar s_preview_file + svar s_preview_source + wave preview_image + + dfref data_df = newfreedatafolder() + setdatafolder data_df + LoadWave /t/p=pearl_explorer_filepath/q filename + s_preview_file = s_filename + s_preview_source = "" + + preview_datafolder() + + setdatafolder save_df + return preview_image +end + +/// extract a preview image from a wave of arbitrary dimension +static function extract_preview_image(data, preview) + wave data + wave preview + + variable z1, z2 + + // extract image + switch (WaveDims(data)) + case 1: + redimension /n=(numpnts(data)) preview + preview = data[p] + break + case 2: + redimension /n=(dimsize(data, 0), dimsize(data, 1)) preview + preview = data + break + case 3: + redimension /n=(dimsize(data, 0), dimsize(data, 1)) preview + z1 = floor(DimSize(data, 2) / 2) + z2 = z1 + wave slab = ad_extract_slab(data, nan, nan, nan, nan, z1, z2, "", pscale=1) + preview = slab + break + case 4: + // not implemented + endswitch + + switch (WaveDims(data)) + case 4: + case 3: + case 2: + setscale /p y dimoffset(data, 1), dimdelta(data, 1), waveunits(data, 1), preview + case 1: + setscale /p x dimoffset(data, 0), dimdelta(data, 0), waveunits(data, 0), preview + setscale d 0, 0, waveunits(data, -1), preview + endswitch +end + +/// preview data in the current data folder +/// +/// used by preview_itx_file +/// +static function preview_datafolder() + dfref save_df = GetDataFolderDFR() + + setdatafolder $package_path + svar s_preview_file + svar s_preview_source + svar s_preview_pvs + wave preview_image + + setdatafolder save_df + + // select a wave to display + // consider only double-precision waves, i.e. 
ignore text and other special waves + // filter by matching PV name to s_preview_pvs + string d_names = WaveList("*", ";", "DP:1") + variable nw = ItemsInList(d_names, ";") + variable npv = ItemsInList(s_preview_pvs, ";") + variable iw, ipv + string wname, wnote, pv_name, pv_match + for (iw = 0; iw < nw; iw += 1) + wname = StringFromList(iw, d_names, ";") + wnote = note($wname) + pv_name = StringByKey("PV", wnote, "=", "\r") + // find matching data wave by PV name + for (ipv = 0; ipv < npv; ipv += 1) + pv_match = StringFromList(ipv, s_preview_pvs) + if (StringMatch(pv_name, pv_match)) + wave data = $wname + s_preview_source = pv_name + extract_preview_image(data, preview_image) + preview_setscale_x(data, preview_image) + npv = 0 + nw = 0 + endif + endfor + endfor + + setdatafolder save_df +end + +static function preview_setscale_x(data, preview) + // sets the approximate x scale of OTF data. + // requires an Axis1 tag with name of x wave in the wave note. + // if any of these conditions is true, the function does not change the scaling: + // 1) Axis1 tag or referenced wave is missing. + // 2) preview wave is not set to point scaling. + // 3) x wave is not monotonic (90% of the steps in the same direction). 
+ wave data + wave preview + + if ((DimOffset(preview, 0) == 0) && (DimDelta(preview, 0) == 1)) + string xname = StringByKey("Axis1", note(data), "=", "\r") + wave /z xwave = $xname + if (WaveExists(xwave)) + // check for monotonicity + variable monotonic = 0 + duplicate /free xwave, xdiff + differentiate /p xwave /D=xdiff + duplicate /free xdiff, xflag + xflag = xdiff > 0 + monotonic = sum(xflag) > numpnts(xwave) * 0.9 + xflag = xdiff < 0 + monotonic = monotonic || (sum(xflag) > numpnts(xwave) * 0.9) + if (monotonic) + setscale /i x xwave[0], xwave[numpnts(xwave)-1], waveunits(xwave, -1), preview + endif + endif + endif +end + +/// displays the graph of a loaded dataset in its own window +static function display_dataset(file_df, dataset) + dfref file_df // top data folder of file + string dataset // dataset path inside data folder + + dfref save_df = GetDataFolderDFR() + + dfref data_df = psh5_dataset_to_folder(file_df, dataset) + SetDataFolder data_df + string data_name = StringFromList(ItemsInList(dataset, "/") - 1, dataset, "/") + wave /z data=$data_name + + if (WaveExists(data)) + switch(WaveDims(data)) + case 1: + case 2: + show_preview_graph(data) + break + case 3: + ad_display_slice(data) + ad_brick_slicer(data) + break + endswitch + endif + + setdatafolder save_df + return 0 +end + static function /s show_preview_graph(data, [xdata]) // displays a preview of one- or two-dimensional data wave data // data to be displayed. 
must either one-dimensional or two-dimensional wave xdata // positions on x axis - dfref saveDF = GetDataFolderDFR() + dfref save_df = GetDataFolderDFR() setdatafolder $package_path svar s_profiles_graph svar s_preview_file svar s_preview_source svar s_preview_trace_graph + svar s_preview_graph if ((strlen(s_profiles_graph) > 0) && (WinType(s_profiles_graph) == 1)) KillWindow $s_profiles_graph @@ -966,16 +969,22 @@ static function /s show_preview_graph(data, [xdata]) title = title + " (" + s_preview_source[0,31] + ")" endif dowindow /f/t $graphname, title + s_preview_graph = graphname - setdatafolder saveDF + setdatafolder save_df return graphname end static function /s display_preview_trace(xtrace, ytrace) - wave xtrace + wave /z xtrace wave ytrace + + if (WaveExists(xtrace)) + display /n=pearl_explorer_1d /k=1 ytrace vs xtrace as "Preview" + else + display /n=pearl_explorer_1d /k=1 ytrace as "Preview" + endif - display /n=pearl_explorer_1d /k=1 ytrace vs xtrace as "Preview" string graphname = s_name ModifyGraph /w=$graphname rgb[0]=(0,0,0) ModifyGraph /w=$graphname grid=2 @@ -1004,6 +1013,8 @@ static function /s display_preview_trace(xtrace, ytrace) return s_name end +// ====== file loading ====== + /// load the selected files /// /// load the files that are selected in the data explorer panel. @@ -1014,7 +1025,7 @@ end static function load_selected_files([options]) string options - dfref saveDF = GetDataFolderDFR() + dfref save_df = GetDataFolderDFR() setdatafolder $package_path wave wSelectedFiles @@ -1023,7 +1034,7 @@ static function load_selected_files([options]) variable ii for (ii = 0; ii < nn; ii += 1) if (wSelectedFiles[ii]) - setdatafolder saveDF + setdatafolder save_df if (ParamIsDefault(options)) load_file(wtFiles[ii]) else @@ -1031,42 +1042,45 @@ static function load_selected_files([options]) endif endif endfor - - update_datasets() end /// load one file /// -/// this can be a PShell, HDF5, ITX or MTRX file. 
-/// (HDF5 and MTRX files require the corresponding XOP to be loaded - cf. file documentation) +/// this can be a PShell, HDF5, or ITX file. /// /// @note this function may change the current data folder! /// -static function load_file(filename, [options]) +/// @param options `key:value;` list of load options. +/// the recognized keys are: `mode`, `reduction_func` and `reduction_params`. +/// see main text for a description of possible values. +/// by default (options not specified), options are read from `s_hdf_options. +/// if the option string is empty, the user is prompted for options. +/// +/// @param dest_df destination data folder. default: a new folder derived from file name under root. +/// +/// @param quiet @arg 0 (default) print mode and parameters to history. +/// @arg 1 do not print to history. +/// +static function load_file(filename, [options, dest_df, quiet]) string filename string options + dfref dest_df + variable quiet + + if (ParamIsDefault(options)) + options = "" + endif variable ft = pearl_file_type(filename) switch(ft) case 1: - if (ParamIsDefault(options)) - load_pshell_file(filename) - else - load_pshell_file(filename, options=options) - endif + load_pshell_file(filename, options=options, dest_df=dest_df, quiet=quiet) break case 2: - if (ParamIsDefault(options)) - load_hdf_file(filename) - else - load_hdf_file(filename, options=options) - endif + load_hdf_file(filename, options=options, dest_df=dest_df, quiet=quiet) break case 3: - load_itx_file(filename) - break - case 4: - load_mtrx_file(filename) + load_itx_file(filename, dest_df=dest_df, quiet=quiet) break default: break @@ -1079,18 +1093,19 @@ static function prompt_hdf_options(options) string mode = StringByKey("mode", options, ":", ";") string reduction_func = StringByKey("reduction_func", options, ":", ";") - string modes = "load_reduced" - string reduction_functions = adh5_list_reduction_funcs() + string modes = "load_scan;load_region;load_dataset;load_diags;load_complete;" + 
string reduction_funcs = adh5_list_reduction_funcs() + reduction_funcs = RemoveFromList("adh5_default_reduction", reduction_funcs, ";") if (strlen(mode) == 0) mode = StringFromList(0, modes, ";") endif if (strlen(reduction_func) == 0) - reduction_func = StringFromList(0, reduction_functions, ";") + reduction_func = StringFromList(0, reduction_funcs, ";") endif prompt mode, "Mode", popup, modes - prompt reduction_func, "Reduction Function", popup, reduction_functions + prompt reduction_func, "Reduction Function", popup, reduction_funcs doprompt "HDF5 Loading Options", mode, reduction_func if (v_flag == 0) @@ -1134,87 +1149,169 @@ end /// load a pshell file /// -/// load a pshell hdf5 file (complete or reduced). -/// /// if options is not specified, the complete file is loaded. /// if options is an empty string, the package default options are used. /// -/// the only supported options is `mode:load_reduced`. -/// in this case, the name of the reduction function must also be given under the `reduction_func` key. +/// data selection is extracted from the datasets list box. +/// +/// the file can be loaded in one of the following modes (`mode` key of the options argument): +/// +/// @arg `load_complete` load all datasets regardless of selection. +/// @arg `load_scan` load default datasets of selected scans. +/// @arg `load_region` load default datasets of selected regions. +/// @arg `load_dataset` load selected datasets. +/// @arg `load_diags` load diagnostic datasets of selected scans. +/// +/// 3-dimensional datasets can be loaded with dimension reduction. +/// in this case, the name of the reduction function must be given under the `reduction_func` key. /// the reduction parameters are prompted for if a prompt function for the reduction function is found. /// the default reduction parameters are the most recent parameters `s_reduction_params` stored in the package data folder. /// +/// @arg `reduction_func:...` name of the reduction function. 
+/// +/// if a reduction function is specified, default reduction parameters are read from `s_reduction_params` in the package data folder, +/// and the user is prompted to review/update the parameters. +/// /// @param options `key:value;` list of load options. -/// by default, load complete, using psh5_load_complete(). -/// empty string, use options from `s_hdf_options. -/// @arg `mode:load_reduced` load reduced, using psh5_load_reduced(). -/// @arg `reduction_func:...` name of the reduction function. +/// the recognized keys are: `mode`, `reduction_func` and `reduction_params`. +/// see main text for a description of possible values. +/// by default (options not specified), options are read from `s_hdf_options. +/// if the option string is empty, the user is prompted for options. /// -/// @note after the function returns, -/// the current data folder points to the loaded data (scan1). +/// @param dest_df destination data folder. default: a new folder derived from file name under root. /// -static function /df load_pshell_file(filename, [options]) +/// @param quiet @arg 0 (default) print mode and parameters to history. +/// @arg 1 do not print to history. +/// +/// @return data folder reference of the loaded data. this is the folder which contains the scan sub-folders. 
+/// +static function /df load_pshell_file(filename, [options, dest_df, quiet]) string filename string options + dfref dest_df + variable quiet - dfref saveDF = GetDataFolderDFR() - string nickname = ad_suggest_foldername(filename) - string loaded_filename = "" + dfref save_df = GetDataFolderDFR() + svar pref_options = $(package_path + "s_hdf_options") + svar pref_params = $(package_path + "s_reduction_params") + string path = "pearl_explorer_filepath" + if (ParamIsDefault(options)) - loaded_filename = psh5_load_complete(nickname, "pearl_explorer_filepath", filename) - else - if (strlen(options) == 0) - svar pref_options = $(package_path + "s_hdf_options") - options = pref_options - if (prompt_hdf_options(options) == 0) - // OK - pref_options = options - else - // cancel - options = "" - endif - endif - - string mode = StringByKey("mode", options, ":", ";") - - strswitch(mode) - case "load_reduced": - string reduction_func = StringByKey("reduction_func", options, ":", ";") - svar pref_params = $(package_path + "s_reduction_params") - string reduction_params = pref_params - if (prompt_func_params(reduction_func, reduction_params) == 0) - pref_params = reduction_params - print reduction_func, reduction_params - psh5_load_reduced(nickname, "pearl_explorer_filepath", filename, $reduction_func, reduction_params) - svar s_filepath - loaded_filename = filename - endif - break - endswitch + options = pref_options endif - dfref dataDF - if (strlen(loaded_filename) > 0) - setdatafolder $("root:" + nickname) - dataDF = GetDataFolderDFR() - setdatafolder $(":scan1") - string /g pearl_explorer_import = "load_pshell_file" - else - setdatafolder saveDF + if (strlen(options) == 0) + if (prompt_hdf_options(options) == 0) + pref_options = options + else + return $"" + endif + endif + + string reduction_func = StringByKey("reduction_func", options, ":", ";") + string reduction_params = pref_params + variable max_rank = 2 + + if (exists(reduction_func) == 6) + max_rank = 3 + if 
(prompt_func_params(reduction_func, reduction_params) == 0) + pref_params = reduction_params + else + return $"" + endif endif - return dataDF + string mode = StringByKey("mode", options, ":", ";") + string selected_datasets = WMHL_SelectedObjectsList("PearlDataExplorer", "lb_contents") + string selected_scans = psh5_extract_scan_paths(selected_datasets) + string selected_regions = psh5_extract_region_paths(selected_datasets) + variable dsc + + if (!quiet) + print mode, filename + if (strlen(reduction_func)) + print reduction_func, reduction_params + endif + endif + + strswitch(mode) + case "load_complete": + dsc = kDSCAll + dfref file_df = psh5_load(path, filename, "", "", "*", classes=dsc, reduction_func=reduction_func, reduction_params=reduction_params, dest_df=dest_df) + break + case "load_diags": + if (ItemsInList(selected_scans, ";") == 0) + if (!quiet) + print "no scan selected - defaulting to scan 1." + endif + selected_scans = "/scan1;" + endif + dsc = kDSCAttrs | kDSCDiags | kDSCSnaps | kDSCMeta | kDSCMonitors + dfref file_df = psh5_load(path, filename, selected_scans, "", "", classes=dsc, dest_df=dest_df) + break + case "load_scan": + if (ItemsInList(selected_scans, ";") == 0) + if (!quiet) + print "no scan selected - defaulting to scan 1." + endif + selected_scans = "/scan1;" + endif + dsc = kDSCPositioners | kDSCDetectors | kDSCScientaScaling | kDSCEssentialDiags + dfref file_df = psh5_load(path, filename, selected_scans, "", "", classes=dsc, max_rank=max_rank, reduction_func=reduction_func, reduction_params=reduction_params, dest_df=dest_df) + break + case "load_region": + if (ItemsInList(selected_regions, ";") == 0) + if (!quiet) + print "no region selected - defaulting to scan 1/region 1." 
+ endif + selected_regions = "/scan1/region1;" + endif + dsc = kDSCPositioners | kDSCDetectors | kDSCScientaScaling | kDSCEssentialDiags + dfref file_df = psh5_load(path, filename, "", selected_regions, "", classes=dsc, max_rank=max_rank, reduction_func=reduction_func, reduction_params=reduction_params, dest_df=dest_df) + break + case "load_dataset": + if (ItemsInList(selected_datasets, ";") > 0) + dsc = kDSCAll + dfref file_df = psh5_load(path, filename, "", "", selected_datasets, classes=dsc, reduction_func=reduction_func, reduction_params=reduction_params, dest_df=dest_df) + else + if (!quiet) + DoAlert /T="PShell Import" 0, "Please select the datasets to load." + endif + endif + break + endswitch + + if (DataFolderRefStatus(file_df)) + setdatafolder file_df + string /g pearl_explorer_import = "load_pshell_file" + if (!quiet) + print "data loaded to", GetDataFolder(1) + endif + else + setdatafolder save_df + endif + + return file_df end -static function /df load_hdf_file(filename, [options]) +static function /df load_hdf_file(filename, [options, dest_df, quiet]) string filename string options + dfref dest_df + variable quiet - dfref saveDF = GetDataFolderDFR() + dfref save_df = GetDataFolderDFR() string nickname = ad_suggest_foldername(filename) string loaded_filename = "" + if (ParamIsDefault(dest_df) || !DataFolderRefStatus(dest_df)) + // + else + DoAlert /T="load_hdf_file" 0, "optional argument dest_df not supported." 
+ return $"" + endif + if (ParamIsDefault(options)) loaded_filename = adh5_load_complete(nickname, "pearl_explorer_filepath", filename) else @@ -1246,32 +1343,40 @@ static function /df load_hdf_file(filename, [options]) endswitch endif - dfref dataDF + dfref data_df if (strlen(loaded_filename) > 0) setdatafolder $("root:" + nickname) - dataDF = GetDataFolderDFR() + data_df = GetDataFolderDFR() string /g pearl_explorer_import = "load_hdf_file" else - setdatafolder saveDF + setdatafolder save_df endif - return dataDF + return data_df end -static function /df load_itx_file(filename, [options]) +static function /df load_itx_file(filename, [options, dest_df, quiet]) string filename string options + dfref dest_df + variable quiet - dfref saveDF = GetDataFolderDFR() + dfref save_df = GetDataFolderDFR() string nickname = itx_suggest_foldername(filename) if (ParamIsDefault(options)) options = "" endif - setdatafolder root: - newdatafolder /s/o $("root:" + nickname) - dfref dataDF = GetDataFolderDFR() + variable own_data_df = 0 + if (ParamIsDefault(dest_df) || !DataFolderRefStatus(dest_df)) + setdatafolder root: + newdatafolder /s/o $("root:" + nickname) + own_data_df = 1 + else + setdatafolder dest_df + endif + dfref data_df = GetDataFolderDFR() // note: some versions of PEARL data files save data to a new data folder, // and leave the newly created folder as the current folder. @@ -1280,65 +1385,50 @@ static function /df load_itx_file(filename, [options]) LoadWave /t/p=pearl_explorer_filepath/q filename svar waves = s_wavenames - dfref actDF = GetDataFolderDFR() + dfref act_df = GetDataFolderDFR() if (v_flag > 0) string /g pearl_explorer_import = "load_itx_file" endif - if (!DataFolderRefsEqual(actDF, dataDF)) + if (!DataFolderRefsEqual(act_df, data_df) && own_data_df) // the file created its own data folder. 
// let's kill the pre-allocated folder - setdatafolder dataDF + setdatafolder data_df if (ItemsInList(WaveList("*", ";", ""), ";") == 0) - killdatafolder /z dataDF + killdatafolder /z data_df endif endif - setdatafolder saveDF - return actDF + setdatafolder save_df + return act_df end -/// load a matrix (STM) data file +/// suggest the name of a data folder based on an igor-text file name /// +/// if the file name follows the naming convention source-date-index.extension, +/// the function tries to generate the nick name as source_date_index. +/// otherwise it's just a cleaned up version of the file name. +/// +/// igor text is used by the on-the-fly scan tool. +/// +/// @param filename file name, including extension. +/// can also include a folder path (which is ignored). +/// the extension is currently ignored, +/// but may be used in a later version. +/// @param ignoredate if non-zero, the nick name will not include the date part. +/// defaults to zero +/// @param sourcename nick name of the data source. +/// the function tries to detect the source from the file name. +/// this option can be used to override auto-detection. +/// allowed values: sscan, otf +/// @param unique if non-zero, the resulting name is made a unique data folder name in the current data folder. 
+/// defaults to zero /// -static function /df load_mtrx_file(filename, [options]) - string filename - string options - - dfref saveDF = GetDataFolderDFR() - dfref dataDF = $"" - -#if exists("MFR_OpenResultFile") - setdatafolder root: - string datasets = "" - datasets = mtrx_load_file("pearl_explorer_filepath", filename) - if (strlen(datasets) > 0) - string /g pearl_explorer_import = "load_mtrx_file" - string s1 = StringFromList(0, datasets) - wave w1 = $s1 - dataDF = GetWavesDataFolderDFR(w1) - endif -#endif - - setdatafolder saveDF - return dataDF -end - function /s itx_suggest_foldername(filename, [ignoredate,sourcename,unique]) - // suggests the name of a data folder based on a file name - // if the file name follows the naming convention source-date-index.extension, - // the function tries to generate the nick name as source_date_index. - // otherwise it's just a cleaned up version of the file name. - string filename // file name, including extension. can also include a folder path (which is ignored) - // the extension is currently ignored, but may be used later to select the parent folder - variable ignoredate // if non-zero, the nick name will not include the date part - // defaults to zero - string sourcename // nick name of the data source - // the function tries to detect the source from the file name - // this option can be used to override auto-detection - // allowed values: sscan, otf - variable unique // if non-zero, the resulting name is made a unique data folder name in the current data folder - // defaults to zero + string filename + variable ignoredate + string sourcename + variable unique if (ParamIsDefault(ignoredate)) ignoredate = 0 @@ -1383,103 +1473,155 @@ function /s itx_suggest_foldername(filename, [ignoredate,sourcename,unique]) return nickname end +// ====== panel ====== + Window PearlDataExplorer() : Panel PauseUpdate; Silent 1 // building window... 
- NewPanel /K=1 /W=(800,0,1530,444) as "PEARL Data Explorer" + NewPanel /K=1 /W=(510,45,1190,539) as "PEARL Data Explorer" ModifyPanel cbRGB=(48640,56832,60160) - - GroupBox gb_filepath,pos={8,4},size={224,52},title="file system folder" - TitleBox tb_filepath,pos={20,24},size={174,20},frame=2 - TitleBox tb_filepath,variable=root:packages:pearl_explorer:s_filepath,fixedSize=1 - Button b_browse_filepath,pos={200,24},size={20,20},proc=PearlDataExplorer#bp_browse_filepath,title="..." + GroupBox g_data_reduction,pos={355.00,370.00},size={306.00,49.00},title="data reduction" + GroupBox g_data_reduction,help={"data reduction of 3D ScientaImage"} + GroupBox gb_filepath,pos={8.00,4.00},size={328.00,48.00},title="file system folder" + TitleBox tb_filepath,pos={20.00,28.00},size={279.00,21.00},frame=0 + TitleBox tb_filepath,variable= root:packages:pearl_explorer:s_short_filepath,fixedSize=1 + Button b_browse_filepath,pos={303.00,24.00},size={20.00,20.00},proc=PearlDataExplorer#bp_browse_filepath,title="..." 
Button b_browse_filepath,fColor=(65280,48896,32768) - - GroupBox gb_prefs,pos={240,4},size={58,52},title="prefs",help={"explorer package preferences"} - Button b_save_prefs,pos={252,20},size={32,17},proc=PearlDataExplorer#bp_save_prefs,title="save" + GroupBox gb_prefs,pos={8.00,351.00},size={65.00,131.00},title="prefs" + GroupBox gb_prefs,help={"explorer package preferences"} + Button b_save_prefs,pos={21.00,394.00},size={38.00,17.00},proc=PearlDataExplorer#bp_save_prefs,title="save" Button b_save_prefs,help={"save preferences of the data explorer package (data file path, attributes filter)"} Button b_save_prefs,fColor=(65280,48896,32768) - Button b_load_prefs,pos={252,36},size={32,17},proc=PearlDataExplorer#bp_load_prefs,title="load" + Button b_load_prefs,pos={21.00,374.00},size={38.00,17.00},proc=PearlDataExplorer#bp_load_prefs,title="load" Button b_load_prefs,help={"load preferences of the data explorer package"} Button b_load_prefs,fColor=(65280,48896,32768) - - GroupBox gb_filelist,pos={8,64},size={224,372},title="data files" - ListBox lb_files,pos={20,84},size={200,212},proc=PearlDataExplorer#lbp_filelist + GroupBox gb_filelist,pos={8.00,55.00},size={328.00,293.00},title="data files" + ListBox lb_files,pos={20.00,83.00},size={303.00,222.00},proc=PearlDataExplorer#lbp_filelist ListBox lb_files,listWave=root:packages:pearl_explorer:wtFiles - ListBox lb_files,selWave=root:packages:pearl_explorer:wSelectedFiles,row= 11,mode= 4 - TitleBox tb_file_info,pos={20,300},size={198,78},frame=2,fixedSize=1 - TitleBox tb_file_info,variable= root:packages:pearl_explorer:s_file_info - - Button b_update_filelist,pos={20,386},size={60,20},proc=PearlDataExplorer#bp_update_filelist,title="update list" + ListBox lb_files,selWave=root:packages:pearl_explorer:wSelectedFiles,mode= 4 + Button b_update_filelist,pos={246.00,315.00},size={76.00,22.00},proc=PearlDataExplorer#bp_update_filelist,title="update list" Button b_update_filelist,fColor=(65280,48896,32768) - CheckBox 
cb_file_preview,pos={84,390},size={60,20},title="preview" + CheckBox cb_file_preview,pos={78.00,318.00},size={59.00,14.00},title="preview" CheckBox cb_file_preview,help={"enable/disable automatic preview window when selecting a data file"} - CheckBox cb_file_preview,value=1 - Button b_file_prev,pos={176,386},size={20,20},proc=PearlDataExplorer#bp_file_prev,title="\\W646" - Button b_file_prev,help={"previous file"} - Button b_file_prev,fColor=(65280,48896,32768) - Button b_file_next,pos={200,386},size={20,20},proc=PearlDataExplorer#bp_file_next,title="\\W649" - Button b_file_next,help={"next file"} - Button b_file_next,fColor=(65280,48896,32768) - - Button b_load_files,pos={20,410},size={76,20},proc=PearlDataExplorer#bp_load_files,title="load complete" - Button b_load_files,help={"load the complete contents from the selected files"} - Button b_load_files,fColor=(65280,48896,32768) - Button b_load_files_opt,pos={100,410},size={76,20},proc=PearlDataExplorer#bp_load_files_opt,title="load reduced" - Button b_load_files_opt,help={"load data from the selected files with options (reduced dimensions)"} - Button b_load_files_opt,fColor=(65280,48896,32768) - - // datasets group - GroupBox gb_datasets,pos={240,64},size={224,372},title="datasets" - ListBox lb_datasets,pos={252,84},size={200,300},proc=PearlDataExplorer#lbp_datasets,help={"list of loaded datasets"} - ListBox lb_datasets,listWave=root:packages:pearl_explorer:wtDatasets - ListBox lb_datasets,selWave=root:packages:pearl_explorer:wSelectedDatasets,mode= 1 - ListBox lb_datasets,selRow= -1 - - Button b_update_datasets,pos={252,386},size={60,20},proc=PearlDataExplorer#bp_update_datasets,title="update list" - Button b_update_datasets,help={"update the list of datasets"} - Button b_update_datasets,fColor=(65280,48896,32768) - CheckBox cb_dataset_preview,pos={316,390},size={60,20},title="preview" - CheckBox cb_dataset_preview,help={"enable/disable automatic preview window when selecting a dataset"} - CheckBox 
cb_dataset_preview,value=0 - Button b_dataset_prev,pos={408,386},size={20,20},proc=PearlDataExplorer#bp_dataset_prev,title="\\W646" - Button b_dataset_prev,help={"goto previous dataset"} - Button b_dataset_prev,fColor=(65280,48896,32768) - Button b_dataset_next,pos={432,386},size={20,20},proc=PearlDataExplorer#bp_dataset_next,title="\\W649" - Button b_dataset_next,help={"goto next dataset"} - Button b_dataset_next,fColor=(65280,48896,32768) - - Button b_dataset_folder,pos={252,410},size={50,20},proc=PearlDataExplorer#bp_dataset_folder,title="goto DF" - Button b_dataset_folder,help={"set the current data folder of the selected dataset"} - Button b_dataset_folder,fColor=(65280,48896,32768) - Button b_dataset_display,pos={306,410},size={50,20},proc=PearlDataExplorer#bp_dataset_display,title="display" - Button b_dataset_display,help={"display the selected dataset in its own window"} - Button b_dataset_display,fColor=(65280,48896,32768) - - GroupBox gb_preview,pos={472,4},size={250,52},title="preview" - TitleBox tb_preview_file,pos={484,24},size={226,20},frame=2 - TitleBox tb_preview_file,variable=root:packages:pearl_explorer:s_preview_file,fixedSize=1 - - GroupBox gb_attributes,pos={472,64},size={250,372},title="attributes" - Button b_attr_notebook,pos={484,386},size={60,20},proc=PearlDataExplorer#bp_attr_notebook,title="notebook" - Button b_attr_notebook,help={"show attribute list in a notebook"} + CheckBox cb_file_preview,value= 0 + Button b_file_prev,pos={20.00,314.00},size={22.00,22.00},proc=PearlDataExplorer#bp_file_prev,title="\\W646" + Button b_file_prev,help={"previous file"},fColor=(65280,48896,32768) + Button b_file_next,pos={44.00,314.00},size={22.00,22.00},proc=PearlDataExplorer#bp_file_next,title="\\W649" + Button b_file_next,help={"next file"},fColor=(65280,48896,32768) + Button b_goto_dataset,pos={355.00,315.00},size={64.00,22.00},disable=2,proc=PearlDataExplorer#bp_goto_dataset,title="goto DF" + Button b_goto_dataset,help={"change the current data 
folder ot where the selected dataset could be located"} + Button b_goto_dataset,fColor=(65280,48896,32768) + Button b_display_dataset,pos={423.00,315.00},size={64.00,22.00},disable=2,proc=PearlDataExplorer#bp_display_dataset,title="display" + Button b_display_dataset,help={"display the selected dataset in its own window"} + Button b_display_dataset,fColor=(65280,48896,32768) + Button b_load_complete,pos={355.00,451.00},size={92.00,22.00},disable=2,proc=PearlDataExplorer#bp_load_options,title="all data" + Button b_load_complete,help={"load all datasets of the selected file."} + Button b_load_complete,userdata= "mode:load_complete;" + Button b_load_complete,fColor=(65280,48896,32768) + TitleBox tb_selected_file,pos={360.00,28.00},size={309.00,22.00},frame=0 + TitleBox tb_selected_file,variable= root:packages:pearl_explorer:s_selected_file,fixedSize=1 + GroupBox gb_contents,pos={346.00,55.00},size={327.00,294.00},title="datasets" + Button b_attr_notebook,pos={97.00,375.00},size={64.00,22.00},disable=2,proc=PearlDataExplorer#bp_attr_notebook,title="notebook" + Button b_attr_notebook,help={"show a summary of attributes in a notebook window"} Button b_attr_notebook,fColor=(65280,48896,32768) - - String fldrSav0= GetDataFolder(1) - SetDataFolder root:packages:pearl_explorer: - Edit/W=(484,84,710,384)/HOST=# attr_names,attr_values - ModifyTable format(Point)=1,width(Point)=0,width(attr_names)=103,width(attr_values)=103 - ModifyTable statsArea=85 - SetDataFolder fldrSav0 - RenameWindow #,T0 - SetActiveSubwindow ## + ListBox lb_contents,pos={355.00,84.00},size={305.00,222.00} + ListBox lb_contents,keySelectCol= 1 + GroupBox gb_selected_file,pos={346.00,4.00},size={328.00,48.00},title="selected file" + Button b_load_region,pos={355.00,426.00},size={92.00,22.00},disable=2,proc=PearlDataExplorer#bp_load_options,title="region" + Button b_load_region,help={"load the selected region"} + Button b_load_region,userdata= "mode:load_region;",fColor=(65280,48896,32768) + PopupMenu 
popup_reduction,pos={366.00,391.00},size={200.00,17.00},bodyWidth=200,proc=PearlDataExplorer#pmp_reduction_func + PopupMenu popup_reduction,help={"data reduction of 3d ScientaImage. note: the list may contain unsuitable functions. check the code or documentation!"} + PopupMenu popup_reduction,mode=1,popvalue="None",value= #"PearlDataExplorer#pm_reduction_values()" + GroupBox group_import,pos={346.00,351.00},size={326.00,131.00},title="import" + Button b_load_scan,pos={450.00,426.00},size={94.00,22.00},disable=2,proc=PearlDataExplorer#bp_load_options,title="scan" + Button b_load_scan,help={"load the selected scan"},userdata= "mode:load_scan;" + Button b_load_scan,fColor=(65280,48896,32768) + Button b_load_diags,pos={450.00,451.00},size={94.00,22.00},disable=2,proc=PearlDataExplorer#bp_load_options,title="diagnostics" + Button b_load_diags,help={"load diagnostics of selected scans"},userdata= "mode:load_diags;" + Button b_load_diags,fColor=(65280,48896,32768) + Button b_load_dataset,pos={547.00,426.00},size={101.00,22.00},disable=2,proc=PearlDataExplorer#bp_load_options,title="dataset" + Button b_load_dataset,help={"load the selected datasets"} + Button b_load_dataset,userdata= "mode:load_dataset;",fColor=(65280,48896,32768) + Button b_reduction_params,pos={571.00,390.00},size={71.00,19.00},disable=2,proc=PearlDataExplorer#bp_reduction_params,title="set params" + Button b_reduction_params,help={"set data reduction parameters"} + Button b_reduction_params,fColor=(65280,48896,32768) + GroupBox g_fileinfo,pos={85.00,351.00},size={251.00,131.00},title="file info" + Button b_elog,pos={97.00,401.00},size={64.00,22.00},disable=2,proc=PearlDataExplorer#bp_elog,title="ELOG" + Button b_elog,help={"send file metadata to ELOG panel (does not submit to ELOG)"} + Button b_elog,fColor=(65280,48896,32768) + ToolsGrid grid=(0,28.35,5) EndMacro +/// update controls state +/// +static function update_controls() + dfref package_df = $package_path + svar /z /sdfr=package_df 
hl_contents_datasets + wave /z /sdfr=package_df wSelectedFiles + + variable file_selected = 0 + if (WaveExists(wSelectedFiles)) + file_selected = sum(wSelectedFiles) + endif + + string selected_datasets = WMHL_SelectedObjectsList("PearlDataExplorer", "lb_contents") + variable scan_selected = strsearch(selected_datasets, "scan", 0, 2) == 0 + variable region_selected = strsearch(selected_datasets, "region", 0, 2) >= 0 + + variable dataset_selected = 0 + variable nds = ItemsInList(selected_datasets, ";") + variable ids + string ds + if (svar_exists(hl_contents_datasets)) + for (ids = 0; ids < nds; ids += 1) + ds = "/" + StringFromList(ids, selected_datasets, ";") + if (NumType(NumberByKey(ds, hl_contents_datasets, ":", ";")) == 0) + dataset_selected = 1 + break + endif + endfor + else + nds = 0 + endif + + variable dis + dis = file_selected ? 0 : 2 + Button b_load_complete win=PearlDataExplorer,disable=dis + Button b_load_diags win=PearlDataExplorer,disable=dis + dis = file_selected && scan_selected ? 0 : 2 + Button b_attr_notebook win=PearlDataExplorer,disable=dis + + dis = file_selected && (strlen(WinList("*ElogPanel*", ";", "WIN:64")) > 1) ? 0 : 2 + Button b_elog win=PearlDataExplorer,disable=dis + dis = scan_selected ? 0 : 2 + Button b_load_scan win=PearlDataExplorer,disable=dis + dis = region_selected ? 0 : 2 + Button b_load_region win=PearlDataExplorer,disable=dis + dis = dataset_selected ? 0 : 2 + Button b_load_dataset win=PearlDataExplorer,disable=dis + Button b_display_dataset win=PearlDataExplorer,disable=dis + dis = file_selected && (nds > 0) ? 
0 : 2 + Button b_goto_dataset win=PearlDataExplorer,disable=dis + + ControlInfo /W=PearlDataExplorer popup_reduction + if ((cmpstr(S_Value, "None") != 0) && (exists(S_Value) == 6)) + GroupBox g_data_reduction win=PearlDataExplorer,labelBack=(65535,49151,49151) + Button b_reduction_params win=PearlDataExplorer,disable=0 + else + GroupBox g_data_reduction win=PearlDataExplorer,labelBack=0 + Button b_reduction_params win=PearlDataExplorer,disable=2 + endif + + return 0 +end + static function bp_load_prefs(ba) : ButtonControl STRUCT WMButtonAction &ba switch( ba.eventCode ) case 2: // mouse up load_prefs() + update_controls() break case -1: // control being killed break @@ -1502,27 +1644,52 @@ static function bp_save_prefs(ba) : ButtonControl return 0 End +/// shorten a file path for display +/// +/// @note the result is not a valid path any more! +/// +static function /s shorten_filepath(long_path, max_len) + string long_path + variable max_len + + string path = long_path + variable ellipsis = 0 + do + if (strlen(path) > max_len) + path = RemoveListItem(1, path, ":") + ellipsis += 1 + else + break + endif + while (1) + + if (ellipsis >= 1) + path = AddListItem("…", path, ":", 1) + endif + + return path +end + static function bp_browse_filepath(ba) : ButtonControl STRUCT WMButtonAction &ba - dfref saveDF = GetDataFolderDFR() + dfref save_df = GetDataFolderDFR() switch( ba.eventCode ) case 2: // mouse up PathInfo /S pearl_explorer_filepath NewPath /M="select data file folder" /O/Z pearl_explorer_filepath if (v_flag == 0) - PathInfo /S pearl_explorer_filepath - svar filepath = $(package_path + "s_filepath") - filepath = s_path + update_filepath() update_filelist() + update_controls() endif break case -1: // control being killed break endswitch - setdatafolder saveDF + setdatafolder save_df return 0 End @@ -1532,6 +1699,7 @@ static function bp_update_filelist(ba) : ButtonControl switch( ba.eventCode ) case 2: // mouse up update_filelist() + update_controls() break case 
-1: // control being killed break @@ -1540,12 +1708,44 @@ static function bp_update_filelist(ba) : ButtonControl return 0 End -static function bp_load_files(ba) : ButtonControl +/// items for data reduction popup +static function /s pm_reduction_values() + string reduction_funcs = adh5_list_reduction_funcs() + reduction_funcs = RemoveFromList("adh5_default_reduction", reduction_funcs, ";") + reduction_funcs = AddListItem("None", reduction_funcs, ";", 0) + return reduction_funcs +end + +static function pmp_reduction_func(pa) : PopupMenuControl + STRUCT WMPopupAction &pa + + switch( pa.eventCode ) + case 2: // mouse up + Variable popNum = pa.popNum + String popStr = pa.popStr + update_controls() + break + case -1: // control being killed + break + endswitch + + return 0 +End + +static function bp_reduction_params(ba) : ButtonControl STRUCT WMButtonAction &ba switch( ba.eventCode ) case 2: // mouse up - load_selected_files() + ControlInfo /W=PearlDataExplorer popup_reduction + if ((cmpstr(S_Value, "None") != 0) && (exists(S_Value) == 6)) + svar pref_params = $(package_path + "s_reduction_params") + string reduction_func = S_Value + string reduction_params = pref_params + if (prompt_func_params(reduction_func, reduction_params) == 0) + pref_params = reduction_params + endif + endif break case -1: // control being killed break @@ -1554,12 +1754,22 @@ static function bp_load_files(ba) : ButtonControl return 0 End -static function bp_load_files_opt(ba) : ButtonControl +static function bp_load_options(ba) : ButtonControl STRUCT WMButtonAction &ba switch( ba.eventCode ) case 2: // mouse up - load_selected_files(options="") + // options must be in the button's unnamed user data in the form: "mode:load_complete". + // see load_pshell_file for recognized values. 
+ string options=ba.userData + + // data reduction popup + ControlInfo /W=PearlDataExplorer popup_reduction + if ((cmpstr(S_Value, "None") != 0) && (exists(S_Value) == 6)) + options = ReplaceStringByKey("reduction_func", options, S_Value, ":", ";") + endif + + load_selected_files(options=options) break case -1: // control being killed break @@ -1568,10 +1778,38 @@ static function bp_load_files_opt(ba) : ButtonControl return 0 End +/// actions after a file has been selected +/// +/// - load metadata +/// - load preview if requested +/// +/// @param file name of selected file +/// @param do_preview enable/disable loading of preview data +/// non-zero: load preview, +/// zero: don't load preview +/// +static function selected_file(file, do_preview) + string file + variable do_preview + + dfref save_df = GetDataFolderDFR() + setdatafolder $package_path + svar s_selected_file + s_selected_file = file + get_file_info(file) + if (do_preview) + preview_file(file) + endif + update_controls() + + setdatafolder save_df + return 0 +end + static function bp_file_next(ba) : ButtonControl STRUCT WMButtonAction &ba - dfref saveDF = GetDataFolderDFR() + dfref save_df = GetDataFolderDFR() switch( ba.eventCode ) case 2: // mouse up @@ -1587,23 +1825,22 @@ static function bp_file_next(ba) : ButtonControl if (v_value >= 0) variable ifile = v_value ControlInfo /W=PearlDataExplorer cb_file_preview - if (v_value) - preview_file(wtFiles[ifile]) - endif + selected_file(wtFiles[ifile], v_value) endif + update_controls() break case -1: // control being killed break endswitch - setdatafolder saveDF + setdatafolder save_df return 0 End static function bp_file_prev(ba) : ButtonControl STRUCT WMButtonAction &ba - dfref saveDF = GetDataFolderDFR() + dfref save_df = GetDataFolderDFR() switch( ba.eventCode ) case 2: // mouse up @@ -1619,22 +1856,23 @@ static function bp_file_prev(ba) : ButtonControl if (v_value >= 0) variable ifile = v_value ControlInfo /W=PearlDataExplorer cb_file_preview - if 
(v_value) - preview_file(wtFiles[ifile]) - endif + selected_file(wtFiles[ifile], v_value) endif + update_controls() break case -1: // control being killed break endswitch - setdatafolder saveDF + setdatafolder save_df return 0 End static function lbp_filelist(lba) : ListBoxControl STRUCT WMListboxAction &lba + dfref save_df = GetDataFolderDFR() + Variable row = lba.row Variable col = lba.col WAVE/T/Z listWave = lba.listWave @@ -1644,161 +1882,17 @@ static function lbp_filelist(lba) : ListBoxControl case -1: // control being killed break case 1: // mouse down + setdatafolder $package_path + wave wSelectedFiles if (selWave[row]) - ControlInfo /W=PearlDataExplorer cb_file_preview - if (v_value) - preview_file(listWave[row]) - endif - endif - break - case 3: // double click - break - case 4: // cell selection - case 5: // cell selection plus shift key - break - case 6: // begin edit - break - case 7: // finish edit - break - case 13: // checkbox clicked (Igor 6.2 or later) - break - endswitch - - return 0 -End - -static function bp_update_datasets(ba) : ButtonControl - STRUCT WMButtonAction &ba - - switch( ba.eventCode ) - case 2: // mouse up - update_datasets() - break - case -1: // control being killed - break - endswitch - - return 0 -End - -static function bp_dataset_folder(ba) : ButtonControl - STRUCT WMButtonAction &ba - - switch( ba.eventCode ) - case 2: // mouse up - ControlInfo /W=PearlDataExplorer lb_datasets - if (v_value >= 0) - setdatafolder $package_path - wave /t wtDatasets - string dataset = wtDatasets[v_value] - string cmd - sprintf cmd, "setdatafolder root:%s", PossiblyQuoteName(dataset) - execute /q /z cmd - cmd = "setdatafolder :scan1" - execute /q /z cmd - sprintf cmd, "setdatafolder %s", GetDataFolder(1) - print cmd - endif - break - case -1: // control being killed - break - endswitch - - return 0 -End - -static function bp_dataset_display(ba) : ButtonControl - STRUCT WMButtonAction &ba - - switch( ba.eventCode ) - case 2: // mouse up - 
ControlInfo /W=PearlDataExplorer lb_datasets - if (v_value >= 0) - setdatafolder $package_path - wave /t wtDatasets - string dataset = wtDatasets[v_value] - display_dataset(dataset) - endif - break - case -1: // control being killed - break - endswitch - - return 0 -End - -static function bp_dataset_next(ba) : ButtonControl - STRUCT WMButtonAction &ba - - switch( ba.eventCode ) - case 2: // mouse up - ControlInfo /W=PearlDataExplorer lb_datasets - wave /t wtDatasets = $(s_datafolder + s_value) - v_value += 1 - if (v_value >= numpnts(wtDatasets)) - v_value = min(0, numpnts(wtDatasets) - 1) - endif - ListBox lb_datasets win=PearlDataExplorer, selRow=v_value - if (v_value >= 0) - variable ids = v_value - ControlInfo /W=PearlDataExplorer cb_dataset_preview - if (v_value) - preview_dataset(wtDatasets[ids]) - endif - endif - break - case -1: // control being killed - break - endswitch - - return 0 -End - -static function bp_dataset_prev(ba) : ButtonControl - STRUCT WMButtonAction &ba - - switch( ba.eventCode ) - case 2: // mouse up - ControlInfo /W=PearlDataExplorer lb_datasets - wave /t wtDatasets = $(s_datafolder + s_value) - v_value -= 1 - if (v_value < 0) - v_value = max(-1, numpnts(wtDatasets) - 1) - endif - ListBox lb_datasets win=PearlDataExplorer, selRow=v_value - if (v_value >= 0) - variable ids = v_value - ControlInfo /W=PearlDataExplorer cb_dataset_preview - if (v_value) - preview_dataset(wtDatasets[ids]) - endif - endif - break - case -1: // control being killed - break - endswitch - - return 0 -End - -static function lbp_datasets(lba) : ListBoxControl - STRUCT WMListboxAction &lba - - Variable row = lba.row - Variable col = lba.col - WAVE/T/Z listWave = lba.listWave - WAVE/Z selWave = lba.selWave - - switch( lba.eventCode ) - case -1: // control being killed - break - case 1: // mouse down - if (row >= 0) - ControlInfo /W=PearlDataExplorer cb_dataset_preview - if (v_value) - preview_dataset(listWave[row]) + if (sum(wSelectedFiles) == 1) + ControlInfo 
/W=PearlDataExplorer cb_file_preview + selected_file(listWave[row], v_value) + else + selected_file(listWave[row], 0) endif endif + update_controls() break case 3: // double click break @@ -1813,28 +1907,458 @@ static function lbp_datasets(lba) : ListBoxControl break endswitch + setdatafolder save_df return 0 End static function bp_attr_notebook(ba) : ButtonControl STRUCT WMButtonAction &ba - dfref saveDF = GetDataFolderDFR() + dfref save_df = GetDataFolderDFR() switch( ba.eventCode ) case 2: // mouse up setdatafolder $package_path - svar s_preview_file - wave /t /z attr_names - wave /t /z attr_values - if (WaveExists(attr_names)) - attributes_notebook(attr_names, attr_values, s_preview_file) - endif + wave wSelectedFiles + wave/t wtFiles + variable nn = numpnts(wSelectedFiles) + variable ii + for (ii = 0; ii < nn; ii += 1) + if (wSelectedFiles[ii]) + attributes_notebook(wtFiles[ii]) + break + endif + endfor break case -1: // control being killed break endswitch - setdatafolder saveDF + setdatafolder save_df return 0 End + + +static function hlp_setup() + dfref save_df = GetDataFolderDFR() + setdatafolder $package_path + + MakeListIntoHierarchicalList("PearlDataExplorer", "lb_contents", "hlp_contents_open", selectionMode=WMHL_SelectionSingle, pathSeparator="/") + + setdatafolder save_df + return 0 +end + +static function hl_contents_clear() + do + if (cmpstr(WMHL_GetItemForRowNumber("PearlDataExplorer", "lb_contents", 0), "") != 0) + WMHL_DeleteRowAndChildren("PearlDataExplorer", "lb_contents", 0) + else + break + endif + while (1) +end + +/// populate the contents list box with the internal directory of a HDF5 file +/// +/// @return the number of top-level objects +/// +static function hl_contents_update(file_df) + dfref file_df + + dfref save_df = GetDataFolderDFR() + setdatafolder $package_path + hl_contents_clear() + + variable nds + variable ids + string ds + string extra + string /g hl_contents_datasets = "" + + if (DataFolderRefStatus(file_df)) + svar 
/sdfr=file_df datasets = s_datasets + svar /sdfr=file_df datatypes = s_datasets_datatypes + svar /sdfr=file_df ranks = s_datasets_ranks + svar /sdfr=file_df dimensions = s_datasets_dimensions + + nds = ItemsInList(datasets, ";") + for (ids = 0; ids < nds; ids += 1) + ds = StringFromList(ids, datasets, ";") + extra = StringFromList(ids, dimensions, ";") + hl_contents_datasets = ReplaceStringByKey(ds, hl_contents_datasets, extra, ":", ";") + endfor + endif + + variable nobj = hl_add_objects("", hl_contents_datasets) + hl_expand_scans() + hl_default_selection() + setdatafolder save_df + + return nobj +end + +static function /df get_pshell_info(path_name, file_name, [dest_df]) + string path_name + string file_name + dfref dest_df + + dfref save_df = GetDataFolderDFR() + + if (!ParamIsDefault(dest_df)) + setdatafolder dest_df + else + setdatafolder $package_path + NewDataFolder /o /s file_info + endif + + dfref file_df = psh5_open_file(path_name, file_name, dest_df=GetDataFolderDFR()) + if (DataFolderRefStatus(file_df)) + psh5_load_general_group(file_df) + psh5_close_file(file_df) + endif + + setdatafolder save_df + return file_df +end + +/// populate the contents list box with the given hierarchical paths +/// +/// @return the number of top-level objects +/// +static function hl_add_objects(parent_path, objects) + string parent_path // e.g. "/a/b" + string objects // all objects that might appear in the list. e.g. 
"/a/b/c:col0|col1;/a/b/d:col0|col1;/d/e/f:col0|col1;" + + if (cmpstr(parent_path[0], "/") != 0) + parent_path = "/" + parent_path + endif + + variable nobj = ItemsInList(objects, ";") + variable iobj + string obj + string extra + variable nel + + string child_path = "" + string child_name = "" + string child_names = "" // e.g., "child1:1;child3:2;" + string extra_data = "" // e.g., "child1:col0|col1;child2:col0|col1;" + + // filter children of parent + for (iobj = 0; iobj < nobj; iobj += 1) + obj = StringFromList(iobj, objects, ";") + + if (cmpstr(obj[0, strlen(parent_path)-1], parent_path) == 0) + child_path = StringFromList(0, obj, ":") + child_path = child_path[strlen(parent_path), strlen(child_path)-1] + if (cmpstr(child_path[0], "/") == 0) + child_path = child_path[1, strlen(child_path)-1] + endif + child_name = StringFromList(0, child_path, "/") + nel = ItemsInList(child_path, "/") + child_names = ReplaceNumberByKey(child_name, child_names, nel) + if (nel == 1) + extra = RemoveListItem(0, obj, ":") + extra_data = ReplaceStringByKey(child_name, extra_data, extra) + endif + endif + endfor + + // add rows + variable row + variable children + nobj = ItemsInList(child_names) + for (iobj = 0; iobj < nobj; iobj += 1) + obj = StringFromList(iobj, child_names) + child_name = StringFromList(0, obj, ":") + nel = NumberByKey(child_name, child_names) + WMHL_AddObject("PearlDataExplorer", "lb_contents", parent_path[1, strlen(parent_path)], child_name, nel > 1) + if (nel == 1) + extra = StringByKey(child_name, extra_data) + row = WMHL_GetRowNumberForItem("PearlDataExplorer", "lb_contents", parent_path[1, strlen(parent_path)] + "/" + child_name) + if (row >= 0) + WMHL_ExtraColumnData("PearlDataExplorer", "lb_contents", 0, row, StringFromList(0, extra, "|"), 0) + endif + endif + endfor + + return nobj +end + +static function hl_expand_scans() + dfref save_df = GetDataFolderDFR() + setdatafolder $package_path + + svar hl_contents_datasets + variable nds = 
ItemsInList(hl_contents_datasets, ";") + variable ids + string sds + string scan + string scans = "" + for (ids = 0; ids < nds; ids += 1) + sds = StringFromList(ids, hl_contents_datasets, ";") + if (cmpstr(sds[0,4], "/scan", 0) == 0) + scan = StringFromList(1, sds, "/") + scans = ReplaceNumberByKey(scan, scans, 1) + endif + endfor + + variable nscans = ItemsInList(scans) + variable iscan + for (iscan = 0; iscan < nscans; iscan += 1) + scan = StringFromList(iscan, scans) + scan = StringFromList(0, scan, ":") + WMHL_OpenAContainer("PearlDataExplorer", "lb_contents", scan) + endfor + + setdatafolder save_df +end + +static function hl_default_selection() + variable row + row = WMHL_GetRowNumberForItem("PearlDataExplorer", "lb_contents", "scan 1") + if (row < 0) + row = WMHL_GetRowNumberForItem("PearlDataExplorer", "lb_contents", "scan1") + endif + if (row >= 0) + WMHL_SelectARow("PearlDataExplorer", "lb_contents", row, 1) + endif +end + +static function hlp_contents_open(HostWindow, ListControlName, ContainerPath) + String HostWindow, ListControlName, ContainerPath + + dfref save_df = GetDataFolderDFR() + setdatafolder $package_path + svar hl_contents_datasets + hl_add_objects(ContainerPath, hl_contents_datasets) + setdatafolder save_df +end + +static function hlp_contents_selection(HostWindow, ListControlName, SelectedItem, EventCode) + String HostWindow, ListControlName + String SelectedItem + Variable EventCode + + dfref save_df = GetDataFolderDFR() + setdatafolder $package_path + + switch (eventCode) + case 3: // double click + // todo: load dataset? + break + case 4: // cell selection + case 5: // cell selection plus shift key + update_controls() + break + endswitch + + setdatafolder save_df + return 0 +end + +/// open data folder corresponding to a file and data path +/// +/// the function tries to find where a given dataset has been loaded +/// and selects the corresponding data folder. 
+/// the data folder must exist (after previous import from the file), +/// else an error code is returned and the folder selection will be the closest accessible parent folder of the target. +/// +/// @param filename file name (without path). +/// h5 and otf.itx files are supported. +/// @param datapath dataset or group path inside the hdf5 file. +/// @return 0 if successful, +/// -1 if the file type is unknown +/// -2 if the data path can't be found in the tree. +/// +static function goto_dataset_folder(filename, datapath) + string filename + string datapath + + dfref save_df = GetDataFolderDFR() + setdatafolder $package_path + + variable ft = pearl_file_type(filename) + string parent_folder + string folder + string path + + switch(ft) + case 1: + case 2: + parent_folder = ad_suggest_foldername(filename) + path = "root:" + parent_folder + if (DataFolderExists(path)) + setdatafolder $path + else + return -2 + endif + + variable nparts = ItemsInList(datapath, "/") + variable ipart + for (ipart = 0; ipart < nparts; ipart += 1) + folder = StringFromList(ipart, datapath, "/") + path = ":" + ps_fix_folder_name(folder) + if (DataFolderExists(path)) + setdatafolder $path + endif + endfor + break + + case 3: + parent_folder = "root:" + itx_suggest_foldername(filename) + if (DataFolderExists(parent_folder)) + setdatafolder $parent_folder + else + return -2 + endif + break + + default: + // unsupported file type + return -1 + endswitch + + return 0 +end + +/// "goto DF" button +/// +/// the button selects the data folder of the selected file and dataset. +/// an error message is shown if the data folder doesn't exist. 
+/// 
+static function bp_goto_dataset(ba) : ButtonControl
+	STRUCT WMButtonAction &ba
+
+	switch( ba.eventCode )
+		case 2:	// mouse up
+			dfref save_df = GetDataFolderDFR()
+			setdatafolder $package_path
+			svar s_selected_file
+			svar hl_contents_datasets
+			string datapath = StringFromList(0, WMHL_SelectedObjectsList("PearlDataExplorer", "lb_contents"))
+			if (strsearch(hl_contents_datasets, datapath, 0) != 0)
+				datapath = datapath + "/"
+			endif
+			variable result = goto_dataset_folder(s_selected_file, datapath)
+			if (result != 0)
+				setdatafolder save_df
+				string msg
+				msg = "Can't find data folder. Has the file been loaded?"
+				DoAlert /T="Goto DF" 0, msg
+			endif
+			
+			break
+		case -1: // control being killed
+			break
+	endswitch
+
+	return 0
+End
+
+/// "display dataset" button
+///
+///
+static function bp_display_dataset(ba) : ButtonControl
+	STRUCT WMButtonAction &ba
+
+	switch( ba.eventCode )
+		case 2:	// mouse up
+			dfref save_df = GetDataFolderDFR()
+			setdatafolder $package_path
+			svar s_selected_file
+			svar hl_contents_datasets
+			string datapath = StringFromList(0, WMHL_SelectedObjectsList("PearlDataExplorer", "lb_contents"))
+			if (strsearch(hl_contents_datasets, datapath, 0) < 0)
+				// path leads to folder
+				return 0
+			endif
+			goto_dataset_folder(s_selected_file, "")
+			display_dataset(GetDataFolderDFR(), datapath)
+			setdatafolder save_df
+			break
+		case -1: // control being killed
+			break
+	endswitch
+
+	return 0
+End
+
+/// send file metadata to the ELOG panel
+///
+/// metadata is looked up in the following locations:
+/// 1. data folder if it exists
+/// 2. file info folder inside package folder
+/// 3. package folder if it contains preview data from the selected file (???) 
+/// +static function send_to_elog() + dfref save_df = GetDataFolderDFR() + + dfref preview_df = $package_path + svar /z /sdfr=preview_df s_selected_file + svar /z /sdfr=preview_df s_preview_file + svar /z /sdfr=preview_df s_preview_graph + + if (!SVAR_Exists(s_selected_file) || (strlen(s_selected_file) < 1)) + return 0 + endif + + // check data folder + variable result = -1 + result = goto_dataset_folder(s_selected_file, "") + if (result == 0) + dfref data_df = GetDataFolderDFR() + svar /sdfr=data_df /z authors + if (!svar_Exists(authors)) + result = -1 + endif + endif + + // file info folder + dfref infoDF = preview_df:file_info + if ((result != 0) && (DataFolderRefStatus(infoDF))) + svar /z /sdfr=infoDF s_filepath + if (SVAR_Exists(s_filepath) && (strsearch(s_filepath, s_selected_file, inf, 1) >= 0)) + dfref data_df = infoDF + result = 0 + endif + endif + + // check preview (package) folder + if ((result != 0) && (SVAR_Exists(s_preview_file) && (cmpstr(s_preview_file, s_selected_file) == 0))) + dfref data_df = preview_df + result = 0 + endif + + string graphname + if (SVAR_Exists(s_preview_graph) && (WinType(s_preview_graph) == 1)) + graphname = s_preview_graph + else + graphname = "" + endif + + if (result == 0) + set_elog_attributes(data_df, filename=s_selected_file, graphname=graphname) + string windowname + windowname = StringFromList(0, WinList("*ElogPanel*", ";", "WIN:64"), ";") + DoWindow /F $windowname + endif + + setdatafolder save_df +end + +static function bp_elog(ba) : ButtonControl + STRUCT WMButtonAction &ba + + switch( ba.eventCode ) + case 2: // mouse up + send_to_elog() + break + case -1: // control being killed + break + endswitch + + return 0 +End + diff --git a/pearl/pearl-elog.ipf b/pearl/pearl-elog.ipf index cc1ab4c..e31d0bf 100644 --- a/pearl/pearl-elog.ipf +++ b/pearl/pearl-elog.ipf @@ -273,14 +273,14 @@ function elog_init_pearl_templates() // attributes (persistent) // available attributes - string /g attributes = 
"author;project;pgroup;sample;source;task;technique;file;valid;" + string /g attributes = "author;project;p-group;sample;source;task;technique;file;valid;" // controls corresponding to attributes // prefix determines the control type: sv_ = setvariable (string), pm_ = popup menu, cb = check box string /g controls = "sv_author;sv_project;sv_pgroup;sv_sample;pm_source;pm_task;pm_technique;sv_file;cb_valid;" // attributes with fixed options, value item declares the options string string /g options = "source=sources;task=tasks;technique=techniques" // attributes which must be defined - string /g required_attributes = "author;project;pgroup;sample;source;task;technique;valid" + string /g required_attributes = "author;project;sample;source;task;technique;valid" // option lists string /g sources = "Manual Entry;PShell;Scienta Data;SScan Data;Prosilica Data;OTF Data;Beamline Status;LEED Data;QMS Data;Matrix Data;Igor Pro;Other" @@ -293,7 +293,7 @@ function elog_init_pearl_templates() // attributes (persistent) // available attributes - string /g attributes = "author;project;pgroup;sample;program;revision;machine;job;experiment;source path;result path;valid" + string /g attributes = "author;project;p-group;sample;program;revision;machine;job;experiment;source path;result path;valid" // controls corresponding to attributes // prefix determines the control type: sv_ = setvariable (string), pm_ = popup menu, cb = check box string /g controls = "sv_author;sv_project;sv_pgroup;sv_sample;pm_program;sv_revision;pm_machine;sv_job;sv_experiment;sv_sourcepath;sv_resultpath;cb_valid" diff --git a/pearl/pearl-pshell-import.ipf b/pearl/pearl-pshell-import.ipf index 1b2482b..68fa39e 100644 --- a/pearl/pearl-pshell-import.ipf +++ b/pearl/pearl-pshell-import.ipf @@ -1,14 +1,16 @@ #pragma TextEncoding = "UTF-8" #pragma rtGlobals=3 // Use modern global access method and strict wave access. 
-#pragma IgorVersion = 6.36 +#pragma IgorVersion = 8.00 #pragma ModuleName = PearlPShellImport -#pragma version = 1.11 +#pragma version = 2.1 +#if IgorVersion() < 9.00 #include +#endif #include "pearl-compat" #include "pearl-gui-tools" #include "pearl-area-import" -// copyright (c) 2013-21 Paul Scherrer Institut +// copyright (c) 2013-22 Paul Scherrer Institut // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. @@ -19,40 +21,21 @@ /// @brief import data from PShell /// @ingroup ArpesPackage /// -/// /// HDF5 file import from the PShell data acquisition program. -/// the main import functions are: /// -/// - psh5_load_complete() -/// load all scans and datasets from a file. +/// the module provides two main entry functions: /// -/// - psh5_load_reduced() -/// load the ScientaImage dataset of the first scan and reduce its dimensionality. +/// - psh5_load() for almost all data loading tasks including data reduction. +/// - psh5_preview() to load a simple 1d or 2d preview of the first and most relevant dataset in the file. /// -/// - psh5_load_scan_complete() -/// load all datasets of a selected scan. +/// @version up to igor 8, this module requires the HDF5 XOP which must be enabled manually. +/// as of igor 9 and later, HDF5 is built in. /// -/// - psh5_load_scan_preview() -/// load a preview of a selected scan. -/// -/// - psh5_load_dataset() -/// load a selected dataset. -/// -/// - psh5_load_dataset_reduced() -/// load a selected dataset and reduce its dimensionality. -/// -/// the following helper functions are also needed: -/// -/// - psh5_open_file() -/// - psh5_close_file() -/// - psh5_list_scans() -/// - psh5_list_scan_datasets() -/// - psh5_load_scan_meta() -/// - psh5_load_scan_attrs() +/// @version in version 2.0, the interface has changed significantly. 
/// /// @author matthias muntwiler, matthias.muntwiler@psi.ch /// -/// @copyright 2013-21 Paul Scherrer Institut @n +/// @copyright 2013-22 Paul Scherrer Institut @n /// Licensed under the Apache License, Version 2.0 (the "License"); @n /// you may not use this file except in compliance with the License. @n /// You may obtain a copy of the License at @@ -80,56 +63,429 @@ strconstant kDataDimLabel = "data" strconstant kPreviewDatasets = "ImageEnergyDistribution;ScientaSpectrum;ScientaImage;Counts;SampleCurrent;" /// List of datasets that must be loaded to determine the axis scaling of a Scienta image -strconstant kScientaScalingDatasets = "LensMode;ScientaChannelBegin;ScientaChannelEnd;ScientaSliceBegin;ScientaSliceEnd;" +strconstant kScientaScalingDatasets = "LensMode;ScientaChannelBegin;ScientaChannelEnd;ScientaSliceBegin;ScientaSliceEnd;Eph;" -/// List of datasets that should be transposed upon loading +/// List of diagnostic datasets that are normally loaded with a scan +strconstant kEssentialDiagnostics = "ManipulatorX;ManipulatorY;ManipulatorZ;ManipulatorTheta;ManipulatorTilt;ManipulatorPhi;MonoEnergy;" + +/// List of datasets that must be transposed upon loading strconstant kTransposedDatasets = "ScientaImage;" -/// multiply scienta detector intensity by this value to get actual counts. -constant kDetectorSensitivity = 1 +constant kDSCPositioners = 0x0001 +constant kDSCDetectors = 0x0002 +constant kDSCScientaScaling = 0x0004 +constant kDSCPreview = 0x0008 +constant kDSCEssentialDiags = 0x0010 +constant kDSCAttrs = 0x0020 +constant kDSCDiags = 0x0040 +constant kDSCSnaps = 0x0080 +constant kDSCMeta = 0x0100 +constant kDSCMonitors = 0x0200 +constant kDSCRegions = 0x0400 +constant kDSCOther = 0x8000 +constant kDSCAll = 0xffff -/// open a HDF5 file created by the PShell data acquisition program and prepare the data folder. 
+ +// ====== main import functions ====== + +/// main data loading function /// -/// the function opens a specified or interactively selected HDF5 file, -/// creates a data folder `$ANickName` under root, -/// and changes to the new data folder. +/// load the requested elements from the given file. /// -/// the file must be closed by psh5_close_file() after use. +/// scans, regions and datasets are additive. +/// wildcards can be used to select multiple or all datasets. /// -/// @param ANickName destination folder name (top level under root). +/// classes are subtractive: only datasets of specified classes are loaded. +/// by default, only positioners, detectors, scaling and essential diagnostics are loaded. /// -/// @param APathName igor symbolic path name. can be empty if the path is specified in FileName or a dialog box should be displayed +/// essential diags, scaling, positioners related to requested detectors are always loaded /// -/// @param AFileName if empty a dialog box shows up +/// data reduction (if specified) applies to 3d data, see psh5_load_dataset_reduced() for details. /// -/// @return ID of open HDF5 file from HDF5OpenFile. -/// zero if an error occurred. +/// @param path_name igor symbolic path name. can be empty if the path is specified in file_name or a dialog box should be displayed +/// +/// @param file_name if empty a dialog box shows up +/// +/// @param dest_df destination folder reference. +/// if dest_df is specified, data is loaded into this folder. +/// else, a new folder derived from the file name is created under root: +/// +/// @param scans semicolon-separated list of scan paths to load. +/// scan groups are at the top level, their name consists of "scan", an optional space and a number. +/// all datasets in the group and sub-groups are considered for loading unless excluded by other arguments. +/// if empty, no datasets are loaded based on their relation to a scan. +/// names are matched by Igor's StringMatch function. 
+/// the matching is insensitive to case and spaces. +/// to load all scans, pass "/scan*". +/// the leading slash before "scan" can be omitted. +/// +/// @param regions semicolon-separated list of region paths to load. +/// region groups are children of scan groups, their name consists of "region", an optional space and a number. +/// all datasets in the group and sub-groups are considered for loading unless excluded by other arguments. +/// if empty, no datasets are loaded based on their relation to a region. +/// names are matched by Igor's StringMatch function. +/// the matching is insensitive to case and spaces. +/// to load all regions of scan 1, pass "/scan1/region*". +/// to load regions 1 of all scans, pass "/scan*/region1". +/// the leading slash before "scan" can be omitted. +/// +/// @param datasets semicolon-separated list of dataset paths to load. +/// this allows to load individual datasets. +/// names are matched by Igor's StringMatch function against full dataset paths. +/// to load all datasets named "SampleCurrent", pass "*/SampleCurrent". +/// the matching is insensitive to case and spaces. +/// additional datasets may be loaded for scaling. +/// +/// @param classes filter datasets (that were selected by the scans, regions and datasets arguments) by class. +/// this allows, for example, to exclude the diagnostics. +/// note that scaling datasets are always loaded. +/// the value is a bit-wise switch, typically the arithmetic disjunction of kDSCXxxx constants. +/// by default, only positioners, detectors, scaling and essential diagnostics are loaded. +/// to completely load all datasets, specify kDSCAll. +/// +/// @param max_rank load only datasets with lower or equal rank. +/// +/// @return data folder reference of the file-level data folder. same as dest_df if specified. /// /// @return global string s_filepath in new data folder contains the full file path on disk. 
/// /// @return global string s_scanpaths in new data folder contains a list of scan groups inside the file. /// -function psh5_open_file(ANickName, APathName, AFileName) - string ANickName - string APathName - string AFileName +/// @return global string s_loaded_datasets in new data folder contains a list of loaded datasets. +/// the items are full group paths of the HDF5 file. +/// dataset paths can be mapped to loaded data folders using the psh5_dataset_to_folder function. +/// +function /df psh5_load(path_name, file_name, scans, regions, datasets, [classes, max_rank, reduction_func, reduction_params, dest_df]) + string path_name + string file_name + string scans + string regions + string datasets + variable classes + variable max_rank + string reduction_func + string reduction_params + dfref dest_df - setdatafolder root: - newdatafolder /s /o $("root:" + ANickName) - dfref fileDF = GetDataFolderDFR() - - variable fileID - HDF5OpenFile /P=$APathName /R fileID as AFileName - if (v_flag == 0) - string /g s_filepath - string /g s_scanpaths - s_filepath = s_path + s_filename - s_scanpaths = psh5_list_scans(fileID) + dfref save_df = GetDataFolderDFR() + variable timerRefNum = startMSTimer + + if (ParamIsDefault(classes) || (classes == 0)) + classes = kDSCPositioners | kDSCDetectors | kDSCScientaScaling | kDSCEssentialDiags + endif + variable essential_classes = kDSCPositioners | kDSCScientaScaling | kDSCEssentialDiags + + if (ParamIsDefault(dest_df) || !DataFolderRefStatus(dest_df)) + dest_df = psh5_open_file(path_name, file_name) else - fileID = 0 + dest_df = psh5_open_file(path_name, file_name, dest_df=dest_df) + endif + if (ParamIsDefault(reduction_func)) + reduction_func = "" + endif + if (ParamIsDefault(reduction_params)) + reduction_params = "" endif - return fileID + if (DataFolderRefStatus(dest_df)) + setdatafolder dest_df + psh5_load_general_group(dest_df) + + // datasets contained in file + svar /sdfr=dest_df file_datasets = s_datasets + + // datasets 
contained in file up to allowed rank + string ranked_datasets = "" + if (ParamIsDefault(max_rank)) + ranked_datasets = file_datasets + else + svar /sdfr=dest_df file_ranks = s_datasets_ranks + ranked_datasets = psh5_filter_datasets_rank(file_datasets, file_ranks, 0, max_rank) + endif + + string matching_datasets = "" + string matching_essentials = "" + string scan_datasets = "" + string region_datasets = "" + string free_datasets = "" + string selected_datasets = "" + string essential_datasets = "" + + variable i_item + variable n_items + string item + + // select datasets belonging to selected scans + n_items = ItemsInList(scans, ";") + for (i_item = 0; i_item < n_items; i_item += 1) + item = StringFromList(i_item, scans, ";") + if (cmpstr(item[0,3], "scan") == 0) + item = "/" + item + endif + item = ReplaceString("//", item + "/*", "/") + matching_datasets = psh5_match_datasets(ranked_datasets, item) + scan_datasets = scan_datasets + matching_datasets + endfor + + // select datasets belonging to selected regions + n_items = ItemsInList(regions, ";") + for (i_item = 0; i_item < n_items; i_item += 1) + item = StringFromList(i_item, regions, ";") + if (cmpstr(item[0,3], "scan") == 0) + item = "/" + item + endif + item = ReplaceString("//", item + "/*", "/") + matching_datasets = psh5_match_datasets(ranked_datasets, item) + region_datasets = region_datasets + matching_datasets + endfor + + // free select datasets + n_items = ItemsInList(datasets, ";") + for (i_item = 0; i_item < n_items; i_item += 1) + item = StringFromList(i_item, datasets, ";") + if (cmpstr(item[0,3], "scan") == 0) + item = "/" + item + endif + matching_datasets = psh5_match_datasets(ranked_datasets, item) + free_datasets = free_datasets + matching_datasets + endfor + + selected_datasets = scan_datasets + region_datasets + free_datasets + + string filtered_datasets = "" + string diag_datasets = "" + string selected_scans = psh5_extract_scan_paths(selected_datasets) + variable i_scan + variable 
n_scans = ItemsInList(selected_scans) + string scan + string selected_regions = psh5_extract_region_paths(selected_datasets) + variable i_region + variable n_regions = ItemsInList(selected_regions) + string region + string positioners + string detectors + + // datasets directly under one of the selected regions + region_datasets = "" + for (i_region = 0; i_region < n_regions; i_region += 1) + region = StringFromList(i_region, selected_regions, ";") + region_datasets = region_datasets + GrepList(file_datasets, "(?i)^" + region + "[[:alpha:]]+$") + endfor + + // filter selected datasets by class and add essential dependencies + // each scan may have specific positioners and detectors + for (i_scan = 0; i_scan < n_scans; i_scan += 1) + scan = StringFromList(i_scan, selected_scans, ";") + + // potentially interesting diagnostics of current scan and selected regions + diag_datasets = psh5_match_datasets(file_datasets, scan + "*") + diag_datasets = psh5_match_dataset_classes(diag_datasets, kDSCAttrs | kDSCDiags | kDSCSnaps) + diag_datasets = diag_datasets + GrepList(file_datasets, "(?i)^" + scan + "[[:alpha:]]+$") + diag_datasets = diag_datasets + psh5_match_datasets(region_datasets, scan + "*") + + // explicit positioners and detectors set by pshell + setdatafolder dest_df + dfref scan_df = psh5_create_folders(scan) + setdatafolder scan_df + psh5_load_scan_meta(dest_df, scan) + wave /t /z ScanWritables + wave /t /z ScanReadables + if (WaveExists(ScanWritables)) + positioners = twave2list(ScanWritables, ";") + else + positioners = "" + endif + if (WaveExists(ScanReadables)) + detectors = twave2list(ScanReadables, ";") + else + detectors = "" + endif + + // filtering by classes + matching_datasets = psh5_match_dataset_classes(selected_datasets, classes, positioners=positioners, detectors=detectors) + + // add essential diags + if (strlen(matching_datasets) > 1) + essential_datasets = psh5_match_dataset_classes(diag_datasets, essential_classes, positioners=positioners, 
detectors=detectors) + endif + + // scaling datasets before detectors because data reduction needs the scales + filtered_datasets = essential_datasets + filtered_datasets + matching_datasets + endfor + + // load the datasets + setdatafolder dest_df + string /g s_loaded_datasets = "" + s_loaded_datasets = psh5_load_datasets(dest_df, filtered_datasets, reduction_func=reduction_func, reduction_params=reduction_params) + + // apply scaling by scan + for (i_scan = 0; i_scan < n_scans; i_scan += 1) + scan = StringFromList(i_scan, selected_scans, ";") + ps_scale_datasets(psh5_dataset_to_folder(dest_df, scan)) + endfor + + psh5_close_file(dest_df) + + // performance reporting + if (timerRefNum >= 0) + setdatafolder dest_df + variable /g psh5_perf_secs + psh5_perf_secs = stopMSTimer(timerRefNum) / 1e6 + endif + endif + + setdatafolder save_df + return dest_df +end + +/// load preview +/// +/// load information about the file structure and a preview dataset +/// +function /df psh5_preview(path_name, file_name, [dest_df, preview_datasets]) + string path_name + string file_name + dfref dest_df + string preview_datasets + + dfref save_df = GetDataFolderDFR() + + if (ParamIsDefault(dest_df)) + dest_df = psh5_open_file(path_name, file_name) + else + dest_df = psh5_open_file(path_name, file_name, dest_df=dest_df) + endif + if (ParamIsDefault(preview_datasets)) + preview_datasets = kPreviewDatasets + endif + variable essential_classes = kDSCPositioners | kDSCScientaScaling + + if (DataFolderRefStatus(dest_df)) + setdatafolder dest_df + psh5_load_general_group(dest_df) + + // select dataset based on preference + svar /sdfr=dest_df file_datasets = s_datasets + svar /sdfr=dest_df file_datasets_ranks = s_datasets_ranks + string selected_datasets = "" + string essential_datasets = "" + string scan_datasets = "" + string filtered_datasets = "" + + variable nds = ItemsInList(preview_datasets, ";") + variable ids + string ds + for (ids = 0; ids < nds; ids += 1) + ds = StringFromList(ids, 
preview_datasets, ";") + selected_datasets = psh5_filter_datasets_rank(file_datasets, file_datasets_ranks, 1, 2) + selected_datasets = psh5_match_datasets(selected_datasets, "/scan*" + ds) + if (strlen(selected_datasets) > 1) + selected_datasets = StringFromList(0, selected_datasets, ";") + break + endif + endfor + + // add essential dependencies + if (strlen(selected_datasets) > 1) + string selected_scans = psh5_extract_scan_paths(selected_datasets) + string scan + string positioners + + scan = StringFromList(0, selected_scans, ";") + scan_datasets = psh5_match_datasets(file_datasets, scan + "*") + + psh5_load_scan_meta(dest_df, scan) + wave /t /z ScanWritables + if (WaveExists(ScanWritables)) + positioners = twave2list(ScanWritables, ";") + else + positioners = "" + endif + + essential_datasets = psh5_match_dataset_classes(scan_datasets, essential_classes, positioners=positioners) + filtered_datasets = essential_datasets + selected_datasets + + // load the datasets + psh5_load_datasets(dest_df, filtered_datasets, create_folders=0) + ps_scale_datasets(dest_df) + string /g s_preview_dataset = StringFromList(0, selected_datasets, ";") + string /g s_preview_wave = StringFromList(ItemsInList(s_preview_dataset, "/") - 1, s_preview_dataset, "/") + endif + + psh5_close_file(dest_df) + endif + + setdatafolder save_df + return dest_df +end + +/// open a HDF5 file created by the PShell data acquisition program and prepare the data folder. +/// +/// the function opens a specified or interactively selected HDF5 file, +/// and loads general information about the file +/// including a list of contained datasets. +/// +/// data can be loaded into an existing or new data folder under root. +/// +/// the file must be closed by psh5_close_file() after use. +/// the HDF5 file ID is stored in the global variable file_id until the file is closed. +/// +/// @param path_name igor symbolic path name. 
can be empty if the path is specified in FileName or a dialog box should be displayed +/// +/// @param file_name if empty a dialog box shows up +/// +/// @param dest_df destination folder reference. +/// if dest_df is specified, data is loaded into this folder. +/// else, by default, a new folder derived from the file name is created in root: +/// +/// @return the return value of the function is a data folder reference of the created data folder. +/// +/// @return global variable file_id contains ID number of open HDF5 file from HDF5OpenFile. +/// zero if an error occurred. +/// +/// @return global string s_filepath in new data folder contains the full file path on disk. +/// +/// @return global string s_scanpaths in new data folder contains a list of scan groups inside the file. +/// +function /df psh5_open_file(path_name, file_name, [dest_df]) + string path_name + string file_name + dfref dest_df + + dfref save_df = GetDataFolderDFR() + + variable fid + HDF5OpenFile /P=$path_name /R fid as file_name + if (v_flag == 0) + if (!ParamIsDefault(dest_df)) + setdatafolder dest_df + else + string dest_name = ad_suggest_foldername(s_filename, sourcename="psh") + setdatafolder root: + newdatafolder /s /o $("root:" + dest_name) + endif + dfref file_df = GetDataFolderDFR() + + variable /g file_id = fid + string /g s_filepath + string /g s_scanpaths + string /g s_datasets + string datatypes + string ranks + string dimensions + s_filepath = s_path + s_filename + s_scanpaths = psh5_list_scans(file_id) + s_datasets = psh5_list_all_datasets(file_id) + [datatypes, ranks, dimensions] = psh5_list_dataset_info(file_id, s_datasets) + string /g s_datasets_datatypes = datatypes + string /g s_datasets_ranks = ranks + string /g s_datasets_dimensions = dimensions + else + dfref file_df = $"" + endif + + setdatafolder save_df + return file_df end /// close a HDF5 file opened by psh5_open_file. @@ -137,352 +493,88 @@ end /// this function just closes the HDF5 file. 
/// no change is made to the loaded data.
///
/// the function looks for a numeric variable named `file_id` in the specified data folder
/// or, recursively, in its parent folders, closes the corresponding HDF5 file
/// and kills the variable.
/// if no such folder or variable can be found, the function does nothing.
///
/// @param file_df	data folder reference of open HDF5 file from psh5_open_file().
///		the reference may also point to a child folder;
///		the function searches all parent folders for a file_id variable.
///
/// @note on the command line, data folder references can be specified using the $-notation
///		like `$"foldername"`. the current folder is written as `$":"`.
///
function psh5_close_file(file_df)
	dfref file_df

	if (DataFolderRefStatus(file_df))
		nvar /sdfr=file_df /z file_id
		if (nvar_Exists(file_id))
			// close the file and remove the stale id
			HDF5CloseFile /z file_id
			file_id = 0
			KillVariables /z file_id
		else
			// not found here - walk up one level and try again
			// (appending ":" to a full folder path selects the parent folder)
			dfref up_df = $(GetDataFolder(1, file_df) + ":")
			if (DataFolderRefStatus(up_df))
				psh5_close_file(up_df)
			endif
		endif
	endif
end
// === datasets and paths ===

/// convert a text wave to a separated list.
///
/// @param wt	text wave
/// @param sep	separator string
///
/// @return string list of all wave elements in wave order
///
static function /s twave2list(wt, sep)
	wave /t wt
	string sep

	string list = ""
	variable nn = numpnts(wt)
	variable ii
	for (ii = 0; ii < nn; ii += 1)
		list = AddListItem(wt[ii], list, sep, inf)
	endfor

	return list
end

/// convert a numeric wave to a separated list.
///
/// @param w	numeric wave
/// @param format	printf-style format for one element, e.g. "%g"
/// @param sep	separator string
///
/// @return string list of all formatted wave elements in wave order
///
static function /s wave2list(w, format, sep)
	wave w
	string format
	string sep

	string list = ""
	variable nn = numpnts(w)
	variable ii
	string str
	for (ii = 0; ii < nn; ii += 1)
		sprintf str, format, w[ii]
		list = AddListItem(str, list, sep, inf)
	endfor

	return list
end

/// list scan groups of a PShell data file.
///
/// the function returns a list of all top-level groups whose name starts with "scan".
///
/// @param file_id	ID of open HDF5 file from psh5_open_file().
///
/// @return semicolon-separated list of group paths.
/// -function /s psh5_list_scans(fileID) - variable fileID +function /s psh5_list_scans(file_id) + variable file_id - HDF5ListGroup /F /TYPE=1 fileID, "/" + HDF5ListGroup /F /TYPE=1 file_id, "/" variable ig variable ng = ItemsInList(S_HDF5ListGroup, ";") @@ -499,252 +591,550 @@ function /s psh5_list_scans(fileID) return scans end -/// list datasets of a PShell scan group. +/// list all datasets in a file /// -/// the function returns a list of all datasets of the selected scan. -/// this does not include datasets from the attributes sub-group. +/// the function returns a list of all datasets in a file. +/// each dataset is listed by its full path like, e.g., "/scan 1/region 1/dataset 1". /// -/// @note in a future version, an option may be introduced to filter datasets by function (_Readable_ and/or _Writable_). +/// this function wraps a one-line HDF5 operation and is provided just to be more memorable. /// -/// @param fileID ID of open HDF5 file from psh5_open_file(). +/// @param file_id ID of open HDF5 file from psh5_open_file(). /// -/// @param scanpath path to the scan group in the HDF5 file, e.g. "/scan 1". +/// @return semicolon-separated list of absolute dataset paths. /// -/// @return semicolon-separated list of dataset paths. -/// -/// @version since version 1.03 this function returns paths relative to scanpath. 
function /s psh5_list_all_datasets(file_id)
	variable file_id

	// recursive listing of all objects of type "dataset", reported with full paths
	HDF5ListGroup /F /R /TYPE=2 /Z file_id, "/"
	if (v_flag != 0)
		return ""
	endif
	return S_HDF5ListGroup
end

/// list data types and dimensions of datasets
///
/// this function has multiple returns.
///
/// @param file_id	ID of open HDF5 file from psh5_open_file().
///
/// @return semicolon-separated list of (simplified) datatypes.
///		datatypes are marked as "i" (integer), "f" (float), "s" (string) or "?" (unknown).
///
/// @return semicolon-separated list of ranks (number of dimensions).
///
/// @return semicolon-separated list of dimensions.
///		each item is a comma-separated list of dimension sizes.
///		items do not contain trailing commas.
///
/// the three returned lists are index-aligned with the input `datasets` list:
/// item k of each list describes dataset k, even if its info could not be read.
///
function [string datatypes, string ranks, string dimensions] psh5_list_dataset_info(variable file_id, string datasets)
	variable nds = ItemsInList(datasets, ";")
	variable ids
	string sds
	STRUCT HDF5DataInfo di
	InitHDF5DataInfo(di)
	variable err
	variable idim
	string sdims
	datatypes = ""
	ranks = ""
	dimensions = ""

	for (ids = 0; ids < nds; ids += 1)
		sds = StringFromList(ids, datasets, ";")
		err = HDF5DatasetInfo(file_id, sds, 0, di)
		if (err == 0)
			// map the HDF5 datatype class onto a one-letter code
			switch (di.datatype_class)
				case H5T_INTEGER:
					datatypes = AddListItem("i", datatypes, ";", ids)
					break
				case H5T_FLOAT:
					datatypes = AddListItem("f", datatypes, ";", ids)
					break
				case H5T_STRING:
					datatypes = AddListItem("s", datatypes, ";", ids)
					break
				default:
					datatypes = AddListItem("?", datatypes, ";", ids)
					break
			endswitch

			ranks = AddListItem(num2str(di.ndims), ranks, ";", ids)

			sdims = ""
			for (idim = 0; idim < di.ndims; idim += 1)
				sdims = AddListItem(num2str(di.dims[idim]), sdims, ",", idim)
			endfor
			// drop the trailing comma added by AddListItem
			if (strlen(sdims) > 1)
				sdims = sdims[0, strlen(sdims)-2]
			endif
			dimensions = AddListItem(sdims, dimensions, ";", ids)
		else
			// bug fix: append placeholders so the output lists stay
			// index-aligned with the input list (psh5_filter_datasets_rank
			// pairs datasets[k] with ranks[k])
			datatypes = AddListItem("?", datatypes, ";", ids)
			ranks = AddListItem("", ranks, ";", ids)
			dimensions = AddListItem("", dimensions, ";", ids)
		endif
	endfor
end

/// filter a list of datasets by string matching
///
/// this function can be used to extract certain dataset paths
/// from a list of all datasets in a file.
/// the matching is insensitive to spaces and case.
///
/// example match strings:
/// - `"*/scan1/region1/*"` match all datasets in scan 1, region 1
/// - `"!*/diags/*"` remove diagnostics from list
///
/// @param datasets	semicolon separated list of dataset paths
/// @param match	match string for igor's StringMatch function
///
/// @return list of matching datasets (original spelling, including spaces)
///
function /s psh5_match_datasets(datasets, match)
	string datasets
	string match

	// compare space-less copies but return the original items
	string cmp_list = ReplaceString(" ", datasets, "")
	string cmp_match = ReplaceString(" ", match, "")

	string sep = ";"
	variable seplen = strlen(sep)
	variable count = ItemsInList(cmp_list, sep)
	variable ii
	string item
	variable pos = 0
	string result = ""

	for (ii = 0; ii < count; ii += 1)
		// offset-based iteration avoids quadratic list scanning
		item = StringFromList(0, cmp_list, sep, pos)
		pos += strlen(item) + seplen
		if (StringMatch(item, cmp_match))
			item = StringFromList(ii, datasets, sep, 0)
			result = AddListItem(item, result, sep, inf)
		endif
	endfor

	return result
end

/// filter datasets by rank
///
/// @param datasets	semicolon-separated list of datasets to be checked.
///
/// @param ranks	semicolon-separated list of ranks of each dataset.
///
/// @return filtered dataset list.
function /s psh5_filter_datasets_rank(datasets, ranks, min_rank, max_rank)
	string datasets
	string ranks
	variable min_rank
	variable max_rank

	string sep = ";"
	variable seplen = strlen(sep)
	variable count = ItemsInList(datasets, sep)
	variable ii
	string item
	variable pos = 0
	variable item_rank
	string result = ""

	for (ii = 0; ii < count; ii += 1)
		item = StringFromList(0, datasets, sep, pos)
		pos += strlen(item) + seplen
		// keep items whose rank lies within [min_rank, max_rank]
		item_rank = str2num(StringFromList(ii, ranks, sep))
		if ((item_rank >= min_rank) && (item_rank <= max_rank))
			result = AddListItem(item, result, sep, inf)
		endif
	endfor

	return result
end

/// remove duplicate items from list
///
/// @param list	semicolon-separated list of strings.
///		strings can contain any printable character except the semicolon.
///
/// @return list of strings with duplicates (second and further instances) removed.
///		all remaining items retain the position of their first occurrence in the original list.
///		the function uses Igor's FindDuplicates operation.
///
static function /s unique_strings(list)
	string list

	string sep = ";"
	variable seplen = strlen(sep)
	variable nn = ItemsInList(list, sep)
	variable ii
	string item
	variable offset = 0

	// bug fix: guard the empty list - FindDuplicates can't operate on a zero-point wave
	if (nn == 0)
		return ""
	endif

	make /n=(nn) /t /free wt_in
	for (ii = 0; ii < nn; ii += 1)
		// offset-based iteration avoids quadratic list scanning
		item = StringFromList(0, list, sep, offset)
		offset += strlen(item) + seplen
		wt_in[ii] = item
	endfor

	FindDuplicates /Z /FREE /RT=wt_out wt_in
	// bug fix: if FindDuplicates failed, wt_out doesn't exist -
	// fall back to returning the input unchanged instead of crashing
	if (!WaveExists(wt_out))
		return list
	endif

	return twave2list(wt_out, ";")
end

/// trim dataset paths to the scan part
///
/// return dataset paths stripped to the form /scan*/.
///
/// the function matches each path for a scan token in the first path element
/// and strips off the remaining path.
/// if there are no scan-based datasets, the function returns an empty string.
///
/// the function operates on a single path or a semicolon-separated list of paths.
/// the items of the returned list are unique.
///
/// @param datasets	semicolon separated list of dataset paths
///
/// @return list of scan paths (no duplicates)
///
function /s psh5_extract_scan_paths(datasets)
	string datasets

	string sep = ";"
	variable seplen = strlen(sep)
	variable count = ItemsInList(datasets, sep)
	variable ii
	string path
	string scan
	string trimmed
	variable pos = 0
	string result = ""

	for (ii = 0; ii < count; ii += 1)
		path = StringFromList(0, datasets, sep, pos)
		pos += strlen(path) + seplen
		// normalize to an absolute path
		if (cmpstr(path[0], "/") != 0)
			path = "/" + path
		endif

		// accept only paths whose first element is a scan group
		scan = StringFromList(1, path, "/")
		if (StringMatch(scan, "scan*"))
			trimmed = "/" + scan + "/"
			// add each scan path only once (case-insensitive comparison)
			if (WhichListItem(trimmed, result, ";", 0, 0) < 0)
				result = AddListItem(trimmed, result, ";", inf)
			endif
		endif
	endfor

	return result
end

/// trim dataset paths to the scan/region part
///
/// return dataset paths stripped to the form /scan*/region*/.
///
/// the function matches each path for scan and region tokens in the first two path elements
/// and strips off the remainder.
/// if there are no region-based datasets, the function returns an empty string.
///
/// the function operates on a single path or a semicolon-separated list of paths.
/// the items of the returned list are unique.
///
/// @param datasets	semicolon separated list of dataset paths
///
/// @return list of scan/region paths (no duplicates)
///
function /s psh5_extract_region_paths(datasets)
	string datasets

	string sep = ";"
	variable seplen = strlen(sep)
	variable count = ItemsInList(datasets, sep)
	variable ii
	string path
	string scan
	string region
	string trimmed
	variable pos = 0
	string result = ""

	for (ii = 0; ii < count; ii += 1)
		path = StringFromList(0, datasets, sep, pos)
		pos += strlen(path) + seplen
		// normalize to an absolute path
		if (cmpstr(path[0], "/") != 0)
			path = "/" + path
		endif

		// accept only paths of the form /scan*/region*/...
		scan = StringFromList(1, path, "/")
		region = StringFromList(2, path, "/")
		if (StringMatch(scan, "scan*") && StringMatch(region, "region*"))
			trimmed = "/" + scan + "/" + region + "/"
			// add each region path only once (case-insensitive comparison)
			if (WhichListItem(trimmed, result, ";", 0, 0) < 0)
				result = AddListItem(trimmed, result, ";", inf)
			endif
		endif
	endfor

	return result
end

/// filter a list of datasets by classification
///
/// @param datasets	semicolon separated list of dataset paths
///
/// @param classes	dataset classes.
///		arithmetic OR of the kDSCXxxx constants.
///
/// @param positioners	semicolon-separated list of positioner dataset paths
///		(relative to the scan group). default: empty.
///
/// @param detectors	semicolon-separated list of detector dataset paths
///		(relative to the scan group). default (or empty string): kPreviewDatasets.
///
/// @return list of dataset paths whose classification matches `classes` (in input order)
///
function /s psh5_match_dataset_classes(datasets, classes, [positioners, detectors])
	string datasets
	variable classes
	string positioners
	string detectors

	if (ParamIsDefault(positioners))
		positioners = ""
	endif
	if (ParamIsDefault(detectors) || (strlen(detectors) == 0))
		detectors = kPreviewDatasets
	endif

	string sep = ";"
	variable seplen = strlen(sep)
	variable nds = ItemsInList(datasets, sep)
	variable ids
	variable offset = 0
	string ds
	variable nparts
	string ds_parent
	string ds_name
	string ds_scan_rel
	variable ds_class
	string result = ""

	for (ids = 0; ids < nds; ids += 1)
		ds = StringFromList(0, datasets, sep, offset)
		offset += strlen(ds) + seplen

		// split the path into immediate parent group and dataset name
		nparts = ItemsInList(ds, "/")
		ds_parent = StringFromList(nparts - 2, ds, "/")
		ds_name = StringFromList(nparts - 1, ds, "/")
		// path relative to the scan group, used for positioner/detector matching
		if (cmpstr(ds[0,4], "/scan") == 0)
			ds_scan_rel = RemoveListItem(0, ds[1, strlen(ds) - 1], "/")
		else
			ds_scan_rel = ds
		endif
		ds_class = 0

		// classify by the name of the parent group
		if (strlen(ds_parent) > 0)
			ds_class = ds_class | (cmpstr(ds_parent, "attr") == 0 ? kDSCAttrs : 0)
			ds_class = ds_class | (cmpstr(ds_parent, "attrs") == 0 ? kDSCAttrs : 0)
			ds_class = ds_class | (cmpstr(ds_parent, "diags") == 0 ? kDSCDiags : 0)
			ds_class = ds_class | (cmpstr(ds_parent, "snaps") == 0 ? kDSCSnaps : 0)
			ds_class = ds_class | (cmpstr(ds_parent, "meta") == 0 ? kDSCMeta : 0)
			ds_class = ds_class | (cmpstr(ds_parent, "monitors") == 0 ? kDSCMonitors : 0)
			ds_class = ds_class | (cmpstr(ds_parent[0,5], "region") == 0 ? kDSCRegions : 0)
		endif

		// classify by the dataset name
		if (strlen(ds_name) > 0)
			ds_class = ds_class | (WhichListItem(ds_scan_rel, positioners, sep, 0, 0) >= 0 ? kDSCPositioners : 0)
			ds_class = ds_class | (WhichListItem(ds_scan_rel, detectors, sep, 0, 0) >= 0 ? kDSCDetectors : 0)
			ds_class = ds_class | (WhichListItem(ds_name, kPreviewDatasets, sep, 0, 0) >= 0 ? kDSCPreview : 0)
			ds_class = ds_class | (WhichListItem(ds_name, kScientaScalingDatasets, sep, 0, 0) >= 0 ? kDSCScientaScaling : 0)
			ds_class = ds_class | (WhichListItem(ds_name, kEssentialDiagnostics, sep, 0, 0) >= 0 ? kDSCEssentialDiags : 0)
		endif

		if (ds_class == 0)
			ds_class = kDSCOther
		endif

		if (ds_class & classes)
			result = AddListItem(ds, result, sep, inf)
		endif
	endfor

	return result
end

/// create all data folders along a dataset path
///
/// if the path ends with a slash, the path is interpreted as a group path,
/// and each part is mapped to a data folder.
/// else, the last part of the path is the name of a dataset
/// and will not produce a folder.
///
/// the path will always be interpreted as starting from the root,
/// regardless of whether it starts with a slash or not.
///
/// spaces are removed from folder names, and the names are cleaned up to produce simple names.
///
/// a string variable named "s_hdf5_group" is added to each created folder
/// and contains the incremental path.
///
/// the first child folder is created in the current data folder.
/// at the end, the lowest child folder is selected and returned as the function result.
///
/// @param datasetpath	hdf5 group path to dataset, e.g. "/scan 1/region 1/ScientaImage".
///
/// @return data folder reference of the lowest child folder.
///
function /df psh5_create_folders(datasetpath)
	string datasetpath

	// strip a leading slash; a trailing slash marks a pure group path,
	// so append a dummy dataset name to make the last group count as a folder
	if (cmpstr(datasetpath[0], "/") == 0)
		datasetpath = datasetpath[1, strlen(datasetpath)-1]
	endif
	if (cmpstr(datasetpath[strlen(datasetpath)-1], "/") == 0)
		datasetpath += "dummy"
	endif

	variable ngroups = ItemsInList(datasetpath, "/") - 1
	variable igroup
	string group
	string inc_path = "/"

	for (igroup = 0; igroup < ngroups; igroup += 1)
		group = StringFromList(igroup, datasetpath, "/")
		group = ps_fix_folder_name(group)
		NewDataFolder /o/s $group
		// record the incremental (cleaned) group path in each folder
		inc_path += group
		inc_path += "/"
		string /g s_hdf5_group = inc_path
	endfor

	return GetDataFolderDFR()
end

/// map dataset path to datafolder path
///
/// if the path ends with a slash, the path is interpreted as a group path,
/// and each part maps to a data folder.
/// if the last part of the path is the name of a dataset, it is discarded.
///
/// spaces are removed from folder names, and the names are cleaned up to produce simple names.
///
/// the path is interpreted as relative to the specified parent data folder,
/// regardless of whether it starts with a slash or not.
///
/// @param datasetpath	hdf5 group path to dataset, e.g. "/scan 1/region 1/ScientaImage".
///
/// @param parent_df	parent data folder
///
/// @return data folder reference
///
function /df psh5_dataset_to_folder(parent_df, datasetpath)
	dfref parent_df
	string datasetpath

	// strip a leading slash; a trailing slash marks a pure group path,
	// so append a dummy dataset name which is discarded below
	if (cmpstr(datasetpath[0], "/") == 0)
		datasetpath = datasetpath[1, strlen(datasetpath)-1]
	endif
	if (cmpstr(datasetpath[strlen(datasetpath)-1], "/") == 0)
		datasetpath += "dummy"
	endif

	variable ngroups = ItemsInList(datasetpath, "/") - 1
	variable igroup
	string group
	string rel_path = ""

	// build a colon-separated relative folder path from the cleaned group names
	for (igroup = 0; igroup < ngroups; igroup += 1)
		group = StringFromList(igroup, datasetpath, "/")
		group = ps_fix_folder_name(group)
		if (igroup)
			rel_path += ":"
		endif
		rel_path += group
	endfor

	dfref out_df = parent_df:$rel_path
	return out_df
end

/// convert HDF5 group name to data folder name and fix compatibility issues
///
/// spaces are removed, "attrs" is renamed to "attr",
/// and the result is passed through PearlCleanupName.
///
function /s ps_fix_folder_name(group_name)
	string group_name

	string folder_name = ReplaceString(" ", group_name, "")
	if (cmpstr(folder_name, "attrs") == 0)
		folder_name = "attr"
	endif

	return PearlCleanupName(folder_name)
end

// ====== import functions ======

/// load multiple datasets from open file
///
/// @param file_df	data folder reference of open HDF5 file from psh5_open_file().
///		if undefined, the current datafolder is assumed.
///
/// @param create_folders	if 1 (default), data folders according to the group path are created.
///		if 0, the dataset is loaded into the current folder.
///		the latter option should be used with care because datasets with same names may be overwritten.
///
/// @param reduction_func	data reduction function.
///		three-dimensional datasets can be reduced in dimensionality by on-the-fly data reduction.
///		by default (or if empty string), no reduction is applied.
///		see @ref psh5_load_dataset_reduced().
///
/// @param reduction_params	parameter string for the reduction function.
+/// +/// @return (string) semicolon-separated list of loaded datasets +/// +function /s psh5_load_datasets(file_df, datasets, [create_folders, reduction_func, reduction_params]) + dfref file_df + string datasets + variable create_folders + string reduction_func + string reduction_params + + if (!DataFolderRefStatus(file_df)) + dfref file_df = GetDataFolderDFR() + endif + if (ParamIsDefault(create_folders)) + create_folders = 1 + endif + if (ParamIsDefault(reduction_func)) + reduction_func = "" + endif + if (ParamIsDefault(reduction_params)) + reduction_params = "" endif - return wavenames + dfref save_df = GetdataFolderDFR() + + datasets = unique_strings(datasets) + variable nds = ItemsInList(datasets, ";") + variable ids + string ds + string loaded_datasets = "" + string loaded_waves = "" + + for (ids = 0; ids < nds; ids += 1) + SetDataFolder file_df + ds = StringFromList(ids, datasets, ";") + loaded_waves = psh5_load_dataset(file_df, ds, create_folders=create_folders, reduction_func=reduction_func, reduction_params=reduction_params) + if (strlen(loaded_waves) > 1) + loaded_datasets = loaded_datasets + ds + ";" + endif + endfor + + setdatafolder save_df + return loaded_datasets end /// load a dataset from an open PShell HDF5 file. @@ -754,504 +1144,190 @@ end /// /// - the metadata (HDF5 attributes) are loaded into the wave note, cf. psh5_load_dataset_meta(). /// - dimension labels are set according the dataset name, cf. ps_set_dimlabels(). -/// - wave scaling is set if the necessary scan attributes have been loaded and the `set_scale` option is selected (default). -/// the attributes must be loaded by psh5_load_scan_meta() and psh5_load_scan_attrs() (attr_sets=2). /// -/// the dataset is loaded into the current data folder unless datasetname contains a region specifier. -/// in the latter case, the dataset is loaded into sub-folder with the name of the region. 
+/// the dataset is loaded into the current data folder or a tree based on the group path given in the datasetpath argument. /// the function returns from the original data folder. /// -/// @param fileID ID of open HDF5 file from psh5_open_file(). +/// only numeric and string data types are supported, string datasets must have rank 1. /// -/// @param scanpath path to the scan group in the HDF5 file, e.g. "/scan 1". +/// @param file_df data folder reference of open HDF5 file from psh5_open_file(). +/// if undefined, the current datafolder is assumed. /// -/// @param datasetname name of the dataset. -/// the name of the loaded wave is a cleaned up version of the dataset name. -/// the name can include the region name as a relative path, e.g. "region1/ScientaSpectrum". -/// in this case, the dataset is loaded into a sub-folder named "region1". +/// @param datasetpath group path and name of the dataset, e.g. "/scan 1/ScientaImage". +/// HDF5 groups map to igor data folders below the current data folder, +/// the wave is placed into the leaf folder. +/// the names of groups and waves are cleaned up to produce simple names, +/// in particular, spaces and other illegal characters are removed. /// -/// @param set_scale by default, the function tries to set the wave scaling if the attributes have been loaded. -/// if multiple datasets are loaded from a file, -/// it is more efficient to set the scaling of all loaded datasets at the end by calling ps_scale_datasets(). -/// @arg 1 (default) set the wave scaling. -/// @arg 0 do not set the wave scaling. +/// @param create_folders if 1 (default), data folders according to the group path are created. +/// if 0, the dataset is loaded into the current folder. /// -/// @return name of loaded wave if successful. empty string otherwise. +/// @param reduction_func data reduction function. +/// three-dimensional datasets can be reduced in dimensionality by on-the-fly data reduction. 
+/// by default (or if empty string), no reduction is applied. +/// see @ref psh5_load_dataset_reduced(). /// -/// @version this function supports regions as of version 1.03. +/// @param reduction_params parameter string for the reduction function. /// -function /s psh5_load_dataset(fileID, scanpath, datasetname, [set_scale]) - variable fileID - string scanpath - string datasetname - variable set_scale - - if (ParamIsDefault(set_scale)) - set_scale = 1 - endif +/// @return semicolon-separated list of loaded wave names. +/// multiple waves are loaded if the dataset has a compound data type. +/// in that case the wave name is a concatenation of the dataset and field names (see HDF5LoadData). +/// +function /s psh5_load_dataset(file_df, datasetpath, [create_folders, reduction_func, reduction_params]) + dfref file_df + string datasetpath + variable create_folders + string reduction_func + string reduction_params dfref base_df = GetDataFolderDFR() - - string datasetpath - datasetpath = scanpath + "/" + datasetname - datasetpath = ReplaceString("//", datasetpath, "/") - - string regionname - string regionpath - if (ItemsInList(datasetname, "/") >= 2) - regionname = StringFromList(0, datasetname, "/") - regionpath = ReplaceString("//", scanpath + "/" + regionname, "/") - datasetname = RemoveListItem(0, datasetname, "/") - NewDataFolder /o/s $regionname - else - regionname = "" - regionpath = scanpath + if (!DataFolderRefStatus(file_df)) + dfref file_df = GetDataFolderDFR() endif - + nvar /sdfr=file_df file_id + + if (ParamIsDefault(create_folders)) + create_folders = 1 + endif + if (create_folders) + psh5_create_folders(datasetpath) + endif + if (ParamIsDefault(reduction_func)) + reduction_func = "" + endif + if (ParamIsDefault(reduction_params)) + reduction_params = "" + endif + STRUCT HDF5DataInfo di // Defined in HDF5 Browser.ipf. 
InitHDF5DataInfo(di) - variable err = HDF5DatasetInfo(fileID, datasetpath, 0, di) + variable err = HDF5DatasetInfo(file_id, datasetpath, 0, di) if (err != 0) - print "error accessing detector/data" + // error accessing data return "" endif - string dataname - if (di.ndims < 2) - HDF5LoadData /O /Q /Z fileID, datasetpath - dataname = StringFromList(0, S_waveNames) - else - dataname = psh5_load_dataset_slabs(fileID, regionpath, datasetname) - endif + variable numeric = 0 + variable compound = 0 - wave /z data = $dataname - if (waveexists(data)) - psh5_load_dataset_meta(fileID, regionpath, datasetname, data) - ps_set_dimlabels(data) - if (set_scale) - ps_scale_dataset(data) + switch (di.datatype_class) + case H5T_INTEGER: + case H5T_FLOAT: + numeric = 1 + break + case H5T_STRING: + numeric = 0 + break + case H5T_COMPOUND: + compound = 1 + break + case H5T_TIME: + case H5T_BITFIELD: + case H5T_OPAQUE: + case H5T_REFERENCE: + case H5T_ENUM: + case H5T_VLEN: + case H5T_ARRAY: + default: + // unsupported data type + return "" + endswitch + + string wave_names = "" + if (di.ndims <= 2) + string datasetname = StringFromList(ItemsInList(datasetpath, "/") - 1, datasetpath, "/") + variable transpose = WhichListItem(datasetname, kTransposedDatasets) >= 0 + HDF5LoadData /O /Q /Z /TRAN=(transpose) file_id, datasetpath + wave_names = S_waveNames + elseif (numeric) + if (exists(reduction_func) == 6) + wave_names = psh5_load_dataset_reduced(file_df, datasetpath, $reduction_func, reduction_params, create_folders=0) + else + wave_names = psh5_load_dataset_slabs(file_df, datasetpath, create_folders=0) endif - else - dataname = "" endif + + variable nw = ItemsInList(wave_names, ";") + variable iw + string sw + string loaded_waves = "" + for (iw = 0; iw < nw; iw += 1) + sw = StringFromList(iw, wave_names) + wave /z w = $sw + if (WaveExists(w)) + loaded_waves = loaded_waves + sw + ";" + ps_set_dimlabels(w) + psh5_load_dataset_meta(file_df, datasetpath, w) + endif + endfor setdatafolder 
base_df - return dataname -end - -/// select the preferred dataset from a list of available datasets. -/// -/// @param file_datasets semicolon-separated list of datasets that are available in the file. -/// the items may include a path separated by slashes "/". -/// only the last component of the path is checked. -/// -/// @param pref_datasets semicolon-separated list of preferred datasets. -/// the items of the list are match strings for the Igor StringMatch function. -/// the first matching dataset is loaded from the file. -/// if no match is found, the first file dataset is selected. -/// -/// @return selected dataset. -/// -static function /s select_dataset(file_datasets, pref_datasets) - string file_datasets - string pref_datasets - - variable index - variable nds = ItemsInList(file_datasets) - variable ids - string sds = "" - string mds = "" - variable np = ItemsInList(pref_datasets) - variable ip - string sp - variable found = 0 - if (nds > 0) - for (ip = 0; ip < np; ip += 1) - for (ids = 0; ids < nds; ids += 1) - sds = StringFromList(ids, file_datasets) - index = ItemsInList(sds, "/") - 1 - mds = StringFromList(index, sds, "/") - sp = StringFromList(ip, pref_datasets) - if (StringMatch(mds, sp)) - found = 1 - break - endif - endfor - if (found) - break - endif - endfor - if (!found) - ids = 0 - sds = StringFromList(ids, file_datasets) - endif - endif - - return sds -end - -/// load a preview dataset from an open PShell HDF5 file. -/// -/// if the dataset has a maximum of two dimensions, the function loads it at once. -/// if it has more than two dimension, the function selects and loads one two-dimensional slab. -/// -/// -/// @param fileID ID of open HDF5 file from psh5_open_file(). -/// -/// @param scanpath path to the scan group in the HDF5 file, e.g. "/scan 1". -/// -/// @param set_scale by default, the function tries to set the wave scaling if the attributes have been loaded. 
-/// if multiple datasets are loaded from a file, -/// it is more efficient to set the scaling of all loaded datasets at the end by calling ps_scale_datasets(). -/// @arg 1 (default) set the wave scaling. -/// @arg 0 do not set the wave scaling. -/// -/// @param pref_datasets semicolon-separated list of preferred datasets. -/// the items of the list are match strings for the Igor StringMatch function. -/// the first matching dataset is loaded from the file. -/// if no match is found, the first dataset listed in the file is loaded. -/// if empty, a hard-coded default preference list is used. -/// -/// @return name of loaded wave if successful. empty string otherwise. -/// -function /s psh5_load_scan_preview(fileID, scanpath, [set_scale, pref_datasets]) - variable fileID - string scanpath - variable set_scale - string pref_datasets - - if (ParamIsDefault(set_scale)) - set_scale = 1 - endif - if (ParamIsDefault(pref_datasets) || (strlen(pref_datasets) == 0)) - pref_datasets = kPreviewDatasets - endif - - dfref saveDF = GetDataFolderDFR() - dfref dataDF = saveDF - - string datasets = psh5_list_scan_datasets(fileID, scanpath, include_regions=1) - string datasetname = select_dataset(datasets, pref_datasets) - string datasetpath - datasetpath = scanpath + "/" + datasetname - datasetpath = ReplaceString("//", datasetpath, "/") - - STRUCT HDF5DataInfo di // Defined in HDF5 Browser.ipf. 
- InitHDF5DataInfo(di) - variable err = HDF5DatasetInfo(fileID, datasetpath, 0, di) - if (err != 0) - print "error accessing detector/data" - return "" - endif - - string dataname - if (di.ndims < 2) - HDF5LoadData /O /Q /Z fileID, datasetpath - dataname = StringFromList(0, S_waveNames) - wave /z data = $dataname - if (waveexists(data)) - ps_set_dimlabels(data) - endif - else - variable dim2start = 0 - variable dim2count = 1 - variable dim3start = 0 - variable dim3count = 1 - if (di.ndims >= 3) - dim2start = floor(di.dims[2] / 2) - dim2count = 1 - endif - if (di.ndims >= 4) - dim3start = floor(di.dims[3] / 2) - dim3count = 1 - endif - - dataname = psh5_load_dataset_slab(fileID, scanpath, datasetname, dim2start, dim2count, dim3start, dim3count) - endif - - wave /z data = $dataname - if (waveexists(data)) - if (set_scale) - setdatafolder dataDF - string positioners - string positioner - string positionerpath - positioners = psh5_load_scan_meta(fileID, scanpath) - wave /t /z ScanWritables - if (waveexists(ScanWritables) && (numpnts(ScanWritables) >= 1)) - positioner = ScanWritables[0] - if (strlen(positioner) > 0) - positionerpath = scanpath + "/" + positioner - positionerpath = ReplaceString("//", positionerpath, "/") - HDF5LoadData /O /Q /Z fileID, positionerpath - endif - endif - - setdatafolder dataDF - newdatafolder /o/s attr - psh5_load_scan_attrs(fileID, scanpath, attr_sets=2) - setdatafolder dataDF - ps_scale_dataset(data) - endif - else - dataname = "" - endif - - return dataname -end - -/// load a longitudinal section of a scan from an open PShell HDF5 file. -/// -/// the dataset must have three dimensions. -/// -/// -/// @param fileID ID of open HDF5 file from psh5_open_file(). -/// -/// @param scanpath path to the scan group in the HDF5 file, e.g. "/scan 1". -/// -/// @param dim reserved, must be 0. -/// -/// @param set_scale by default, the function tries to set the wave scaling if the attributes have been loaded. 
-/// if multiple datasets are loaded from a file, -/// it is more efficient to set the scaling of all loaded datasets at the end by calling ps_scale_datasets(). -/// @arg 1 (default) set the wave scaling. -/// @arg 0 do not set the wave scaling. -/// -/// @param pref_datasets semicolon-separated list of preferred datasets. -/// the items of the list are match strings for the Igor StringMatch function. -/// the first matching dataset is loaded from the file. -/// if no match is found, the first dataset listed in the file is loaded. -/// if empty, a hard-coded default preference list is used. -/// -/// @return name of loaded wave if successful. empty string otherwise. -/// -/// @warning EXPERIMENTAL: this function is under development. -/// -function /s psh5_load_scan_section(fileID, scanpath, dim, [set_scale, pref_datasets]) - variable fileID - string scanpath - variable dim - variable set_scale - string pref_datasets - - // select first dimension (future argument) - // 0 = first dimension is x axis (energy of scienta image) - dim = 0 - - if (ParamIsDefault(set_scale)) - set_scale = 1 - endif - if (ParamIsDefault(pref_datasets) || (strlen(pref_datasets) == 0)) - pref_datasets = kPreviewDatasets - endif - - dfref saveDF = GetDataFolderDFR() - dfref dataDF = saveDF - - string datasets = psh5_list_scan_datasets(fileID, scanpath) - string datasetname = select_dataset(datasets, pref_datasets) - string datasetpath - datasetpath = scanpath + "/" + datasetname - datasetpath = ReplaceString("//", datasetpath, "/") - string dataname = StringFromList(ItemsInList(datasetpath, "/") - 1, datasetpath, "/") - string destname = dataname[0,29] + num2str(dim) - - STRUCT HDF5DataInfo di // Defined in HDF5 Browser.ipf. 
- InitHDF5DataInfo(di) - variable err = HDF5DatasetInfo(fileID, datasetpath, 0, di) - if (err != 0) - print "error accessing detector/data" - return "" - elseif (di.ndims != 3) - print "error: rank of dataset != 3" - return "" - endif - - variable idx, idy, idz, idt - variable transpose = WhichListItem(dataname, kTransposedDatasets) >= 0 - if (transpose) - idx = 1 - idy = 0 - else - idx = 0 - idy = 1 - endif - idz = 2 - idt = 3 - - variable nx, ny, nz - nx = di.dims[idx] - ny = di.dims[idy] - nz = di.dims[idz] - - HDF5MakeHyperslabWave("slab", max(di.ndims, 4)) - wave slab - slab[][%Start] = 0 - slab[][%Stride] = 1 - slab[][%Count] = 1 - slab[][%Block] = 1 - - if (dim == 0) - slab[idy][%Start] = floor(ny / 2) - slab[idx][%Block] = nx - make /n=(nx,nz) /o $destname - else - slab[idx][%Start] = floor(nx / 2) - slab[idy][%Block] = ny - make /n=(ny,nz) /o $destname - endif - slab[idz][%Block] = nz - wave data = $destname - data = 0 - - HDF5LoadData /O /Q /Z /SLAB=slab /N=slabdata fileID, datasetpath - if (!v_flag) - wave slabdata - if (transpose) - data += slabdata[0][p][q][0] - else - data += slabdata[p][0][q][0] - endif - endif - killwaves /z slab, slabdata - - if (set_scale) - make /n=(1,1,1) /free dummy - ps_set_dimlabels2(dummy, dataname) - setdimlabel 0, -1, $GetDimLabel(dummy, dim, -1), data - setdimlabel 1, -1, $kScanDimLabel, data - - setdatafolder dataDF - string positioners - string positioner - string positionerpath - positioners = psh5_load_scan_meta(fileID, scanpath) - wave /t /z ScanWritables - if (waveexists(ScanWritables) && (numpnts(ScanWritables) >= 1)) - positioner = ScanWritables[0] - if (strlen(positioner) > 0) - positionerpath = scanpath + "/" + positioner - positionerpath = ReplaceString("//", positionerpath, "/") - HDF5LoadData /O /Q /Z fileID, positionerpath - endif - endif - - setdatafolder dataDF - newdatafolder /o/s attr - killwaves /a/z - psh5_load_scan_attrs(fileID, scanpath, attr_sets=2) - setdatafolder dataDF - ps_scale_dataset(data) - 
endif - - return destname -end - -/// load metadata of a PShell dataset. -/// -/// "metadata" are the HDF5 attributes attached to the scan dataset. -/// -/// data is added to the wave note. -/// -/// @param fileID ID of open HDF5 file from psh5_open_file(). -/// -/// @param datapath path to the containing group in the HDF5 file. -/// path separator is the slash "/". -/// -/// @param datasetname name of the dataset. -/// may include relative path. -/// -/// @param datawave metadata is added to the wave note of this wave. -/// -/// @return 0 if successful, non-zero if an error occurred. -/// -function psh5_load_dataset_meta(fileID, datapath, datasetname, datawave) - variable fileID - string datapath - string datasetname - wave datawave - - dfref saveDF = GetDataFolderDFR() - SetDataFolder NewFreeDataFolder() - - string datasetpath = datapath + "/" + datasetname - datasetpath = ReplaceString("//", datasetpath, "/") - string wnote - - HDF5LoadData /O /Q /Z /A="Writable Dimension" /N=WriteDim fileID, datasetpath - if (!v_flag) - wave WriteDim - // scan dimension starts at 1 - sprintf wnote, "ScanDimension=%u", WriteDim[0] - Note datawave, wnote - endif - - HDF5LoadData /O /Q /Z /A="Writable Index" /N=WriteIndex fileID, datasetpath - if (!v_flag) - wave WriteIndex - sprintf wnote, "WriteableIndex=%u", WriteIndex[0] - Note datawave, wnote - endif - - HDF5LoadData /O /Q /Z /A="Readable Index" /N=ReadIndex fileID, datasetpath - if (!v_flag) - wave ReadIndex - sprintf wnote, "ReadableIndex=%u", ReadIndex[0] - Note datawave, wnote - endif - - setdatafolder saveDF - return 0 + return loaded_waves end /// load a dataset slab-wise from the open PShell HDF5 file. /// /// the function loads the dataset image by image using the hyperslab option. +/// the wave is loaded into the current data folder. /// -/// @param fileID ID of open HDF5 file from psh5_open_file(). +/// @param file_df data folder reference of open HDF5 file from psh5_open_file(). 
+/// if undefined, the current datafolder is assumed. /// -/// @param datapath path to the containing group in the HDF5 file. -/// path separator is the slash "/". +/// @param datasetpath group path and name of the dataset. +/// the dataset name defines the name of the loaded wave (after cleaning up). /// -/// @param dataset name of the dataset. -/// also defines the name of the loaded wave. +/// @param progress select whether a progress window is displayed during the process. +/// @arg 1 (default) show progress window. +/// @arg 0 do not show progress window. /// -/// @param progress select whether a progress window is displayed during the process. -/// @arg 1 (default) show progress window. -/// @arg 0 do not show progress window. +/// @param create_folders if 1 (default), data folders according to the group path are created. +/// if 0, the dataset is loaded into the current folder. /// -/// @return name of loaded wave if successful. empty string otherwise. +/// @return semicolon-separated list of loaded wave names. +/// in the current version, the function returns zero or one wave, as it does not support compound types. 
/// -function /s psh5_load_dataset_slabs(fileID, datapath, datasetname, [progress]) - variable fileID - string datapath - string datasetname +function /s psh5_load_dataset_slabs(file_df, datasetpath, [create_folders, progress]) + dfref file_df + string datasetpath + variable create_folders variable progress - + + if (ParamIsDefault(create_folders)) + create_folders = 1 + endif if (ParamIsDefault(progress)) progress = 1 endif - variable result = 0 - string datasetpath - string datawavename - datasetpath = datapath + "/" + datasetname - datasetpath = ReplaceString("//", datasetpath, "/") - datawavename = StringFromList(ItemsInList(datasetpath, "/") - 1, datasetpath, "/") + if (!DataFolderRefStatus(file_df)) + dfref file_df = GetDataFolderDFR() + endif + nvar /sdfr=file_df file_id + variable result = 0 + string datawavename = StringFromList(ItemsInList(datasetpath, "/") - 1, datasetpath, "/") + + if (create_folders) + psh5_create_folders(datasetpath) + endif + STRUCT HDF5DataInfo di // Defined in HDF5 Browser.ipf. 
InitHDF5DataInfo(di) - variable err = HDF5DatasetInfo(fileID, datasetpath, 0, di) + variable err = HDF5DatasetInfo(file_id, datasetpath, 0, di) if (err != 0) - print "error accessing detector/data" + print "error accessing dataset", datasetpath return "" endif if (di.ndims < 2) - print "error: rank of dataset < 2" + print "error: rank of dataset < 2", datasetpath return "" elseif (di.ndims < 3) progress = 0 endif + if ((di.datatype_class != H5T_INTEGER) && (di.datatype_class != H5T_FLOAT)) + print "error: unsupported datatype", datasetpath + return "" + endif variable idx, idy, idz, idt, izt variable transpose = WhichListItem(datawavename, kTransposedDatasets) >= 0 @@ -1278,7 +1354,7 @@ function /s psh5_load_dataset_slabs(fileID, datapath, datasetname, [progress]) nzt = nz * nt izt = 0 if (progress) - display_progress_panel("HDF5 Import", "Loading data...", nzt) + display_progress_panel("HDF5 Import", "Loading " + datasetpath + "...", nzt) endif // load data image by image @@ -1296,7 +1372,7 @@ function /s psh5_load_dataset_slabs(fileID, datapath, datasetname, [progress]) for (it = 0; it < nt; it += 1) slab[idz][%Start] = iz slab[idt][%Start] = it - HDF5LoadData /O /Q /Z /SLAB=slab /N=slabdata fileID, datasetpath + HDF5LoadData /O /Q /Z /SLAB=slab /N=slabdata file_id, datasetpath wave slabdata // 2D, 3D, or 4D with singletons if (transpose) data[][][iz][it] = slabdata[q][p][0][0] @@ -1325,64 +1401,121 @@ function /s psh5_load_dataset_slabs(fileID, datapath, datasetname, [progress]) killwaves /z slab, slabdata if (!result) ps_set_dimlabels(data) - return datawavename + return NameOfWave(data) + ";" else killwaves /z data return "" endif end -/// load a single image from the open PShell data file. +// ====== data reduction ====== + +/// load a dataset with reduced dimensionality /// -/// the function can average over a region in the extra dimensions. 
+/// the function loads the dataset image by image using the hyperslab option +/// and applies a custom reduction function like numeric integration, curve fitting, etc. to each image. +/// the results from the reduction function are written to the `ReducedData1`, `ReducedData2`, etc. waves. +/// the raw data are discarded. /// -/// @param fileID ID of open HDF5 file from psh5_open_file(). +/// example reduction functions can be found in the @ref PearlScientaPreprocess module. +/// they must implement the @ref adh5_default_reduction() interface. /// -/// @param datapath path to the containing group in the HDF5 file. -/// path separator is the slash "/". +/// by default, the reduction function is called in separate threads to reduce the total loading time. +/// (psh5_load() reports the total run time in the global variable psh5_perf_secs.) +/// the effect varies depending on the balance between file loading (image size) +/// and data processing (complexity of the reduction function). /// -/// @param dataset name of the dataset. -/// also defines the name of the loaded wave. +/// the function loads images (as hyperslabs) one by one and passes them to the reduction function. +/// only a limited number of images are held in the queue at a time to limit memory use. +/// for debugging the reduction function, multi-threading can be disabled +/// (also remove threadsafe attributes from reduce_slab_image() and the reduction function!) /// -/// @param dim2start 2nd dimension coordinate of the first image -/// set to 0 if dimension may not be present +/// if the reduction function requires the image waves to be scaled properly, +/// the attributes must have been loaded by psh5_load_scan_attrs() before. +/// in this case, the scales of the result waves are also set by the function. +/// otherwise, the results can also be scaled by ps_scale_dataset() later. 
/// -/// @param dim2count number of subsequent images to average -/// set to 1 if dimension may not be present +/// @param file_df data folder reference of open HDF5 file from psh5_open_file(). +/// if undefined, the current datafolder is assumed. /// -/// @param dim3start 3rd dimension coordinate of the first image -/// set to 0 if dimension may not be present +/// @param scanpath path to scan group in the HDF5 file. /// -/// @param dim3count number of subsequent images to average -/// set to 1 if dimension may not be present +/// @param datasetname name of the dataset. +/// this must currently be "ScientaImage", other data is not supported. +/// the name of the loaded wave is a cleaned up version of the dataset name. +/// the name can include the region name as a relative path, e.g. "region1/ScientaImage". +/// in this case, the dataset is loaded into a sub-folder named "region1". /// -/// @return name of loaded wave if successful. empty string otherwise. +/// @param reduction_func custom data reduction function. +/// this can be any user-defined function which has the same parameters as @ref adh5_default_reduction. +/// some reduction functions are predefined in the @ref PearlScientaPreprocess module. /// -function /s psh5_load_dataset_slab(fileID, datapath, datasetname, dim2start, dim2count, dim3start, dim3count) - variable fileID - string datapath - string datasetname - variable dim2start - variable dim2count - variable dim3start - variable dim3count - +/// @param reduction_params parameter string for the reduction function. +/// +/// @param create_folders if 1 (default), data folders according to the group path are created. +/// if 0, the dataset is loaded into the current folder. +/// +/// @param progress progress window. +/// @arg 1 (default) show progress window +/// @arg 0 do not show progress window +/// +/// @param nthreads +/// @arg -1 (default) use as many threads as there are processor cores (in addition to main thread). 
+/// @arg 0 use main thread only (for debugging and profiling). +/// @arg >= 1 use a fixed number of (additional) threads. +/// +/// @return semicolon-separated list of the loaded dataset `ReducedData1`, `ReducedData2`, etc. if successful. +/// auxiliary waves, scan positions, attributes are loaded but not listed in the string. +/// empty string if an error occurred. +/// error messages are printed to the history. +/// +function /s psh5_load_dataset_reduced(file_df, datasetpath, reduction_func, reduction_params, [create_folders, progress, nthreads]) + dfref file_df string datasetpath - string datawavename - datasetpath = datapath + "/" + datasetname - datasetpath = ReplaceString("//", datasetpath, "/") - datawavename = StringFromList(ItemsInList(datasetpath, "/") - 1, datasetpath, "/") + funcref adh5_default_reduction reduction_func + string reduction_params + variable create_folders + variable progress + variable nthreads + + if (ParamIsDefault(create_folders)) + create_folders = 1 + endif + if (ParamIsDefault(progress)) + progress = 1 + endif + if (ParamIsDefault(nthreads)) + nthreads = -1 + endif + + dfref base_df = GetDataFolderDFR() + if (!DataFolderRefStatus(file_df)) + dfref file_df = GetDataFolderDFR() + endif + nvar /sdfr=file_df file_id + + variable result = 0 + string datawavename = StringFromList(ItemsInList(datasetpath, "/") - 1, datasetpath, "/") + string wavenames = "" - STRUCT HDF5DataInfo di + if (create_folders) + psh5_create_folders(datasetpath) + endif + + STRUCT HDF5DataInfo di // Defined in HDF5 Browser.ipf. 
InitHDF5DataInfo(di) - variable err = HDF5DatasetInfo(fileID, datasetpath, 0, di) + variable err = HDF5DatasetInfo(file_id, datasetpath, 0, di) if (err != 0) print "error accessing detector/data" - return "" + result = -1 + return wavenames endif if (di.ndims < 2) print "error: rank of dataset < 2" - return "" + result = -2 + return wavenames + elseif (di.ndims < 3) + progress = 0 endif variable idx, idy, idz, idt @@ -1396,14 +1529,18 @@ function /s psh5_load_dataset_slab(fileID, datapath, datasetname, dim2start, dim endif idz = 2 idt = 3 - - variable nx, ny + + variable nx, ny, nz, nt, nzt nx = di.dims[idx] ny = di.dims[idy] - make /n=(nx,ny) /o $datawavename - wave data = $datawavename - data = 0 - + nz = di.dims[idz] + nt = di.dims[idt] + // adjust singleton dimensions + nz = max(nz, 1) + nt = max(nt, 1) + nzt = nz * nt + + // load data image by image HDF5MakeHyperslabWave("slab", max(di.ndims, 4)) wave slab slab[][%Start] = 0 @@ -1413,35 +1550,469 @@ function /s psh5_load_dataset_slab(fileID, datapath, datasetname, dim2start, dim slab[idx][%Block] = nx slab[idy][%Block] = ny - variable iz, it - variable navg = 0 - variable dim2end = dim2start + dim2count - 1 - variable dim3end = dim3start + dim3count - 1 - for (iz = dim2start; iz <= dim2end; iz += 1) - for (it = dim3start; it <= dim3end; it += 1) - slab[idz][%Start] = iz - slab[idt][%Start] = it - HDF5LoadData /O /Q /Z /SLAB=slab /N=slabdata fileID, datasetpath - if (!v_flag) - wave slabdata - if (transpose) - data += slabdata[q][p][0][0] - else - data += slabdata[p][q][0][0] - endif - navg += 1 - endif + // set up multi threading + if (nthreads < 0) + nthreads = ThreadProcessorCount + endif + if (nthreads > 0) + variable threadGroupID = ThreadGroupCreate(nthreads) + variable ithread + for (ithread = 0; ithread < nthreads; ithread += 1) + ThreadStart threadGroupID, ithread, reduce_slab_worker(reduction_func) endfor - endfor - if (navg) - data /= navg + else + make /n=(nzt) /df /free processing_folders endif - 
killwaves /z slab, slabdata - ps_set_dimlabels(data) - return datawavename + if (progress) + display_progress_panel("PShell Import", "Reducing " + datasetpath + "...", nzt) + endif + + // create a template wave with the correct scales and labels + make /n=(nx,ny) /d /o $datawavename + wave template = $datawavename + ps_set_dimlabels2(template, datawavename) + ps_scale_dataset(template) + + variable iz, it, izt + variable n_sent = 0 + variable n_recvd = 0 + variable tmo = 0 + string dfname + dfref dfr + variable iw, nw + string sw + make /n=0 /free /wave result_waves + + iz = 0 + it = 0 + + do + // fill the processing queue up to a maximum number of folders + if (n_sent < max(1, nthreads) * 10 + n_recvd) + if (iz < nz) + if (it < nt) + // load a slab into a temporary folder + slab[idz][%Start] = iz + slab[idt][%Start] = it + dfname = "processing_" + num2str(n_sent) + NewDataFolder /s $dfname + HDF5LoadData /O /Q /Z /SLAB=slab /N=slabdata file_id, datasetpath + + duplicate template, image + variable /g r_index = iz + variable /g s_index = it + string /g func_param = reduction_params + + if (nthreads > 0) + // send to thread group + WaveClear image + ThreadGroupPutDF threadGroupID, : + else + // process immediately in single-thread mode + processing_folders[n_sent] = GetDataFolderDFR() + make /n=1/d profile1, profile2 + wave slabdata + wave /wave reduced_waves = reduce_slab_image(slabdata, image, reduction_func, func_param) + variable /g func_result = numpnts(reduced_waves) + adh5_get_result_waves(reduced_waves, "redw_", 0) + WaveClear slabdata, image, reduced_waves + setdatafolder :: + endif + + iz += 1 + n_sent += 1 + tmo = 0 + else + iz += 1 + it = 0 + endif + endif + else + // throttle the loop if processing is slow + tmo = min(100, tmo + 10) + endif + + // receive a slab from the processing queue + if (n_recvd < nzt) + if (nthreads > 0) + dfr = ThreadGroupGetDFR(threadGroupID, tmo) + else + dfr = processing_folders[n_recvd] + processing_folders[n_recvd] = $"" + 
endif + + if (DatafolderRefStatus(dfr) != 0) + // access results folder + nvar rr = dfr:r_index + nvar ss = dfr:s_index + nvar func_result = dfr:func_result + + if (func_result < 1) + print "error during data reduction." + result = -3 + break + endif + + // initialize result waves just once + if (numpnts(result_waves) == 0) + redimension /n=(func_result) result_waves + for (iw = 0; iw < func_result; iw += 1) + sw = "redw_" + num2str(iw) + wave profile = dfr:$sw + sw = "ReducedData" + num2str(iw+1) + make /n=(dimsize(profile, 0), nz, nt) /d /o $sw + wave data = $sw + setdimlabel 0, -1, $getdimlabel(profile, 0, -1), data + setdimlabel 1, -1, $kScanDimLabel, data + note data, note(profile) + ps_scale_dataset(data) + setscale /p x dimoffset(profile, 0), dimdelta(profile, 0), waveunits(profile, 0), data + setscale d 0, 0, waveunits(profile, -1), data + result_waves[iw] = data + endfor + endif + + // copy results + for (iw = 0; iw < func_result; iw += 1) + sw = "redw_" + num2str(iw) + wave profile = dfr:$sw + wave data = result_waves[iw] + data[][rr][ss] = profile[p] + endfor + + n_recvd += 1 + KillDataFolder /Z dfr + endif + else + // processing complete + break + endif + + // update progress window + if (progress) + if (update_progress_panel(n_recvd)) + print "user abort" + result = -4 + break + endif + endif + while ((n_recvd < nzt) && (result == 0)) + + // clean up + killwaves /z slab, slabdata, template + + if (nthreads > 0) + variable tstatus = ThreadGroupRelease(threadGroupID) + if (tstatus == -2) + print "error: thread did not terminate properly." 
+ result = -5 + endif + endif + + // finalize results + nw = numpnts(result_waves) + wavenames = "" + for (iw = 0; iw < nw; iw += 1) + wave /z data = result_waves[iw] + if (WaveExists(data)) + if (nz == 1) + redimension /n=(-1, 0, 0) data + elseif (nt == 1) + redimension /n=(-1, nz, 0) data + endif + wavenames += nameofwave(data) + ";" + endif + endfor + + if (progress) + kill_progress_panel() + endif + + setdatafolder base_df + return wavenames end + +threadsafe static function reduce_slab_worker(reduction_func) + funcref adh5_default_reduction reduction_func + do + // wait for job from main thread + do + dfref dfr = ThreadGroupGetDFR(0, 1000) + if (DataFolderRefStatus(dfr) == 0) + if (GetRTError(2)) + return 0 // no more jobs + endif + else + break + endif + while (1) + + // get input data + wave slabdata = dfr:slabdata + wave image = dfr:image + svar func_param = dfr:func_param + nvar rr = dfr:r_index + nvar ss = dfr:s_index + + // do the work + newdatafolder /s out_df + variable /g r_index = rr + variable /g s_index = ss + wave /wave reduced_waves = reduce_slab_image(slabdata, image, reduction_func, func_param) + variable /g func_result = numpnts(reduced_waves) + + // send output to queue and clean up + adh5_get_result_waves(reduced_waves, "redw_", 0) + WaveClear slabdata, image, reduced_waves + ThreadGroupPutDF 0, : + KillDataFolder dfr + while (1) + + return 0 +end + +threadsafe static function /wave reduce_slab_image(slabdata, image, reduction_func, reduction_params) + wave slabdata + wave image + funcref adh5_default_reduction reduction_func + string reduction_params + + image = slabdata[q][p][0][0] + + return reduction_func(image, reduction_params) +end + +// ====== meta and auxiliary data ====== + +/// load organizational metadata from the general group. +/// +/// the general group contains the following datasets: +/// authors, pgroup, proposal, proposer, sample. +/// +/// data is loaded into the current data folder. 
+/// all items are loaded into strings, authors is a comma-separated list. +/// missing items default to empty strings. +/// +/// @param file_df data folder reference of open HDF5 file from psh5_open_file(). +/// if undefined, the current datafolder is assumed. +/// +/// @return semicolon-separated list of the objects. +/// +function /s psh5_load_general_group(file_df) + dfref file_df + + if (!DataFolderRefStatus(file_df)) + dfref file_df = GetDataFolderDFR() + endif + nvar /sdfr=file_df file_id + + string obj_names = "authors;pgroup;proposal;proposer;sample;" + variable nn = ItemsInList(obj_names, ";") + variable ii + string name + + for (ii = 0; ii < nn; ii += 1) + name = StringFromList(ii, obj_names, ";") + psh_load_general_string(file_df, name) + endfor + + return obj_names +end + +/// load a string from the general group. +/// +/// the general group contains the following datasets: +/// authors, pgroup, proposal, proposer, sample. +/// +/// data is loaded into a global string in the current data folder. +/// arrays with multiple items are loaded into a comma-separated list. +/// a missing item defaults to the empty string. +/// +/// @param file_df data folder reference of open HDF5 file from psh5_open_file(). +/// if undefined, the current datafolder is assumed. +/// +/// @return comma-separated list of values. 
+/// +function /s psh_load_general_string(file_df, name) + dfref file_df + string name + + if (!DataFolderRefStatus(file_df)) + dfref file_df = GetDataFolderDFR() + endif + nvar /sdfr=file_df file_id + + string path = "/general/" + name + HDF5LoadData /O /Q /Z /N=wt_load_general /TYPE=1 file_id, path + string values = "" + if (!v_flag) + wave /t wt_load_general + variable nn = numpnts(wt_load_general) + variable ii + for (ii = 0; ii < nn; ii += 1) + values = AddListItem(wt_load_general[ii], values, ",", inf) + endfor + killwaves /z wt_load_general + if (strlen(values) >= 1) + values = values[0,strlen(values)-2] + endif + endif + string /g $name = values + return values +end + +/// load metadata of a PShell dataset. +/// +/// _metadata_ are the HDF5 attributes attached to a dataset. +/// they are mapped to "key=value" pairs and added to the wave note in separate lines. +/// the following attributes are loaded. +/// names and mappings are hard-coded. +/// +/// - "Writable Dimension" -> "ScanDimension" +/// - "Writable Index" -> "WriteableIndex" +/// - "Readable Index" -> "ReadableIndex" +/// +/// @param file_df data folder reference of open HDF5 file from psh5_open_file(). +/// if undefined, the current datafolder is assumed. +/// +/// @param datasetpath group path and name of the dataset. +/// path separator is the slash "/". +/// +/// @param datawave metadata is added to the wave note of this wave. +/// +/// @return 0 if successful, non-zero if an error occurred. 
+/// +function psh5_load_dataset_meta(file_df, datasetpath, datawave) + dfref file_df + string datasetpath + wave datawave + + if (!DataFolderRefStatus(file_df)) + dfref file_df = GetDataFolderDFR() + endif + nvar /sdfr=file_df file_id + + dfref save_df = GetDataFolderDFR() + SetDataFolder NewFreeDataFolder() + + string wnote + + HDF5LoadData /O /Q /Z /A="Writable Dimension" /N=WriteDim file_id, datasetpath + if (!v_flag) + wave WriteDim + // scan dimension starts at 1 + sprintf wnote, "ScanDimension=%u", WriteDim[0] + Note datawave, wnote + endif + + HDF5LoadData /O /Q /Z /A="Writable Index" /N=WriteIndex file_id, datasetpath + if (!v_flag) + wave WriteIndex + sprintf wnote, "WriteableIndex=%u", WriteIndex[0] + Note datawave, wnote + endif + + HDF5LoadData /O /Q /Z /A="Readable Index" /N=ReadIndex file_id, datasetpath + if (!v_flag) + wave ReadIndex + sprintf wnote, "ReadableIndex=%u", ReadIndex[0] + Note datawave, wnote + endif + + setdatafolder save_df + return 0 +end + +/// load metadata of a PShell scan group. +/// +/// _metadata_ are the HDF5 attributes attached to the scan group. +/// the following attributes are loaded. +/// the respective wave names under Igor are given in parentheses. +/// +/// - Dimensions (ScanDimensions) +/// - Writables (ScanWritables) +/// - Readables (ScanReadables) +/// - Steps (ScanSteps) +/// - Iterations (ScanIterations) - if present (XPSSpectrum script) +/// - Step Size (ScanStepSize) - if present (XPSSpectrum script) +/// - Step Time (ScanStepTime) - if present (XPSSpectrum script) +/// +/// if they are missing in the file, `ScanDimensions` and `ScanReadables` are set to default values +/// assuming the file contains a single spectrum. +/// +/// data is loaded into the current data folder. +/// +/// @param file_df data folder reference of open HDF5 file from psh5_open_file(). +/// if undefined, the current datafolder is assumed. +/// +/// @param scanpath path to the scan group in the HDF5 file, e.g. "/scan 1". 
+/// +/// @return semicolon-separated list of the loaded waves. +/// +function /s psh5_load_scan_meta(file_df, scanpath) + // todo: convert to variables/strings + dfref file_df + string scanpath + string wavenames = "" + + if (!DataFolderRefStatus(file_df)) + dfref file_df = GetDataFolderDFR() + endif + nvar /sdfr=file_df file_id + + HDF5LoadData /O /Q /Z /A="Dimensions" /N=ScanDimensions /TYPE=1 file_id, scanpath + if (!v_flag) + wavenames = AddListItem(s_wavenames, wavenames, ";", inf) + else + make /n=1 /o ScanDimensions + ScanDimensions = 0 + wavenames = AddListItem("ScanDimensions", wavenames, ";", inf) + endif + HDF5LoadData /O /Q /Z /A="Readables" /N=ScanReadables /TYPE=1 file_id, scanpath + if (!v_flag) + wavenames = AddListItem(s_wavenames, wavenames, ";", inf) + else + make /n=1 /o /t ScanReadables + ScanReadables[0] = "ScientaSpectrum" + wavenames = AddListItem("ScanReadables", wavenames, ";", inf) + endif + HDF5LoadData /O /Q /Z /A="Writables" /N=ScanWritables /TYPE=1 file_id, scanpath + if (!v_flag) + wavenames = AddListItem(s_wavenames, wavenames, ";", inf) + else + // OTF script + HDF5LoadData /O /Q /Z /A="PlotDomain" /N=ScanWritables /TYPE=1 file_id, scanpath + if (!v_flag) + wavenames = AddListItem(s_wavenames, wavenames, ";", inf) + endif + endif + HDF5LoadData /O /Q /Z /A="Steps" /N=ScanSteps /TYPE=1 file_id, scanpath + if (!v_flag) + wavenames = AddListItem(s_wavenames, wavenames, ";", inf) + endif + wavenames = ReplaceString(";;", wavenames, ";") + + // additional attributes from XPSSpectrum.py + HDF5LoadData /O /Q /Z /A="Iterations" /N=ScanIterations /TYPE=1 file_id, scanpath + if (!v_flag) + wavenames = AddListItem(s_wavenames, wavenames, ";", inf) + endif + HDF5LoadData /O /Q /Z /A="Step Size" /N=ScanStepSize /TYPE=1 file_id, scanpath + if (!v_flag) + wavenames = AddListItem(s_wavenames, wavenames, ";", inf) + endif + HDF5LoadData /O /Q /Z /A="Step Time" /N=ScanStepTime /TYPE=1 file_id, scanpath + if (!v_flag) + wavenames = 
AddListItem(s_wavenames, wavenames, ";", inf) + endif + + return wavenames +end + +// ====== dimension scaling ====== + /// set dimension labels according to the axis type /// /// this function asserts a particular ordering of dimensions types @@ -1528,130 +2099,85 @@ function ps_set_dimlabels2(data, name) return 0 end -/// find the scan folder +/// find the scan folder of current data /// -/// the scan folder is the one that contains the :attr folder +/// assuming we are in the data folder (where the scan results, ScientaSpectrum, etc.) are, +/// find the associated scan folder. +/// this can either be the same (usually) or the parent folder (multi-region scans). +/// +/// the scan folder is the one that contains the ScanWritables wave. /// the data and scan folders may refer to the same folder. /// -static function /df find_scan_folder(dataDF) - dfref dataDF +function /df ps_find_scan_folder(data_df) + dfref data_df - dfref attrDF = dataDF:attr - if (!DataFolderRefStatus(attrDF)) - string df = GetDataFolder(1, dataDF) + ":" - dfref scanDF = $df + wave /z /t /sdfr=data_df ScanWritables=::ScanWritables + if (WaveExists(ScanWritables)) + string sdf = GetDataFolder(1, data_df) + dfref parent_df = $(sdf + ":") + return parent_df else - dfref scanDF = dataDF + return data_df endif - return scanDF end /// find the attributes data folder /// -/// this is the :attr folder. +/// the attributes folder contains diagnostic beamline data at each scan point. +/// the folder can have one of several names due to different pshell versions: +/// "attr", "attrs", or "diags" (from 2022 on). +/// historically, the folder was named "attr" due to the area detector software. /// -static function /df find_attr_folder(dataDF) - dfref dataDF +/// assuming we are in the scan folder (where the ScanWritables, etc.) are, +/// find the associated attributes folder. 
+///
+function /df ps_find_attr_folder(scan_df)
+	dfref scan_df
 
-	dfref attrDF = dataDF:attr
-	if (!DataFolderRefStatus(attrDF))
-		string df = GetDataFolder(1, dataDF) + ":"
-		dfref scanDF = $df
-		dfref attrDF = scanDF:attr
+	dfref diags_df = scan_df:diags
+	dfref attrs_df = scan_df:attrs
+	dfref attr_df = scan_df:attr
+	if (DataFolderRefStatus(diags_df))
+		return diags_df
+	elseif (DataFolderRefStatus(attrs_df))
+		return attrs_df
+	elseif (DataFolderRefStatus(attr_df))
+		return attr_df
+	else
+		return $""
 	endif
-	return attrDF
 end
 
-/// set the dimension scales of loaded PShell Scienta datasets according to attributes.
+/// find a wave in scan and attr data folders
 ///
-/// datasets listed in the ScanReadables waves are scaled
-/// according to the attribute waves in the data, scan, and attributes folders,
-/// whichever is found first.
+/// look up a wave by name in the given three data folders.
+/// return the first one found.
 ///
-/// the current datafolder must contain the ScanReadables wave and the :attr folder.
-/// the ScanReadables text wave contains names of the waves to scale.
-/// wave names can include a relative path to a sub-folder. the path separator is "/".
+/// @param df1	first data folder to check
+/// @param df2	second data folder to check
+/// @param df3	third data folder to check
+/// @return wave reference, empty reference if not found
 ///
-/// the dimension labels of the dataset waves must have been set correctly, e.g. by ps_set_dimlabels().
-/// this is implicitly done by the high-level load functions.
-///
-/// @version this function supports regions from version 1.03.
-///			check that you're in the correct data folder!
-/// -function ps_scale_datasets() - dfref scanDF = GetDataFolderDFR() - dfref attrDF = find_attr_folder(scanDF) - - make /n=3 /free lo, hi - make /n=3 /t /free ax, un - wave /t /z /SDFR=scanDF ScanReadables - if (WaveExists(ScanReadables)) - variable isr - variable nsr = numpnts(ScanReadables) - string ssr - string sdf - for (isr = 0; isr < nsr; isr += 1) - setdatafolder scanDF - ssr = ScanReadables[isr] - if (ItemsInList(ssr, "/") >= 2) - sdf = StringFromList(0, ssr, "/") - ssr = RemoveListItem(0, ssr, "/") - setdatafolder $sdf - endif - wave /z wsr=$ssr - if (WaveExists(wsr)) - ps_detect_scale(ax, lo, hi, un) - ps_scale_dataset_2(wsr, ax, lo, hi, un) - endif - endfor - endif - setdatafolder scanDF -end - -/// set the dimension scales of a loaded PShell Scienta dataset according to attributes. -/// -/// the current datafolder must contain the :attr folder. -/// the data wave can be in the current folder or a sub-folder. -/// -/// the dimension labels of the dataset waves must have been set correctly, e.g. by ps_set_dimlabels(). -/// this is implicitly done by the high-level load functions. -/// -/// the function is useful if a single dataset is loaded and scaled. -/// if multiple datasets are loaded, ps_scale_datasets() is slightly more efficient. -/// -/// @param data data wave to be scaled. -/// dimension labels (index -1) must be set correctly, cf. ps_set_dimlabels(). -/// -/// @version this function supports regions from version 1.03. 
-/// -function ps_scale_dataset(data) - wave data - - dfref saveDF = GetDataFolderDFR() - dfref dataDF = GetWavesDataFolderDFR(data) - - setdatafolder dataDF - make /n=3 /free lo, hi - make /n=3 /t /free ax, un - ps_detect_scale(ax, lo, hi, un) - ps_scale_dataset_2(data, ax, lo, hi, un) - setdatafolder saveDF -end - -static function /wave find_scale_wave(name, dataDF, scanDF, attrDF) +function /wave ps_find_scale_wave(name, df1, df2, df3) string name - dfref dataDF - dfref scanDF - dfref attrDF + dfref df1 + dfref df2 + dfref df3 - wave /SDFR=dataDF /Z w = $name - if (!WaveExists(w)) - wave /SDFR=scanDF /Z w = $name - if (!WaveExists(w)) - wave /SDFR=attrDF /Z w = $name + variable idf + variable ndf=3 + make /n=(ndf) /df /free dfs + dfs[0] = {df1, df2, df3} + for (idf = 0; idf < ndf; idf += 1) + if (DataFolderRefStatus(dfs[idf])) + wave /SDFR=dfs[idf] /Z w = $name + if (WaveExists(w)) + return w + endif endif - endif - return w + endfor + + return $"" end /// detect the dimension scales from attributes. @@ -1660,9 +2186,10 @@ end /// the results are written to the provided waves. /// the function is normally called by ps_scale_datasets() but can also be used independently. /// -/// the current datafolder must be the data or the scan folder. /// the data folder contains the waves that are to be scaled. -/// the scan folder contains the scan positions and the :attr folder. +/// the function looks for the scan positions and diagnostics as necessary. +/// if the scaling data is not found, the scales are not changed. +/// the kEssentialDiags flag can be used with psh5_load() to select the necessary datasets. /// /// the provided waves are redimensioned by the function, and dimension labels are set. /// the scale parameters can then be extracted by keyword, e.g., @@ -1671,7 +2198,7 @@ end /// @arg `lo[%%scan]` scan dimension. /// @arg `lo[%%data]` data dimension. 
/// -/// the function tries to read the following waves, in the data, scan, and attributes folders, +/// the function tries to read the following waves, in the data, scan, and attributes/diagnostics folders, /// where the first folder in the list takes precedence. /// it may fall back to more or less reasonable default values if no data is not found. /// @arg `LensMode` @@ -1682,6 +2209,9 @@ end /// @arg `ScanWritables` /// @arg wave referenced by `ScanWritables[0]` /// +/// @param data_df data folder which contains the waves to be scaled. +/// this is usually the "scan" or "region" folder. +/// /// @param ax text wave to receive the axis labels. /// /// @param lo wave to receive the lower limits. @@ -1692,18 +2222,15 @@ end /// /// @return the function results are written to the lo, hi, un, and ax waves. /// -/// @version this function supports regions from version 1.03. -/// check that you're in the correct data folder! -/// -function ps_detect_scale(ax, lo, hi, un) +function ps_detect_scale(data_df, ax, lo, hi, un) + dfref data_df wave /t ax wave lo wave hi wave /t un - dfref dataDF = GetDataFolderDFR() - dfref scanDF = find_scan_folder(dataDF) - dfref attrDF = find_attr_folder(dataDF) + dfref scan_df = ps_find_scan_folder(data_df) + dfref attr_df = ps_find_attr_folder(scan_df) redimension /n=4 lo, hi, un, ax setdimlabel 0, 0, $kEnergyDimLabel, lo, hi, un, ax @@ -1732,12 +2259,13 @@ function ps_detect_scale(ax, lo, hi, un) un[%$kDataDimLabel] = "arb." 
ax[%$kDataDimLabel] = "value" - wave /SDFR=attrDF /T /Z LensMode - wave /Z ChannelBegin = find_scale_wave("ScientaChannelBegin", dataDF, scanDF, attrDF) - wave /Z ChannelEnd = find_scale_wave("ScientaChannelEnd", dataDF, scanDF, attrDF) - wave /Z SliceBegin = find_scale_wave("ScientaSliceBegin", dataDF, scanDF, attrDF) - wave /Z SliceEnd = find_scale_wave("ScientaSliceEnd", dataDF, scanDF, attrDF) - + wave /T /Z LensMode = ps_find_scale_wave("LensMode", data_df, scan_df, attr_df) + wave /Z ChannelBegin = ps_find_scale_wave("ScientaChannelBegin", data_df, scan_df, attr_df) + wave /Z ChannelEnd = ps_find_scale_wave("ScientaChannelEnd", data_df, scan_df, attr_df) + wave /Z SliceBegin = ps_find_scale_wave("ScientaSliceBegin", data_df, scan_df, attr_df) + wave /Z SliceEnd = ps_find_scale_wave("ScientaSliceEnd", data_df, scan_df, attr_df) + wave /Z ScientaChannels = ps_find_scale_wave("ScientaChannels", data_df, scan_df, attr_df) + // lens mode can give more detail if (waveexists(LensMode) && (numpnts(LensMode) >= 1)) strswitch(LensMode[0]) @@ -1764,17 +2292,20 @@ function ps_detect_scale(ax, lo, hi, un) if (waveexists(ChannelBegin) && waveexists(ChannelEnd) && (numpnts(ChannelBegin) >= 1) && (numpnts(ChannelEnd) >= 1)) lo[%$kEnergyDimLabel] = ChannelBegin[0] hi[%$kEnergyDimLabel] = ChannelEnd[0] + elseif (waveexists(ScientaChannels) && (numpnts(ScientaChannels) >= 1)) + lo[%$kEnergyDimLabel] = ScientaChannels[0] + hi[%$kEnergyDimLabel] = ScientaChannels[numpnts(ScientaChannels)-1] endif if (waveexists(SliceBegin) && waveexists(SliceEnd) && (numpnts(SliceBegin) >= 1) && (numpnts(SliceEnd) >= 1)) lo[%$kAngleDimLabel] = SliceBegin[0] hi[%$kAngleDimLabel] = SliceEnd[0] endif - wave /z /t /SDFR=scanDF ScanWritables + wave /z /t /SDFR=scan_df ScanWritables if (WaveExists(ScanWritables)) - wave /z /SDFR=scanDF scanner = $ScanWritables[0] + wave /z /SDFR=scan_df scanner = $ScanWritables[0] if (!WaveExists(scanner)) - wave /z /SDFR=attrDF scanner = $ScanWritables[0] + wave /z 
/SDFR=attr_df scanner = $ScanWritables[0] endif if (WaveExists(scanner) && (numpnts(scanner) >= 1)) lo[%$kScanDimLabel] = scanner[0] @@ -1908,8 +2439,7 @@ function ps_scale_dataset_2(data, ax, lo, hi, un) case "ScientaSpectrum": case "ImageEnergyDistribution": case "ScientaEnergyDistribution": - data *= kDetectorSensitivity - data_unit = "counts" + data_unit = "arb." data_label = "intensity" def = 0 break @@ -1935,663 +2465,91 @@ function ps_scale_dataset_2(data, ax, lo, hi, un) note /k data, snote end -/// load and reduce the ScientaImage dataset of the first scan of a PShell data file. +/// set the dimension scales of loaded PShell Scienta datasets according to attributes. /// -/// the resulting dataset is reduced in one image dimension by a user-defined reduction function, -/// e.g. by region-of-interest integration, curve fitting, etc. -/// cf. @ref adh5_default_reduction for further details. +/// datasets listed in the ScanReadables waves are scaled +/// according to the attribute waves in the data, scan, and attributes folders, +/// whichever is found first. /// -/// the function loads the dataset image by image using the hyperslab option -/// and applies a custom reduction function to each image. -/// the results from the reduction function are composed into one result wave. -/// the raw data are discarded. +/// the specified datafolder must contain the ScanReadables wave and the :attr folder. +/// the ScanReadables text wave contains names of the waves to scale. +/// wave names can include a relative path to a sub-folder. the path separator is "/". /// -/// if the data is from the electron analyser driver and some special attributes are included, -/// the function will set the scales of the image dimensions. +/// the dimension labels of the dataset waves must have been set correctly, e.g. by ps_set_dimlabels(). +/// this is implicitly done by the high-level load functions. 
/// -/// by default, the reduction function is called in separate threads to reduce the total loading time. -/// (see the global variable psh5_perf_secs which reports the total run time of the function.) -/// the effect varies depending on the balance between file loading (image size) -/// and data processing (complexity of the reduction function). -/// for debugging the reduction function, multi-threading can be disabled. +/// @param scan_df scan data folder. must contain the ScanReadables wave. /// -/// @param ANickName destination folder name (top level under root). -/// -/// @param APathName igor symbolic path name. can be empty if the path is specified in FileName or a dialog box should be displayed. -/// -/// @param AFileName if empty a dialog box shows up. -/// -/// @param reduction_func custom data reduction function. -/// this can be any user-defined function which has the same parameters as @ref adh5_default_reduction. -/// some reduction functions are predefined in the @ref PearlScientaPreprocess module. -/// -/// @param reduction_param parameter string for the reduction function. -/// -/// @param dataset name of dataset to load, optionally including group path relative to scan (scan 1). -/// by default, the function looks for a ScientaImage dataset. -/// in a multi-region scan, this will be region 1. -/// to select region 2, e.g., use `dataset="region2/ScientaImage"`. -/// -/// @param progress progress window. -/// @arg 1 (default) show progress window -/// @arg 0 do not show progress window -/// -/// @param nthreads -/// @arg -1 (default) use as many threads as there are processor cores (in addition to main thread). -/// @arg 0 use main thread only (for debugging and profiling). -/// @arg >= 1 use a fixed number of (additional) threads. -/// -/// @return semicolon-separated list of the loaded dataset `ReducedData1`, `ReducedData2`, etc. if successful. -/// auxiliary waves, scan positions, attributes are loaded but not listed in the string. 
-/// empty string if an error occurred. -/// error messages are printed to the history. -/// -/// @return global string s_filepath in new data folder contains the full file path on disk. -/// -/// @return global string s_scanpaths in new data folder contains a list of scan groups inside the file. -/// -function /s psh5_load_reduced(ANickName, APathName, AFileName, reduction_func, reduction_param, [dataset, progress, nthreads]) - string ANickName - string APathName - string AFileName - funcref adh5_default_reduction reduction_func - string reduction_param - string dataset - variable progress - variable nthreads - - if (ParamIsDefault(progress)) - progress = 1 - endif - if (ParamIsDefault(nthreads)) - nthreads = -1 - endif - - dfref saveDF = GetDataFolderDFR() - - // performance monitoring - variable timerRefNum - variable /g psh5_perf_secs - timerRefNum = startMSTimer - - variable fileID = psh5_open_file(ANickName, APathName, AFileName) - string wavenames = "" - if (fileID) - dfref fileDF = GetDataFolderDFR() - svar s_filepath - svar s_scanpaths - AFileName = s_filepath - print "loading " + s_filepath + "\r" - - variable ig = 0 - variable ng = ItemsInList(s_scanpaths) - string scanpath - string folder - string positioners - string positioner - string positionerpath - - scanpath = StringFromList(ig, s_scanpaths) - folder = ReplaceString("/", scanpath, "") - folder = ReplaceString(" ", folder, "") - folder = PearlCleanupName(folder) - setdatafolder fileDF - newdatafolder /s /o $folder - dfref dataDF = GetDataFolderDFR() - positioners = psh5_load_scan_meta(fileID, scanpath) - newdatafolder /s /o attr - killwaves /a/z - psh5_load_scan_attrs(fileID, scanpath) - setdatafolder dataDF - wave /t /z ScanWritables - if (waveexists(ScanWritables) && (numpnts(ScanWritables) >= 1)) - positioner = ScanWritables[0] - if (strlen(positioner) > 0) - positionerpath = scanpath + "/" + positioner - positionerpath = ReplaceString("//", positionerpath, "/") - HDF5LoadData /O /Q /Z fileID, 
positionerpath +function ps_scale_datasets(scan_df) + dfref scan_df + + make /n=3 /free lo, hi + make /n=3 /t /free ax, un + wave /t /z /SDFR=scan_df ScanReadables + if (WaveExists(ScanReadables)) + variable isr + variable nsr = numpnts(ScanReadables) + variable nel + string ssr + string sds + for (isr = 0; isr < nsr; isr += 1) + ssr = ScanReadables[isr] + dfref data_df = psh5_dataset_to_folder(scan_df, ssr) + if (!DataFolderRefStatus(data_df)) + dfref data_df = scan_df endif - endif - - setdatafolder dataDF - string datasets = psh5_list_scan_datasets(fileID, scanpath, include_regions=1) - if (ParamIsDefault(dataset)) - dataset = select_dataset(datasets, "ScientaImage") - endif - wavenames = psh5_load_dataset_reduced(fileID, scanpath, dataset, reduction_func, reduction_param, progress=progress, nthreads=nthreads) - - psh5_close_file(fileID) - endif - - if (timerRefNum >= 0) - psh5_perf_secs = stopMSTimer(timerRefNum) / 1e6 - endif - - setdatafolder saveDF - return wavenames -end - - -/// load a reduced dataset from the open PShell HDF5 file. -/// -/// the function loads the dataset image by image using the hyperslab option -/// and applies a custom reduction function to each image. -/// the results from the reduction function are written to the `ReducedData1`, `ReducedData2`, etc. waves. -/// the raw data are discarded. -/// -/// by default, the reduction function is called in separate threads to reduce the total loading time. -/// (see the global variable psh5_perf_secs which reports the total run time of the function.) -/// the effect varies depending on the balance between file loading (image size) -/// and data processing (complexity of the reduction function). -/// for debugging the reduction function, multi-threading can be disabled. -/// -/// if the reduction function requires the image waves to be scaled properly, -/// the attributes must have been loaded by psh5_load_scan_attrs() before. 
-/// in this case, the scales of the result waves are also set by the function. -/// otherwise, the results can also be scaled by ps_scale_dataset() later. -/// -/// @param fileID ID of open HDF5 file from psh5_open_file(). -/// -/// @param scanpath path to scan group in the HDF5 file. -/// -/// @param datasetname name of the dataset. -/// this must currently be "ScientaImage", other data is not supported. -/// the name of the loaded wave is a cleaned up version of the dataset name. -/// the name can include the region name as a relative path, e.g. "region1/ScientaImage". -/// in this case, the dataset is loaded into a sub-folder named "region1". -/// -/// @param reduction_func custom data reduction function. -/// this can be any user-defined function which has the same parameters as @ref adh5_default_reduction. -/// some reduction functions are predefined in the @ref PearlScientaPreprocess module. -/// -/// @param reduction_param parameter string for the reduction function. -/// -/// @param progress progress window. -/// @arg 1 (default) show progress window -/// @arg 0 do not show progress window -/// -/// @param nthreads -/// @arg -1 (default) use as many threads as there are processor cores (in addition to main thread). -/// @arg 0 use main thread only (for debugging and profiling). -/// @arg >= 1 use a fixed number of (additional) threads. -/// -/// @return semicolon-separated list of the loaded dataset `ReducedData1`, `ReducedData2`, etc. if successful. -/// auxiliary waves, scan positions, attributes are loaded but not listed in the string. -/// empty string if an error occurred. -/// error messages are printed to the history. -/// -/// @version this function supports regions as of version 1.03. 
-/// -function /s psh5_load_dataset_reduced(fileID, scanpath, datasetname, reduction_func, reduction_param, [progress, nthreads]) - variable fileID - string scanpath - string datasetname - funcref adh5_default_reduction reduction_func - string reduction_param - variable progress - variable nthreads - - if (ParamIsDefault(progress)) - progress = 1 - endif - if (ParamIsDefault(nthreads)) - nthreads = -1 - endif - - dfref base_df = GetDataFolderDFR() - variable result = 0 - string datasetpath - string datawavename - string wavenames = "" - - datasetpath = scanpath + "/" + datasetname - datasetpath = ReplaceString("//", datasetpath, "/") - datawavename = StringFromList(ItemsInList(datasetpath, "/") - 1, datasetpath, "/") - - string regionname - string regionpath - if (ItemsInList(datasetname, "/") >= 2) - regionname = StringFromList(0, datasetname, "/") - regionpath = ReplaceString("//", scanpath + "/" + regionname, "/") - datasetname = RemoveListItem(0, datasetname, "/") - NewDataFolder /o/s $regionname - else - regionname = "" - regionpath = scanpath - endif - - STRUCT HDF5DataInfo di // Defined in HDF5 Browser.ipf. 
- InitHDF5DataInfo(di) - variable err = HDF5DatasetInfo(fileID, datasetpath, 0, di) - if (err != 0) - print "error accessing detector/data" - result = -1 - return wavenames - endif - if (di.ndims < 2) - print "error: rank of dataset < 2" - result = -2 - return wavenames - elseif (di.ndims < 3) - progress = 0 - endif - - variable idx, idy, idz, idt - variable transpose = WhichListItem(datawavename, kTransposedDatasets) >= 0 - if (transpose) - idx = 1 - idy = 0 - else - idx = 0 - idy = 1 - endif - idz = 2 - idt = 3 - - variable nx, ny, nz, nt, nzt - nx = di.dims[idx] - ny = di.dims[idy] - nz = di.dims[idz] - nt = di.dims[idt] - // adjust singleton dimensions - nz = max(nz, 1) - nt = max(nt, 1) - nzt = nz * nt - - // load data image by image - HDF5MakeHyperslabWave("slab", max(di.ndims, 4)) - wave slab - slab[][%Start] = 0 - slab[][%Stride] = 1 - slab[][%Count] = 1 - slab[][%Block] = 1 - slab[idx][%Block] = nx - slab[idy][%Block] = ny - - // set up multi threading - if (nthreads < 0) - nthreads = ThreadProcessorCount - endif - if (nthreads > 0) - variable threadGroupID = ThreadGroupCreate(nthreads) - variable ithread - for (ithread = 0; ithread < nthreads; ithread += 1) - ThreadStart threadGroupID, ithread, reduce_slab_worker(reduction_func) - endfor - else - make /n=(nzt) /df /free processing_folders - endif - - if (progress) - display_progress_panel("HDF5 Import", "Loading data (step 1 of 2)...", nzt) - endif - - // create a template wave with the correct scales and labels - make /n=(nx,ny) /d /o $datawavename - wave template = $datawavename - ps_set_dimlabels2(template, datawavename) - ps_scale_dataset(template) - - variable iz, it, izt - string dfname - variable iw, nw - string sw - make /n=0 /free /wave result_waves - - izt = 0 - for (iz = 0; iz < nz; iz += 1) - for (it = 0; it < nt; it += 1) - // load hyperslab - slab[idz][%Start] = iz - slab[idt][%Start] = it - dfname = "processing_" + num2str(izt) - newdatafolder /s $dfname - HDF5LoadData /O /Q /Z /SLAB=slab 
/N=slabdata fileID, datasetpath - - // send to processing queue - duplicate template, image - variable /g r_index = iz - variable /g s_index = it - string /g func_param = reduction_param - - if (nthreads > 0) - WaveClear image - ThreadGroupPutDF threadGroupID, : - else - processing_folders[izt] = GetDataFolderDFR() - make /n=1/d profile1, profile2 - wave slabdata - wave /wave reduced_waves = reduce_slab_image(slabdata, image, reduction_func, func_param) - variable /g func_result = numpnts(reduced_waves) - adh5_get_result_waves(reduced_waves, "redw_", 0) - WaveClear slabdata, image, reduced_waves - setdatafolder :: - endif - - izt += 1 - // progress window - if (progress) - if (update_progress_panel(izt)) - print "user abort" - result = -4 - break - endif + nel = ItemsInList(ssr, "/") + sds = StringFromList(nel - 1, ssr, "/") + wave /z /sdfr=data_df wsr=$sds + if (WaveExists(wsr)) + ps_detect_scale(data_df, ax, lo, hi, un) + ps_scale_dataset_2(wsr, ax, lo, hi, un) endif endfor - endfor - - killwaves /z slab, slabdata, template - if (progress) - update_progress_panel(0, message="Processing data (step 2 of 2)...") endif - - dfref dfr - for (izt = 0; (izt < nzt) && (result == 0); izt += 1) - if (nthreads > 0) - do - if (progress) - if (update_progress_panel(izt)) - print "user abort" - result = -4 - break - endif - endif - dfr = ThreadGroupGetDFR(threadGroupID, 1000) - if (DatafolderRefStatus(dfr) != 0) - break - endif - while (1) - else - if (progress) - if (update_progress_panel(izt)) - print "user abort" - result = -4 - break - endif - endif - dfr = processing_folders[izt] - endif - - if (result != 0) - break - endif - - nvar rr = dfr:r_index - nvar ss = dfr:s_index - nvar func_result = dfr:func_result - - if (func_result < 1) - print "error during data reduction." 
- result = -3 - break - endif - - if (numpnts(result_waves) == 0) - redimension /n=(func_result) result_waves - for (iw = 0; iw < func_result; iw += 1) - sw = "redw_" + num2str(iw) - wave profile = dfr:$sw - sw = "ReducedData" + num2str(iw+1) - make /n=(dimsize(profile, 0), nz, nt) /d /o $sw - wave data = $sw - setdimlabel 0, -1, $getdimlabel(profile, 0, -1), data - setdimlabel 1, -1, $kScanDimLabel, data - note data, note(profile) - ps_scale_dataset(data) - setscale /p x dimoffset(profile, 0), dimdelta(profile, 0), waveunits(profile, 0), data - setscale d 0, 0, waveunits(profile, -1), data - result_waves[iw] = data - endfor - endif - for (iw = 0; iw < func_result; iw += 1) - sw = "redw_" + num2str(iw) - wave profile = dfr:$sw - wave data = result_waves[iw] - data[][rr][ss] = profile[p] - endfor - endfor - - if (nthreads > 0) - variable tstatus = ThreadGroupRelease(threadGroupID) - if (tstatus == -2) - print "error: thread did not terminate properly." - result = -5 - endif - else - for (izt = 0; izt < nzt; izt += 1) - KillDataFolder /Z processing_folders[izt] - endfor - endif - - if (result == 0) - nw = numpnts(result_waves) - wavenames = "" - for (iw = 0; iw < nw; iw += 1) - wave data = result_waves[iw] - if (nz == 1) - redimension /n=(-1, 0, 0) data - elseif (nt == 1) - redimension /n=(-1, nz, 0) data - endif - wavenames += nameofwave(data) + ";" - endfor - endif - if (progress) - kill_progress_panel() - endif - - setdatafolder base_df - return wavenames end -threadsafe static function reduce_slab_worker(reduction_func) - funcref adh5_default_reduction reduction_func - do - // wait for job from main thread - do - dfref dfr = ThreadGroupGetDFR(0, 1000) - if (DataFolderRefStatus(dfr) == 0) - if (GetRTError(2)) - return 0 // no more jobs - endif - else - break - endif - while (1) +/// set the dimension scales of a loaded PShell Scienta dataset according to attributes. +/// +/// the current datafolder must contain the :attr folder. 
+/// the data wave can be in the current folder or a sub-folder. +/// +/// the dimension labels of the dataset waves must have been set correctly, e.g. by ps_set_dimlabels(). +/// this is implicitly done by the high-level load functions. +/// +/// the function is useful if a single dataset is loaded and scaled. +/// if multiple datasets are loaded, ps_scale_datasets() is slightly more efficient. +/// +/// @param data data wave to be scaled. +/// dimension labels (index -1) must be set correctly, cf. ps_set_dimlabels(). +/// +/// @version this function supports regions from version 1.03. +/// +function ps_scale_dataset(data) + wave data - // get input data - wave slabdata = dfr:slabdata - wave image = dfr:image - svar func_param = dfr:func_param - nvar rr = dfr:r_index - nvar ss = dfr:s_index - - // do the work - newdatafolder /s outDF - variable /g r_index = rr - variable /g s_index = ss - wave /wave reduced_waves = reduce_slab_image(slabdata, image, reduction_func, func_param) - variable /g func_result = numpnts(reduced_waves) - - // send output to queue and clean up - adh5_get_result_waves(reduced_waves, "redw_", 0) - WaveClear slabdata, image, reduced_waves - ThreadGroupPutDF 0, : - KillDataFolder dfr - while (1) + dfref save_df = GetDataFolderDFR() + dfref data_df = GetWavesDataFolderDFR(data) - return 0 + setdatafolder data_df + make /n=3 /free lo, hi + make /n=3 /t /free ax, un + ps_detect_scale(data_df, ax, lo, hi, un) + ps_scale_dataset_2(data, ax, lo, hi, un) + setdatafolder save_df end -threadsafe static function /wave reduce_slab_image(slabdata, image, reduction_func, reduction_param) - wave slabdata - wave image - funcref adh5_default_reduction reduction_func - string reduction_param - - // the multiplication by detector sensitivity assumes that we are loading a ScientaImage. - image = slabdata[q][p][0][0] * kDetectorSensitivity - return reduction_func(image, reduction_param) -end - -/// load descriptive info from a PShell data file. 
-/// -/// the info string lists the following information for each scan contained in the file: -/// - path of the scan group inside the file. -/// - number of scan positions. -/// - dataset names of scan positioners. -/// - dataset names of detectors. -/// -/// @param APathName igor symbolic path name. can be empty if the path is specified in AFileName or a dialog box should be displayed -/// -/// @param AFileName if empty a dialog box shows up -/// -/// @return newline terminated string. -/// -function /s psh5_load_info(APathName, AFileName) - string APathName - string AFileName - - dfref saveDF = GetDataFolderDFR() - dfref fileDF = NewFreeDataFolder() - setdatafolder fileDF - - variable fileID - string filepath - string scanpaths - variable nscans - variable iscan - string scanpath - string info = "" - - HDF5OpenFile /P=$APathName /R fileID as AFileName - if (v_flag == 0) - filepath = s_path + s_filename - scanpaths = psh5_list_scans(fileID) - nscans = ItemsInList(scanpaths) - for (iscan = 0; iscan < nscans; iscan += 1) - scanpath = StringFromList(iscan, scanpaths) - info = info + scanpath + "\r" - info = info + psh5_load_scan_info(fileID, scanpath) - endfor - HDF5CloseFile fileID - endif - - setdatafolder saveDF - return info -end - -/// load descriptive info from a PShell scan. -/// -/// the info string contains up to three lines which are made up of the following information: -/// - number of scan positions. -/// - dataset names of scan positioners. -/// - dataset names of detectors (without region names). -/// - region names -/// -/// @param fileID ID of open HDF5 file from psh5_open_file(). -/// -/// @param scanpath path to scan group in the HDF5 file. -/// -/// @return newline terminated string. 
-/// -function /s psh5_load_scan_info(fileID, scanpath) - variable fileID - string scanpath - - string info = "" - string positions = "" - string positioners = "" - string readables = "" - string detectors = "" - string regions = "" - - psh5_load_scan_meta(fileID, scanpath) - - wave /z ScanDimensions - wave /t /z ScanWritables - wave /t /z ScanReadables - wave /z ScanSteps - - if (WaveExists(ScanSteps) && (numpnts(ScanSteps) >= 1)) - ScanSteps += 1 - positions = "positions = (" + wave2list(ScanSteps, "%u", ",") + ")" - info = AddListItem(positions, info, "\r", inf) - endif - if (WaveExists(ScanWritables) && (numpnts(ScanWritables) >= 1)) - positioners = "positioners = " + twave2list(ScanWritables, ",") - info = AddListItem(positioners, info, "\r", inf) - endif - - variable i, m, n - string s - if (WaveExists(ScanReadables) && (numpnts(ScanReadables) >= 1)) - readables = twave2list(ScanReadables, ",") - n = ItemsInList(readables, ",") - for (i = 0; i < n; i += 1) - s = StringFromList(i, readables, ",") - m = ItemsInList(s, "/") - if (m > 1) - s = StringFromList(m - 1, s, "/") - endif - if (WhichListItem(s, detectors, ",") < 0) - detectors = AddListItem(s, detectors, ",", inf) - endif - endfor - detectors = "detectors = " + detectors - info = AddListItem(detectors, info, "\r", inf) - endif - - regions = psh5_list_scan_regions(fileID, scanpath) - if (strlen(regions) > 0) - regions = "regions = " + regions - info = AddListItem(regions, info, "\r", inf) - endif - - return info -end - -/// convert text wave to list. -/// -/// -static function /s twave2list(wt, sep) - wave /t wt - string sep - - string list = "" - variable n = numpnts(wt) - variable i - for (i = 0; i < n; i += 1) - list = AddListItem(wt[i], list, sep, inf) - endfor - - return list -end - -/// convert numeric wave to list. 
-/// -/// -static function /s wave2list(w, format, sep) - wave w - string format - string sep - - string list = "" - variable n = numpnts(w) - variable i - string s - for (i = 0; i < n; i += 1) - sprintf s, format, w[i] - list = AddListItem(s, list, sep, inf) - endfor - - return list -end +// ====== miscellaneous functions ====== /// kill any waves matching a pattern in the experiment /// -/// this may be used to kill big waves of original data before saving +/// this may be used to kill big waves of original data before saving. +/// +/// example: to kill all ScientaImage waves: +/// ~~~~~~ +/// kill_matching_waves($"root:", "ScientaImage", 1) +/// ~~~~~~ /// function /s kill_matching_waves(dfr, pattern, recurse, [killed]) DFREF dfr diff --git a/pearl/pearl-scienta-preprocess.ipf b/pearl/pearl-scienta-preprocess.ipf index dd6302e..d4fede3 100644 --- a/pearl/pearl-scienta-preprocess.ipf +++ b/pearl/pearl-scienta-preprocess.ipf @@ -853,6 +853,148 @@ threadsafe function /wave gauss4_reduction(source, param) return result_waves end +threadsafe function /wave gauss6_reduction(source, param) + wave source + string ¶m + + variable nx = dimsize(source, 0) + variable ny = dimsize(source, 1) + + // read parameters + variable rngl = NumberByKey("rngl", param, "=", ";") + variable rngh = NumberByKey("rngh", param, "=", ";") + variable pos1 = NumberByKey("pos1", param, "=", ";") + variable wid1 = NumberByKey("wid1", param, "=", ";") + variable pos2 = NumberByKey("pos2", param, "=", ";") + variable wid2 = NumberByKey("wid2", param, "=", ";") + variable pos3 = NumberByKey("pos3", param, "=", ";") + variable wid3 = NumberByKey("wid3", param, "=", ";") + variable pos4 = NumberByKey("pos4", param, "=", ";") + variable wid4 = NumberByKey("wid4", param, "=", ";") + variable pos5 = NumberByKey("pos5", param, "=", ";") + variable wid5 = NumberByKey("wid5", param, "=", ";") + variable pos6 = NumberByKey("pos6", param, "=", ";") + variable wid6 = NumberByKey("wid6", param, "=", ";") + 
variable npeaks = NumberByKey("npeaks", param, "=", ";")
+	variable ybox = NumberByKey("ybox", param, "=", ";")
+	
+	// prepare curve fit
+	variable ipk
+	make /free xprof
+	adh5_setup_profile(source, xprof, 0)
+	duplicate /free xprof, xprof_sig
+	variable pl = max(x2pnt(xprof, rngl), 0)
+	variable ph = min(x2pnt(xprof, rngh), numpnts(xprof) - 1)
+	
+	make /free /n=(npeaks) peak_coef
+	peak_coef = p * 3 + 2
+	variable n_coef = npeaks * 3 + 2
+	make /free /d /n=(n_coef) w_coef, W_sigma
+	w_coef[0] = {0, 0, 1, pos1, wid1, 1, pos2, wid2, 1, pos3, wid3, 1, pos4, wid4, 1, pos5, wid5, 1, pos6, wid6}
+	redimension /n=(n_coef) w_coef, w_sigma
+	
+	// text constraints cannot be used in threadsafe functions.
+	// the following matrix-vector formulation is equivalent to:
+	// make /free /T /N=6 constraints
+	// constraints[0] = {"K2 >= 0", "K5 >= 0", "K8 >= 0", "K11 >= 0", "K1 <= 0", "K0 >= 0"}
+	make /free /n=(npeaks + 2, numpnts(w_coef)) cmat
+	make /free /n=(npeaks + 2) cvec
+	cmat = 0
+	cmat[0][0] = -1
+	cmat[1][1] = 1
+	cvec = 0
+	
+	string hold = "00"
+	for (ipk=0; ipk < npeaks; ipk += 1)
+	hold += "011"
+	cmat[2 + ipk][2 + ipk*3] = -1
+	endfor
+	
+	// prepare output
+	make /free /n=(npeaks * 2) /wave result_waves
+	string s_note
+	for (ipk = 0; ipk < npeaks; ipk += 1)
+	make /free /n=0 pk_int
+	adh5_setup_profile(source, pk_int, 1)
+	pk_int = nan
+	sprintf s_note, "AxisLabelD=peak %u integral", ipk+1
+	Note pk_int, s_note
+	sprintf s_note, "KineticEnergy=%.3f", w_coef[3 + ipk * 3]
+	Note pk_int, s_note
+	result_waves[ipk] = pk_int
+	
+	make /free /n=0 pk_sig
+	adh5_setup_profile(source, pk_sig, 1)
+	pk_sig = nan
+	sprintf s_note, "AxisLabelD=peak %u sigma", ipk+1
+	Note pk_sig, s_note
+	sprintf s_note, "KineticEnergy=%.3f", w_coef[3 + ipk * 3]
+	Note pk_sig, s_note
+	result_waves[ipk + npeaks] = pk_sig
+	
+	waveclear pk_int, pk_sig
+	endfor
+	
+	// loop over angle scale
+	variable p0 = 0
+	variable p1 = dimsize(source, 1) - 1
+	variable pp
+	variable wmin
+	variable wmax
+	if 
(ybox > 1) + p0 += ceil((ybox - 1) / 2) + p1 -= ceil((ybox - 1) / 2) + endif + variable V_FitNumIters + variable V_FitError + + for (pp = p0; pp <= p1; pp += 1) + // box average + xprof = source[p][pp] + if (ybox > 1) + xprof += source[p][pp-1] + source[p][pp+1] + endif + xprof_sig = max(sqrt(xprof), 1) + xprof /= ybox + xprof_sig /= ybox + + // generate guess + wmin = wavemin(xprof) + wmax = wavemax(xprof) + w_coef[0] = wmin + w_coef[1] = 0 + for (ipk=0; ipk < npeaks; ipk += 1) + w_coef[2 + ipk*3] = wmax - wmin + endfor + + V_FitError = 0 + FuncFit /H=hold /Q /NTHR=1 /N /W=2 MultiGaussLinBG_AO w_coef xprof[pl,ph] /C={cmat, cvec} /I=1 /W=xprof_sig[pl,ph] + wave w_sigma + + // retrieve results, leave them at nan if the fit did not converge + if (V_FitNumIters < 40) + for (ipk = 0; ipk < npeaks; ipk += 1) + wave val = result_waves[ipk] + wave sig = result_waves[ipk + npeaks] + val[pp] = max(w_coef[peak_coef[ipk]], 0) + sig[pp] = max(w_sigma[peak_coef[ipk]], 0) + endfor + endif + endfor + + // calculate integral + for (ipk = 0; ipk < npeaks; ipk += 1) + wave val = result_waves[ipk] + wave sig = result_waves[ipk + npeaks] + val *= w_coef[peak_coef[ipk] + 2] * sqrt(pi) + sig *= w_coef[peak_coef[ipk] + 2] * sqrt(pi) + endfor + + return result_waves +end + + + /// find peak positions for the gauss-fit reduction function ///