update pshell explorer and data import, misc. improvements

FEATURES

- pshell: convert scienta data to true counts
- pre-process: add gauss2_reduction data reduction function
- anglescan: add set_contrast and normalize_strip_phi functions
- explorer: show info about multi-region scans
- documentation: add detailed instructions for angle-scan processing

BUGFIXES

- explorer: fix attributes notebook
- pshell: fix progress bar
- elog: increase the number of accepted attachments
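
A minimal usage sketch of the revised preview loader follows; the symbolic path and file names are placeholders, not part of this commit.

// hypothetical call from the Igor command line (placeholder symbolic path and file name):
// psh5_load_preview no longer takes a destination nick name argument and returns
// the name of the loaded preview wave; with the new kDetectorSensitivity scaling,
// Scienta intensities are reported in true counts when the scales are applied.
print psh5_load_preview("pearl_explorer_filepath", "sample-scan.h5")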
2017-09-21 12:36:30 +02:00
parent 0a436db00b
commit cf1399e59c
137 changed files with 1824 additions and 853 deletions


@@ -75,6 +75,9 @@ strconstant kScientaScalingDatasets = "LensMode;ScientaChannelBegin;ScientaChann
/// List of datasets that should be transposed upon loading
strconstant kTransposedDatasets = "ScientaImage;"
/// multiply scienta detector intensity by this value to get actual counts.
constant kDetectorSensitivity = 4
/// open a HDF5 file created by the PShell data acquisition program and prepare the data folder.
///
/// the function opens a specified or interactively selected HDF5 file,
@@ -216,11 +219,6 @@ end
/// the data wave is loaded into the current data folder.
/// attributes are loaded into the attr subfolder. existing waves in attr are deleted.
///
/// @warning EXPERIMENTAL
/// this function uses the root:pearl_area:preview data folder. existing data there may be deleted!
///
/// @param ANickName destination wave name. the wave is created in the current data folder.
///
/// @param APathName igor symbolic path name. can be empty if the path is specified in FileName or a dialog box should be displayed
///
/// @param AFileName if empty a dialog box shows up
@@ -242,8 +240,7 @@ end
///
/// @return name of loaded preview wave.
///
function /s psh5_load_preview(ANickName, APathName, AFileName, [load_data, load_attr, pref_scans, pref_datasets])
string ANickName
function /s psh5_load_preview(APathName, AFileName, [load_data, load_attr, pref_scans, pref_datasets])
string APathName
string AFileName
variable load_data
@@ -265,9 +262,6 @@ function /s psh5_load_preview(ANickName, APathName, AFileName, [load_data, load_
endif
dfref saveDF = GetDataFolderDFR()
setdatafolder root:
newdatafolder /o/s pearl_area
newdatafolder /o/s preview
variable fileID
string scanpaths = ""
@@ -319,16 +313,6 @@ function /s psh5_load_preview(ANickName, APathName, AFileName, [load_data, load_
setdatafolder fileDF
dataname = psh5_load_scan_preview(fileID, sg, set_scale=load_attr, pref_datasets=pref_datasets)
wave /z data = $dataname
string destpath = GetDataFolder(1, saveDF) + ANickName
if (waveexists(data))
duplicate /o data, $destpath
wave /z data = $destpath
else
print "no data found in file " + AFileName
endif
else
print "no scans found in file " + AFileName
endif
@@ -903,7 +887,6 @@ function /s psh5_load_scan_preview(fileID, scanpath, [set_scale, pref_datasets])
setdatafolder dataDF
newdatafolder /o/s attr
killwaves /a/z
psh5_load_scan_attrs(fileID, scanpath, attr_sets=2)
setdatafolder dataDF
ps_scale_dataset(data)
@@ -999,7 +982,7 @@ function /s psh5_load_scan_section(fileID, scanpath, dim, [set_scale, pref_datas
ny = di.dims[idy]
nz = di.dims[idz]
HDF5MakeHyperslabWave(GetDataFolder(1) + "slab", max(di.ndims, 4))
HDF5MakeHyperslabWave("slab", max(di.ndims, 4))
wave slab
slab[][%Start] = 0
slab[][%Stride] = 1
@@ -1197,7 +1180,7 @@ function /s psh5_load_dataset_slabs(fileID, datapath, datasetname, [progress])
endif
// load data image by image
HDF5MakeHyperslabWave(GetDataFolder(1) + "slab", max(di.ndims, 4))
HDF5MakeHyperslabWave("slab", max(di.ndims, 4))
wave slab
slab[][%Start] = 0
slab[][%Stride] = 1
@@ -1319,7 +1302,7 @@ function /s psh5_load_dataset_slab(fileID, datapath, datasetname, dim2start, dim
wave data = $datawavename
data = 0
HDF5MakeHyperslabWave(GetDataFolder(1) + "slab", max(di.ndims, 4))
HDF5MakeHyperslabWave("slab", max(di.ndims, 4))
wave slab
slab[][%Start] = 0
slab[][%Stride] = 1
@@ -1794,6 +1777,16 @@ function ps_scale_dataset_2(data, ax, lo, hi, un)
string data_label = ax[%$kDataDimLabel]
if (cmpstr(data_unit, "arb.") == 0)
strswitch(NameOfWave(data))
case "ScientaImage":
case "ImageAngleDistribution":
case "ScientaAngleDistribution":
case "ScientaSpectrum":
case "ImageEnergyDistribution":
case "ScientaEnergyDistribution":
data *= kDetectorSensitivity
data_unit = "counts"
data_label = "intensity"
break
case "SampleCurrent":
case "RefCurrent":
case "AuxCurrent":
@@ -1948,8 +1941,9 @@ end
/// @param scanpath path to scan group in the HDF5 file.
///
/// @param datasetname name of the dataset.
/// this must currently be "ScientaImage", other data is not supported.
/// the name of the loaded wave is a cleaned up version of the dataset name.
/// the name can include the region name as a relative path, e.g. "region1/ScientaSpectrum".
/// the name can include the region name as a relative path, e.g. "region1/ScientaImage".
/// in this case, the dataset is loaded into a sub-folder named "region1".
///
/// @param reduction_func custom reduction function
@@ -2050,7 +2044,7 @@ function /s psh5_load_dataset_reduced(fileID, scanpath, datasetname, reduction_f
nzt = nz * nt
// load data image by image
HDF5MakeHyperslabWave(GetDataFolder(1) + "slab", max(di.ndims, 4))
HDF5MakeHyperslabWave("slab", max(di.ndims, 4))
wave slab
slab[][%Start] = 0
slab[][%Stride] = 1
@@ -2076,11 +2070,12 @@ function /s psh5_load_dataset_reduced(fileID, scanpath, datasetname, reduction_f
if (progress)
display_progress_panel("HDF5 Import", "Loading data (step 1 of 2)...", nzt)
endif
make /n=(nx,ny) /d /o image_template
setdimlabel 0, -1, $kEnergyDimLabel, image_template
setdimlabel 1, -1, $kAngleDimLabel, image_template
ps_scale_dataset(image_template)
// create a template wave with the correct scales and labels
make /n=(nx,ny) /d /o $datawavename
wave template = $datawavename
ps_set_dimlabels2(template, datawavename)
ps_scale_dataset(template)
variable iz, it, izt
string dfname
@@ -2093,9 +2088,9 @@ function /s psh5_load_dataset_reduced(fileID, scanpath, datasetname, reduction_f
dfname = "processing_" + num2str(izt)
newdatafolder /s $dfname
HDF5LoadData /O /Q /Z /SLAB=slab /N=slabdata fileID, datasetpath
// send to processing queue
duplicate image_template, image
duplicate template, image
variable /g r_index = iz
variable /g s_index = it
string /g func_param = reduction_param
@@ -2125,7 +2120,7 @@ function /s psh5_load_dataset_reduced(fileID, scanpath, datasetname, reduction_f
endfor
endfor
killwaves /z slab, slabdata, image_template
killwaves /z slab, slabdata, template
if (progress)
update_progress_panel(0, message="Processing data (step 2 of 2)...")
endif
@@ -2134,10 +2129,6 @@ function /s psh5_load_dataset_reduced(fileID, scanpath, datasetname, reduction_f
for (izt = 0; (izt < nzt) && (result == 0); izt += 1)
if (nthreads > 0)
do
dfr = ThreadGroupGetDFR(threadGroupID, 1000)
if (DatafolderRefStatus(dfr) != 0)
break
endif
if (progress)
if (update_progress_panel(izt))
print "user abort"
@@ -2145,9 +2136,12 @@ function /s psh5_load_dataset_reduced(fileID, scanpath, datasetname, reduction_f
break
endif
endif
dfr = ThreadGroupGetDFR(threadGroupID, 1000)
if (DatafolderRefStatus(dfr) != 0)
break
endif
while (1)
else
dfr = processing_folders[izt]
if (progress)
if (update_progress_panel(izt))
print "user abort"
@@ -2155,6 +2149,7 @@ function /s psh5_load_dataset_reduced(fileID, scanpath, datasetname, reduction_f
break
endif
endif
dfr = processing_folders[izt]
endif
if (result != 0)
@@ -2175,6 +2170,8 @@ function /s psh5_load_dataset_reduced(fileID, scanpath, datasetname, reduction_f
setdimlabel 0, -1, $getdimlabel(profile2, 0, -1), ReducedData2
setdimlabel 1, -1, $kScanDimLabel, ReducedData1
setdimlabel 1, -1, $kScanDimLabel, ReducedData2
ps_scale_dataset(ReducedData1)
ps_scale_dataset(ReducedData2)
setscale /p x dimoffset(profile1, 0), dimdelta(profile1, 0), waveunits(profile1, 0), ReducedData1
setscale /p x dimoffset(profile2, 0), dimdelta(profile2, 0), waveunits(profile2, 0), ReducedData2
setscale d 0, 0, waveunits(profile1, -1), ReducedData1
@@ -2210,8 +2207,6 @@ function /s psh5_load_dataset_reduced(fileID, scanpath, datasetname, reduction_f
redimension /n=(-1, nz, 0) ReducedData2
endif
wavenames = "ReducedData1;ReducedData2;"
ps_scale_dataset(ReducedData1)
ps_scale_dataset(ReducedData2)
endif
if (progress)
kill_progress_panel()
@@ -2268,7 +2263,8 @@ threadsafe static function reduce_slab_image(slabdata, image, profile1, profile2
funcref adh5_default_reduction reduction_func
string reduction_param
image = slabdata[q][p][0][0]
// the multiplication by detector sensitivity assumes that we are loading a ScientaImage.
image = slabdata[q][p][0][0] * kDetectorSensitivity
return reduction_func(image, profile1, profile2, reduction_param)
end
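
The call above also fixes the interface seen by custom reduction functions: they receive the count-scaled detector image, two destination profile waves and a parameter string, and return zero on success. Below is a minimal sketch of a conforming function; the name and the integration body are illustrative only, and the authoritative prototype is adh5_default_reduction, which is not part of this diff.

// hypothetical reduction function: integrate the detector image along each axis.
threadsafe function my_integrate_reduction(source, profile1, profile2, param)
	wave source                     // count-scaled ScientaImage (energy x angle)
	wave profile1, profile2         // destination profiles, redimensioned here
	string param                    // free-format parameter string, unused in this sketch
	variable nx = dimsize(source, 0)
	variable ny = dimsize(source, 1)
	redimension /n=(nx) profile1
	redimension /n=(ny) profile2
	setscale /p x dimoffset(source, 0), dimdelta(source, 0), waveunits(source, 0), profile1
	setscale /p x dimoffset(source, 1), dimdelta(source, 1), waveunits(source, 1), profile2
	profile1 = 0
	profile2 = 0
	variable i
	for (i = 0; i < ny; i += 1)
		profile1 += source[p][i]    // sum over angles -> energy distribution
	endfor
	for (i = 0; i < nx; i += 1)
		profile2 += source[i][p]    // sum over energies -> angle distribution
	endfor
	return 0
end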
@@ -2325,7 +2321,8 @@ end
/// the info string contains up to four lines which are made up of the following information:
/// - number of scan positions.
/// - dataset names of scan positioners.
/// - dataset names of detectors.
/// - dataset names of detectors (without region names).
/// - region names
///
/// @param fileID ID of open HDF5 file from psh5_open_file().
///
@@ -2340,9 +2337,12 @@ function /s psh5_load_scan_info(fileID, scanpath)
string info = ""
string positions = ""
string positioners = ""
string readables = ""
string detectors = ""
string regions = ""
psh5_load_scan_meta(fileID, scanpath)
wave /z ScanDimensions
wave /t /z ScanWritables
wave /t /z ScanReadables
@@ -2357,10 +2357,31 @@ function /s psh5_load_scan_info(fileID, scanpath)
positioners = "positioners = " + twave2list(ScanWritables, ",")
info = AddListItem(positioners, info, "\r", inf)
endif
variable i, m, n
string s
if (WaveExists(ScanReadables) && (numpnts(ScanReadables) >= 1))
detectors = "detectors = " + twave2list(ScanReadables, ",")
readables = twave2list(ScanReadables, ",")
n = ItemsInList(readables, ",")
for (i = 0; i < n; i += 1)
s = StringFromList(i, readables, ",")
m = ItemsInList(s, "/")
if (m > 1)
s = StringFromList(m - 1, s, "/")
endif
if (WhichListItem(s, detectors, ",") < 0)
detectors = AddListItem(s, detectors, ",", inf)
endif
endfor
detectors = "detectors = " + detectors
info = AddListItem(detectors, info, "\r", inf)
endif
regions = psh5_list_scan_regions(fileID, scanpath)
if (strlen(regions) > 0)
regions = "regions = " + regions
info = AddListItem(regions, info, "\r", inf)
endif
return info
end
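
For a multi-region scan, the returned info string might look like the sketch below (the scan path, position count and dataset/region names are invented):

// print psh5_load_scan_info(fileID, "/scan 1")
//   positions = 31
//   positioners = ManipulatorTheta
//   detectors = ScientaImage,ScientaSpectrum,SampleCurrent
//   regions = region1,region2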