PEARL Procedures  rev-distro-1.6.0-0-gcf1399e-dirty
Igor procedures for the analysis of PEARL data
pearl-area-import.ipf
1 #pragma rtGlobals=3// Use modern global access method and strict wave access.
2 #pragma IgorVersion = 6.2
3 #pragma ModuleName = PearlAreaImport
4 #pragma version = 1.06
5 #include <HDF5 Browser>
6 #include "pearl-gui-tools"
7 
8 // copyright (c) 2013-16 Paul Scherrer Institut
9 //
10 // Licensed under the Apache License, Version 2.0 (the "License");
11 // you may not use this file except in compliance with the License.
12 // You may obtain a copy of the License at
13 // http://www.apache.org/licenses/LICENSE-2.0
14 
26 
31 
34 static variable BeforeFileOpenHook(variable refNum, string fileName, string path, string type, string creator, variable kind){
35  variable refNum, kind
36  string fileName, path, type, creator
37 
38  variable handledOpen = 0
39 
40  //PathInfo $path
41  //string FilePath = s_path + filename
42  string NickName = CleanupName(ParseFilePath(3, FileName, ":", 0, 0), 0)
43  string FileExt = LowerStr(ParseFilePath(4, FileName, ":", 0, 0))
44  string result = ""
45 
46  // override nickname with custom setting
47  svar /z cnn = gsCustomNickName
48  if (svar_exists(cnn))
49  if (exists("gvNickNameIndex") != 2)
50  variable/g gvNickNameIndex = 1
51  endif
52  nvar nni = gvNickNameIndex
53  NickName = cnn + num2str(nni)
54  nni += 1
55  endif
56 
57  if (stringmatch(FileExt, "h5") == 1)
58  result = adh5_load_complete(NickName, path, FileName)
59  endif
60 
61  string/g s_latest_datafile = result
62  string/g s_latest_nickname = nickname
63 
64  handledOpen = strlen(result) > 0
65  if (handledOpen)
66  close refnum
67  endif
68 
69  return handledOpen// 1 tells Igor not to open the file
70 };
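
To give dropped files a custom, sequentially numbered nickname instead of the file-derived one, define the global string gsCustomNickName (and optionally the counter gvNickNameIndex) before dragging files into Igor. A minimal sketch, assuming the hook runs with root: as the current data folder and using "mydata" as an arbitrary prefix:

    setdatafolder root:
    string /g gsCustomNickName = "mydata"
    variable /g gvNickNameIndex = 1

Subsequent drops are then loaded into root:mydata1, root:mydata2, and so on.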
71 
98 string ad_suggest_foldername(string filename, variable ignoredate = defaultValue, string sourcename = defaultValue, variable unique = defaultValue){
99  string filename
100  variable ignoredate
101  string sourcename
102  variable unique
103 
104  if (ParamIsDefault(ignoredate))
105  ignoredate = 0
106  endif
107  if (ParamIsDefault(unique))
108  unique = 0
109  endif
110 
111  string basename = ParseFilePath(3, filename, ":", 0, 0)
112  string extension = ParseFilePath(4, filename, ":", 0, 0)
113  string nickname
114 
115  string autosource
116  if (strsearch(basename, "scienta", 0, 2) >= 0)
117  autosource = "sci"
118  elseif (strsearch(basename, "pshell", 0, 2) >= 0)
119  autosource = "psh"
120  elseif (strsearch(basename, "OP-SL", 0, 2) >= 0)
121  autosource = "sl"
122  elseif (strsearch(basename, "ES-PS", 0, 2) >= 0)
123  autosource = "es"
124  else
125  autosource = "xy"
126  endif
127  if (ParamIsDefault(sourcename))
128  sourcename = autosource
129  endif
130 
131  variable nparts = ItemsInList(basename, "-")
132  if (nparts >= 3)
133  string datepart = StringFromList(1, basename, "-")
134  variable l_datepart = strlen(datepart)
135  if (l_datepart == 8)
136  datepart = datepart[l_datepart-6, l_datepart-1]
137  endif
138  string indexpart = StringFromList(2, basename, "-")
139  if (ignoredate)
140  sprintf nickname, "%s_%s", sourcename, indexpart
141  else
142  sprintf nickname, "%s_%s_%s", sourcename, datepart, indexpart
143  endif
144  else
145  nickname = CleanupName(basename, 0)
146  endif
147 
148  if (unique && CheckName(nickname, 11))
149  nickname = UniqueName(nickname + "_", 11, 0)
150  endif
151 
152  return nickname
153 };
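
A worked example of the nickname scheme, assuming a PEARL PShell file that follows the pshell-YYYYMMDD-NNN naming pattern (the file name is hypothetical):

    print ad_suggest_foldername("pshell-20160315-032.h5")
    // prints: psh_160315_032 (source "psh", date reduced to YYMMDD, run index "032")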
154 
159 variable ad_load_dialog(string APathName){
160  string APathName
161 
162  variable refNum
163  string message = "Select data files"
164  string filepaths
165  string filefilters = "Area Detector HDF5 Files (*.h5):.h5;"
166  filefilters += "All Files:.*;"
167 
168  PathInfo /S $APathName
169  Open /D /R /F=filefilters /M=message /MULT=1 refNum
170  filepaths = S_fileName
171 
172  dfref saveDF = GetDataFolderDFR()
173  setdatafolder root:
174 
175  if (strlen(filepaths) > 0)
176  variable nfiles = ItemsInList(filepaths, "\r")
177  variable ifile
178  for(ifile = 0; ifile < nfiles; ifile += 1)
179  String path = StringFromList(ifile, filepaths, "\r")
180  string nickname = ad_suggest_foldername(path)
181  adh5_load_complete(nickname, "", path)
182  endfor
183  endif
184 
185  setdatafolder saveDF
186 };
187 
199 string adh5_load_complete(string ANickName, string APathName, string AFileName, variable load_data = defaultValue, variable load_attr = defaultValue){
200  string ANickName
201  string APathName
202  string AFileName
203  variable load_data
204  variable load_attr
205 
206  if (ParamIsDefault(load_data))
207  load_data = 1
208  endif
209  if (ParamIsDefault(load_attr))
210  load_attr = 1
211  endif
212 
213  dfref saveDF = GetDataFolderDFR()
214  setdatafolder root:
215  newdatafolder /s/o $("root:" + ANickName)
216 
217  // open file
218  variable fileID
219  string instrumentpath = "/entry/instrument/"
220  string detectorpath = instrumentpath + "detector/"
221  string attributespath = instrumentpath + "NDAttributes/"
222  string datasetname
223  string datawavename
224 
225  // performance monitoring
226  variable timerRefNum
227  variable /g adh5_perf_secs
228  timerRefNum = startMSTimer
229 
230  // avoid compilation error if HDF5 XOP has not been loaded
231  #if Exists("HDF5OpenFile")
232  HDF5OpenFile /P=$APathName/R fileID as AFileName
233  if (v_flag == 0)
234  AFileName = s_path + s_filename
235  print "loading " + s_filename + "\r"
236 
237  if (load_data)
238  adh5_load_detector_slabs(fileID, detectorpath)
239  endif
240  if (load_attr)
241  newdatafolder /o/s attr
242  adh5_loadattr_all(fileID, attributespath)
243  setdatafolder ::
244  endif
245 
246  wave /z data
247  if (waveexists(data))
248  //adh5_redim(data) // not to be used with adh5_load_detector_slabs
249  adh5_scale(data)
250  endif
251 
252  HDF5CloseFile fileID
253  else
254  AFileName = ""
255  endif
256  #else
257  Abort "HDF5 XOP not loaded."
258  #endif
259 
260  if (timerRefNum >= 0)
261  adh5_perf_secs = stopMSTimer(timerRefNum) / 1e6
262  endif
263 
264  setdatafolder saveDF
265  return AFileName
266 };
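
A usage example from the command line; the symbolic path and file name are hypothetical:

    newpath /o rawdata, "D:PEARL:2016:"
    print adh5_load_complete("psh_160315_032", "rawdata", "pshell-20160315-032.h5")

The scaled detector array then appears as root:psh_160315_032:data, and the NDAttributes are loaded into root:psh_160315_032:attr.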
267 
294 string adh5_load_reduced(string ANickName, string APathName, string AFileName, funcref reduction_func, string reduction_param, variable load_data = defaultValue, variable load_attr = defaultValue, variable progress = defaultValue){
295  string ANickName
296  string APathName
297  string AFileName
298 
299  funcref adh5_default_reduction reduction_func
300  string reduction_param
301 
302  variable load_data
303  variable load_attr
304  variable progress
305 
306  if (ParamIsDefault(load_data))
307  load_data = 1
308  endif
309  if (ParamIsDefault(load_attr))
310  load_attr = 1
311  endif
312  if (ParamIsDefault(progress))
313  progress = 1
314  endif
315 
316  dfref saveDF = GetDataFolderDFR()
317  setdatafolder root:
318  newdatafolder /s/o $("root:" + ANickName)
319 
320  // open file
321  variable fileID
322  string instrumentpath = "/entry/instrument/"
323  string detectorpath = instrumentpath + "detector/"
324  string attributespath = instrumentpath + "NDAttributes/"
325  string datasetname
326  string datawavename
327 
328  // performance monitoring
329  variable timerRefNum
330  variable /g adh5_perf_secs
331  timerRefNum = startMSTimer
332 
333  // avoid compilation error if HDF5 XOP has not been loaded
334  #if Exists("HDF5OpenFile")
335  HDF5OpenFile /P=$APathName/R fileID as AFileName
336  if (v_flag == 0)
337  AFileName = s_path + s_filename
338  print "loading " + s_filename + "\r"
339 
340  if (load_attr)
341  newdatafolder /o/s attr
342  adh5_loadattr_all(fileID, attributespath)
343  setdatafolder ::
344  endif
345  if (load_data)
346  adh5_load_reduced_detector(fileID, detectorpath, reduction_func, reduction_param, progress=progress)
347  endif
348 
349  HDF5CloseFile fileID
350  else
351  AFileName = ""
352  endif
353  #else
354  Abort "HDF5 XOP not loaded."
355  #endif
356 
357  if (timerRefNum >= 0)
358  adh5_perf_secs = stopMSTimer(timerRefNum) / 1e6
359  endif
360 
361  setdatafolder saveDF
362  return AFileName
363 };
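
A usage example with the default reduction function (path and file name as in the hypothetical example above). The reduced profiles are written to the waves data1 and data2 inside the nickname folder:

    print adh5_load_reduced("psh_160315_032", "rawdata", "pshell-20160315-032.h5", adh5_default_reduction, "")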
364 
379 string adh5_load_preview(string ANickName, string APathName, string AFileName, variable load_data = defaultValue, variable load_attr = defaultValue){
380  string ANickName
381  string APathName
382  string AFileName
383  variable load_data
384  variable load_attr
385 
386  if (ParamIsDefault(load_data))
387  load_data = 1
388  endif
389  if (ParamIsDefault(load_attr))
390  load_attr = 1
391  endif
392 
393  dfref saveDF = GetDataFolderDFR()
394  setdatafolder root:
395  newdatafolder /o/s pearl_area
396  newdatafolder /o/s preview
397 
398  // open file
399  variable fileID
400  string instrumentpath = "/entry/instrument/"
401  string detectorpath = instrumentpath + "detector/"
402  string attributespath = instrumentpath + "NDAttributes/"
403  string datasetname
404  string datawavename
405 
406  // performance monitoring
407  variable timerRefNum
408  variable /g adh5_perf_secs
409  timerRefNum = startMSTimer
410 
411  // avoid compilation error if HDF5 XOP has not been loaded
412  #if Exists("HDF5OpenFile")
413  HDF5OpenFile /P=$APathName/R/Z fileID as AFileName
414  if (v_flag == 0)
415  AFileName = s_path + s_filename
416 
417  // detector data
418  datasetname = detectorpath + "data"
419  STRUCT HDF5DataInfo di// Defined in HDF5 Browser.ipf.
420  InitHDF5DataInfo(di)
421  variable err = HDF5DatasetInfo(fileID, datasetname, 0, di)
422  if (err != 0)
423  print "error accessing detector/data"
424  return ""
425  endif
426  if (di.ndims < 2)
427  print "error: rank of dataset < 2"
428  return ""
429  endif
430 
431  variable dim2start = 0, dim2count = 1, dim3start = 0, dim3count = 1
432  if (di.ndims >= 3)
433  dim2start = floor(di.dims[di.ndims - 3] / 2)
434  dim2count = 1
435  endif
436  if (di.ndims >= 4)
437  dim3start = floor(di.dims[di.ndims - 4] / 2)
438  dim3count = 1
439  endif
440 
441  if (load_data)
442  adh5_load_detector_image(fileID, detectorpath, dim2start, dim2count, dim3start, dim3count)
443  wave /z data
444  string destpath = GetDataFolder(1, saveDF) + ANickName
445  if (waveexists(data))
446  duplicate /o data, $destpath
447  wave /z data = $destpath
448  endif
449  endif
450 
451  if (load_attr)
452  setdatafolder saveDF
453  newdatafolder /o/s attr
454  killwaves /a/z
455  adh5_loadattr_all(fileID, attributespath)
456  setdatafolder ::
457  if (waveexists(data))
458  adh5_scale(data)
459  endif
460  endif
461 
462  HDF5CloseFile fileID
463  else
464  print "error opening file " + AFileName
465  AFileName = ""
466  endif
467  #else
468  Abort "HDF5 XOP not loaded."
469  #endif
470 
471  if (timerRefNum >= 0)
472  adh5_perf_secs = stopMSTimer(timerRefNum) / 1e6
473  endif
474 
475  setdatafolder saveDF
476  return AFileName
477 };
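
A usage example (file reference hypothetical): the raw image is loaded under root:pearl_area:preview, and a scaled copy named after ANickName is placed in the caller's current data folder:

    print adh5_load_preview("preview_image", "rawdata", "pshell-20160315-032.h5")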
478 
489 string adh5_load_info(string APathName, string AFileName){
490  string APathName
491  string AFileName
492 
493  dfref saveDF = GetDataFolderDFR()
494 
495  // open file
496  variable fileID
497  string instrumentpath = "/entry/instrument/"
498  string detectorpath = instrumentpath + "detector/"
499  string attributespath = instrumentpath + "NDAttributes/"
500  string datasetname
501  string datawavename
502 
503  string s_info = ""
504  string s
505 
506  variable idim
507 
508  // avoid compilation error if HDF5 XOP has not been loaded
509  #if Exists("HDF5OpenFile")
510  HDF5OpenFile /P=$APathName/R/Z fileID as AFileName
511  if (v_flag == 0)
512  AFileName = s_path + s_filename
513 
514  // detector data
515  datasetname = detectorpath + "data"
516  STRUCT HDF5DataInfo di// Defined in HDF5 Browser.ipf.
517  InitHDF5DataInfo(di)
518  variable err = HDF5DatasetInfo(fileID, datasetname, 0, di)
519  if (err != 0)
520  print "error accessing detector/data"
521  return ""
522  endif
523 
524  for (idim = 0; idim < di.ndims; idim += 1)
525  sprintf s, "dim %u: %u points", idim, di.dims[idim]
526  if (strlen(s_info) > 0)
527  s_info = s_info + "\r" + s
528  else
529  s_info = s
530  endif
531  endfor
532 
533  dfref df = NewFreeDataFolder()
534  setdatafolder df
535  adh5_loadattr_all(fileID, attributespath)
536 
537  for (idim = 1; idim < 5; idim += 1)
538  sprintf s, "Scan%uActive", idim
539  wave /z w = $s
540  if (waveexists(w) && (numpnts(w) > 0) && (w[0] > 0))
541  sprintf s, "Scan%uPositioner1", idim
542  wave /t wt = $s
543  sprintf s, "scan %u: %s", idim, wt[0]
544  if (strlen(s_info) > 0)
545  s_info = s_info + "\r" + s
546  else
547  s_info = s
548  endif
549  endif
550  endfor
551 
552  HDF5CloseFile fileID
553  else
554  print "error opening file " + AFileName
555  AFileName = ""
556  endif
557  #else
558  Abort "HDF5 XOP not loaded."
559  #endif
560 
561  setdatafolder saveDF
562  return s_info
563 };
564 
573 variable adh5_load_detector(variable fileID, string detectorpath){
574  variable fileID
575  string detectorpath
576 
577  // avoid compilation error if HDF5 XOP has not been loaded
578  #if Exists("HDF5LoadData")
579  string datasetname
580  string datawavename
581 
582  // detector data
583  datasetname = detectorpath + "data"
584  STRUCT HDF5DataInfo di// Defined in HDF5 Browser.ipf.
585  InitHDF5DataInfo(di)
586  variable err = HDF5DatasetInfo(fileID, datasetname, 0, di)
587  if (err != 0)
588  print "error accessing detector/data"
589  return -1
590  endif
591  if (di.ndims < 2)
592  print "error: rank of dataset < 2"
593  return -2
594  endif
595 
596  HDF5LoadData /O /Q /Z fileID, datasetname
597  wave data
598 
599  #else
600  Abort "HDF5 XOP not loaded."
601  #endif
602 };
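
adh5_load_detector only reads the raw array into the current data folder. A sketch of a manual sequence that also reorders the dimensions and applies the scales, assuming the HDF5 XOP is loaded (the function name and file path are placeholders, to be put in a procedure file):

    function demo_manual_load()
        variable fileID
        HDF5OpenFile /R fileID as "D:PEARL:2016:pshell-20160315-032.h5"
        if (v_flag == 0)
            adh5_load_detector(fileID, "/entry/instrument/detector/")
            wave data
            adh5_redim(data)   // reorder dimensions (not needed after adh5_load_detector_slabs)
            adh5_scale(data)   // set dimension scales, using attributes if they have been loaded
            HDF5CloseFile fileID
        endif
    end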
603 
618 variable adh5_redim(wave data){
619  wave data
620 
621  duplicate /free data, tempdata
622  variable nd = wavedims(tempdata)
623  variable nx = dimsize(tempdata, nd - 1)
624  variable ny = dimsize(tempdata, nd - 2)
625  variable nz = dimsize(tempdata, nd - 3)
626  variable nt = dimsize(tempdata, nd - 4)
627 
628  switch (nd)
629  case 2:
630  if (nx <= 1)
631  redimension /n=(ny) data
632  setdimlabel 0, -1, AD_Dim1, data
633  data = tempdata[p][0]
634  elseif (ny <= 1)
635  redimension /n=(nx) data
636  setdimlabel 0, -1, AD_Dim0, data
637  data = tempdata[0][p]
638  else
639  redimension /n=(nx,ny) data
640  setdimlabel 0, -1, AD_Dim0, data
641  setdimlabel 1, -1, AD_Dim1, data
642  data = tempdata[q][p]
643  endif
644  break
645  case 3:
646  if (nx <= 1)
647  redimension /n=(ny,nz) data
648  setdimlabel 0, -1, AD_Dim1, data
649  setdimlabel 1, -1, AD_DimN, data
650  multithread data = tempdata[q][p][0]
651  elseif (ny <= 1)
652  redimension /n=(nx,nz) data
653  setdimlabel 0, -1, AD_Dim0, data
654  setdimlabel 1, -1, AD_DimN, data
655  multithread data = tempdata[q][0][p]
656  elseif (nz <= 1)
657  redimension /n=(nx,ny) data
658  setdimlabel 0, -1, AD_Dim0, data
659  setdimlabel 1, -1, AD_Dim1, data
660  multithread data = tempdata[0][q][p]
661  else
662  redimension /n=(nx,ny,nz) data
663  setdimlabel 0, -1, AD_Dim0, data
664  setdimlabel 1, -1, AD_Dim1, data
665  setdimlabel 2, -1, AD_DimN, data
666  multithread data = tempdata[r][q][p]
667  endif
668  break
669  case 4:
670  if (nz <= 1)
671  // singleton "frame number" dimension
672  redimension /n=(nx,ny,nt) data
673  setdimlabel 0, -1, AD_Dim0, data
674  setdimlabel 1, -1, AD_Dim1, data
675  setdimlabel 2, -1, AD_DimX, data
676  multithread data = tempdata[r][0][q][p]
677  else
678  redimension /n=(nx,ny,nz,nt) data
679  setdimlabel 0, -1, AD_Dim0, data
680  setdimlabel 1, -1, AD_Dim1, data
681  setdimlabel 2, -1, AD_DimN, data
682  setdimlabel 3, -1, AD_DimX, data
683  multithread data = tempdata[s][r][q][p]
684  endif
685  break
686  endswitch
687 };
688 
699 static dfr GetAttrDataFolderDFR(wave data){
700  wave data
701 
702  dfref dataDF = GetWavesDataFolderDFR(data)
703  dfref attrDF = dataDF:attr
704  if (DataFolderRefStatus(attrDF) == 0)
705  attrDF = dataDF
706  endif
707 
708  return attrDF
709 };
710 
718 variable adh5_scale(wave data, string source = defaultValue){
719  wave data
720  string source
721 
722  dfref saveDF = GetDataFolderDFR()
723  dfref dataDF = GetWavesDataFolderDFR(data)
724  dfref attrDF = GetAttrDataFolderDFR(data)
725 
726  if (ParamIsDefault(source))
727  // is the source a Scienta analyser?
728  wave /SDFR=attrDF /Z AcquisitionMode
729  wave /SDFR=attrDF /T /Z Manufacturer
730  source = "unknown"
731  if (waveexists(Manufacturer) && (numpnts(Manufacturer) >= 1))
732  strswitch(Manufacturer[0])
733  case "VG Scienta":
734  source = "scienta"
735  break
736  case "Prosilica":
737  source = "prosilica"
738  break
739  endswitch
740  elseif (waveexists(AcquisitionMode) && (numpnts(AcquisitionMode) >= 1))
741  if (stringmatch(note(AcquisitionMode), "*SCIENTA*"))
742  source = "scienta"
743  endif
744  endif
745  endif
746 
747  strswitch(source)
748  case "prosilica":
749  // pixel scale - nothing to do
750  break
751  case "scienta":
752  adh5_scale_scienta(data)
753  break
754  endswitch
755 
756  setdatafolder saveDF
757 };
758 
772 variable adh5_load_detector_slabs(variable fileID, string detectorpath, variable progress = defaultValue){
773  variable fileID
774  string detectorpath
775  variable progress
776 
777  if (ParamIsDefault(progress))
778  progress = 1
779  endif
780  variable result = 0
781 
782  // avoid compilation error if HDF5 XOP has not been loaded
783  #if Exists("HDF5LoadData")
784  string datasetname
785  string datawavename
786 
787  // detector data
788  datasetname = detectorpath + "data"
789  STRUCT HDF5DataInfo di// Defined in HDF5 Browser.ipf.
790  InitHDF5DataInfo(di)
791  variable err = HDF5DatasetInfo(fileID, datasetname, 0, di)
792  if (err != 0)
793  print "error accessing detector/data"
794  return -1
795  endif
796  if (di.ndims < 2)
797  print "error: rank of dataset < 2"
798  return -2
799  endif
800 
801  // nx and nz are the image dimensions
802  variable idx, idy, idz, idt, izt
803  idx = di.ndims - 1
804  idy = di.ndims - 2
805  idz = -1
806  idt = -1
807 
808  variable nx, ny, nz, nt, nzt
809  nx = di.dims[idx]
810  ny = di.dims[idy]
811  nz = 1
812  nt = 1
813 
814  make /n=(nx,ny,nz,nt) /o data
815  string dim_labels = "AD_Dim0;AD_Dim1;AD_DimN;AD_DimX;AD_DimY"
816  string dim_label
817  dim_label = StringFromList(0, dim_labels, ";")
818  setdimlabel 0, -1, $dim_label, data
819  dim_labels = RemoveFromList(dim_label, dim_labels, ";")
820  dim_label = StringFromList(0, dim_labels, ";")
821  setdimlabel 1, -1, $dim_label, data
822  dim_labels = RemoveFromList(dim_label, dim_labels, ";")
823 
824  // find additional dimensions, ignore singletons
825  variable id
826  for (id = idy - 1; (id >= 0) && (nz == 1); id -= 1)
827  if (di.dims[id] > 1)
828  idz = id
829  nz = di.dims[id]
830  dim_label = StringFromList(0, dim_labels, ";")
831  setdimlabel 2, -1, $dim_label, data
832  endif
833  dim_labels = RemoveListItem(0, dim_labels, ";")
834  endfor
835  for (id = idz - 1; (id >= 0) && (nt == 1); id -= 1)
836  if (di.dims[id] > 1)
837  idt = id
838  nt = di.dims[id]
839  dim_label = StringFromList(0, dim_labels, ";")
840  setdimlabel 3, -1, $dim_label, data
841  endif
842  dim_labels = RemoveListItem(0, dim_labels, ";")
843  endfor
844  redimension /n=(nx,ny,nz,nt) data
845 
846  // default values if dimensions are not present in dataset
847  if (idz < 0)
848  idz = idx + 1
849  idt = idz + 1
850  elseif (idt < 0)
851  idt = idx + 1
852  endif
853 
854  nzt = nz * nt
855  izt = 0
856  if (progress)
857  display_progress_panel("HDF5 Import", "Loading data...", nzt)
858  endif
859 
860  // load data image by image
861  HDF5MakeHyperslabWave(GetDataFolder(1) + "slab", max(di.ndims, 4))
862  wave slab
863  slab[][%Start] = 0
864  slab[][%Stride] = 1
865  slab[][%Count] = 1
866  slab[][%Block] = 1
867  slab[idx][%Block] = nx
868  slab[idy][%Block] = ny
869 
870  variable iz, it
871  for (iz = 0; iz < nz; iz += 1)
872  for (it = 0; it < nt; it += 1)
873  slab[idz][%Start] = iz
874  slab[idt][%Start] = it
875  HDF5LoadData /O /Q /Z /SLAB=slab /N=slabdata fileID, datasetname
876  wave slabdata// 2D, 3D, or 4D with singletons
877  switch (WaveDims(slabdata))
878  case 2:
879  data[][][iz][it] = slabdata[q][p]
880  break
881  case 3:
882  data[][][iz][it] = slabdata[0][q][p]
883  break
884  case 4:
885  data[][][iz][it] = slabdata[0][0][q][p]
886  break
887  endswitch
888  // progress window
889  izt += 1
890  if (progress)
891  if (update_progress_panel(izt))
892  result = -4// user abort
893  break
894  endif
895  endif
896  endfor
897  if (result < 0)
898  break
899  endif
900  endfor
901 
902  if (nz == 1)
903  redimension /n=(nx,ny) data
904  elseif (nt == 1)
905  redimension /n=(nx,ny,nz) data
906  endif
907 
908  if (progress)
909  kill_progress_panel()
910  endif
911  #else
912  Abort "HDF5 XOP not loaded."
913  #endif
914 
915  return result
916 };
917 
937 variable adh5_load_detector_image(variable fileID, string detectorpath, variable dim2start, variable dim2count, variable dim3start, variable dim3count){
938  variable fileID
939  string detectorpath
940  variable dim2start
941  variable dim2count
942  variable dim3start
943  variable dim3count
944 
945  // avoid compilation error if HDF5 XOP has not been loaded
946  #if Exists("HDF5LoadData")
947  string datasetname
948  string datawavename
949 
950  // detector data
951  datasetname = detectorpath + "data"
952  STRUCT HDF5DataInfo di// Defined in HDF5 Browser.ipf.
953  InitHDF5DataInfo(di)
954  variable err = HDF5DatasetInfo(fileID, datasetname, 0, di)
955  if (err != 0)
956  print "error accessing detector/data"
957  return -1
958  endif
959  if (di.ndims < 1)
960  print "error: rank of dataset < 1"
961  return -2
962  endif
963 
964  // nx and nz are the image dimensions
965  variable idx, idy, idz, idt
966  idx = di.ndims - 1
967  idy = di.ndims >= 2 ? di.ndims - 2 : 1
968  idz = di.ndims >= 3 ? di.ndims - 3 : 2
969  idt = di.ndims >= 4 ? di.ndims - 4 : 3
970 
971  variable nx, ny
972  nx = di.dims[idx]
973  ny = di.ndims >= 2 ? di.dims[idy] : 1
974 
975  variable dim2end = dim2start + dim2count - 1
976  variable dim3end = dim3start + dim3count - 1
977 
978  // the slab wave is at least 4-dimensional
979  // it will also load lower-dimensional datasets
980  HDF5MakeHyperslabWave(GetDataFolder(1) + "slab", max(di.ndims, 4))
981  wave slab
982  slab[][%Start] = 0
983  slab[][%Stride] = 1
984  slab[][%Count] = 1
985  slab[][%Block] = 1
986  slab[idx][%Block] = nx
987  slab[idy][%Block] = ny
988 
989  make /n=(nx,ny)/o/d data
990  data = 0
991  variable iz, it
992  variable navg = 0
993  for (iz = dim2start; iz <= dim2end; iz += 1)
994  for (it = dim3start; it <= dim3end; it += 1)
995  slab[idz][%Start] = iz
996  slab[idt][%Start] = it
997  HDF5LoadData /O /Q /Z /SLAB=slab /N=slabdata fileID, datasetname
998  wave slabdata// 2D, 3D, or 4D with singletons
999  switch (WaveDims(slabdata))
1000  case 1:
1001  data += slabdata[p]
1002  navg += 1
1003  break
1004  case 2:
1005  data += slabdata[q][p]
1006  navg += 1
1007  break
1008  case 3:
1009  data += slabdata[0][q][p]
1010  navg += 1
1011  break
1012  case 4:
1013  data += slabdata[0][0][q][p]
1014  navg += 1
1015  break
1016  endswitch
1017  endfor
1018  endfor
1019  data /= navg
1020  setdimlabel 0, -1, AD_Dim0, data
1021  setdimlabel 1, -1, AD_Dim1, data
1022 
1023  #else
1024  Abort "HDF5 XOP not loaded."
1025  #endif
1026 };
1027 
1033 string adh5_list_reduction_funcs(){
1034  string all_funcs = FunctionList("*", ";", "KIND:6,NPARAMS:4,VALTYPE:1")
1035  string result = ""
1036 
1037  variable ii
1038  variable nn = ItemsInList(all_funcs, ";")
1039 
1040  string funcname
1041  string info
1042  variable nparams
1043  variable accept
1044 
1045  for (ii = 0; ii < nn; ii += 1)
1046  funcname = StringFromList(ii, all_funcs, ";")
1047  info = FunctionInfo(funcname)
1048  accept = (NumberByKey("RETURNTYPE", info, ":", ";") == 0x0004)
1049  accept = accept && (cmpstr(StringByKey("THREADSAFE", info, ":", ";"), "yes") == 0)
1050  accept = accept && (NumberByKey("N_PARAMS", info, ":", ";") == 4)
1051  accept = accept && (NumberByKey("N_OPT_PARAMS", info, ":", ";") == 0)
1052  if (accept)
1053  // 3 numeric waves and one pass-by-reference string
1054  accept = accept && (NumberByKey("PARAM_0_TYPE", info, ":", ";") == 0x4002)
1055  accept = accept && (NumberByKey("PARAM_1_TYPE", info, ":", ";") == 0x4002)
1056  accept = accept && (NumberByKey("PARAM_2_TYPE", info, ":", ";") == 0x4002)
1057  accept = accept && (NumberByKey("PARAM_3_TYPE", info, ":", ";") == 0x3000)
1058  endif
1059  if (accept)
1060  result = AddListItem(funcname, result, ";")
1061  endif
1062  endfor
1063 
1064  result = SortList(result, ";", 4)
1065  return result
1066 };
1067 
1072 // the resulting wave must have the same size as either dimension of the source image.
1093 threadsafe variable adh5_default_reduction(wave source, wave dest1, wave dest2, string* param){
1094  wave source
1095  wave dest1, dest2
1096  string &param
1097 
1098  // demo code
1099  // integrate along the dimensions
1100  adh5_setup_profile(source, dest1, 0)
1101  ad_profile_x_w(source, 0, -1, dest1)
1102  adh5_setup_profile(source, dest2, 1)
1103  ad_profile_y_w(source, 0, -1, dest2)
1104 
1105  return 0
1106 };
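
Reduction functions must be threadsafe and follow the prototype above: three numeric wave parameters, one pass-by-reference string, and a numeric return value (0 on success), cf. adh5_list_reduction_funcs. A sketch of a user-defined variant that restricts the first profile to a row range passed in the parameter string; the function name and the parameter format "q1;q2" are made up for illustration:

    threadsafe function my_roi_reduction(source, dest1, dest2, param)
        wave source
        wave dest1, dest2
        string &param

        // hypothetical parameter format "q1;q2": row range for the first profile
        variable q1 = str2num(StringFromList(0, param))
        variable q2 = str2num(StringFromList(1, param))

        // profile along dimension 0, restricted to rows q1..q2
        adh5_setup_profile(source, dest1, 0)
        ad_profile_x_w(source, q1, q2, dest1)
        // profile along dimension 1 over the full range (0, -1 as in the prototype)
        adh5_setup_profile(source, dest2, 1)
        ad_profile_y_w(source, 0, -1, dest2)
        return 0
    end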
1107 
1113 threadsafe variable adh5_setup_profile(wave image, wave profile, variable dim){
1114  wave image// prototype
1115  wave profile// destination wave
1116  variable dim// which dimension to keep: 0 = X, 1 = Y
1117 
1118  redimension /n=(dimsize(image, dim)) profile
1119  setscale /p x dimoffset(image, dim), dimdelta(image, dim), waveunits(image, dim), profile
1120  setscale d 0, 0, waveunits(image, -1), profile
1121  setdimlabel 0, -1, $getdimlabel(image, dim, -1), profile
1122 };
1123 
1128 string adh5_test_reduction_func(wave source, wave dest1, wave dest2, funcref reduction_func, string reduction_param){
1129  wave source
1130  wave dest1
1131  wave dest2
1132  funcref adh5_default_reduction reduction_func
1133  string reduction_param
1134 
1135  reduction_func(source, dest1, dest2, reduction_param)
1136 
1137  return reduction_param
1138 };
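
A quick way to exercise a reduction function outside the loader, on a synthetic image (wave names are arbitrary):

    make /o/n=(100,80) demo_image = sin(x/10) * cos(y/8)
    make /o/n=1 profile1, profile2
    print adh5_test_reduction_func(demo_image, profile1, profile2, adh5_default_reduction, "")

adh5_setup_profile redimensions and rescales profile1 and profile2 inside the reduction function, so their initial size does not matter.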
1139 
1162 variable adh5_load_reduced_detector(variable fileID, string detectorpath, funcref reduction_func, string reduction_param, variable progress = defaultValue, variable nthreads = defaultValue){
1163  variable fileID
1164  string detectorpath
1165  funcref adh5_default_reduction reduction_func
1166  string reduction_param
1167  variable progress
1168  variable nthreads
1169 
1170  if (ParamIsDefault(progress))
1171  progress = 1
1172  endif
1173  if (ParamIsDefault(nthreads))
1174  nthreads = -1
1175  endif
1176  variable result = 0
1177 
1178  // avoid compilation error if HDF5 XOP has not been loaded
1179  #if Exists("HDF5LoadData")
1180  string datasetname
1181  string datawavename
1182 
1183  // detector data
1184  datasetname = detectorpath + "data"
1185  STRUCT HDF5DataInfo di// Defined in HDF5 Browser.ipf.
1186  InitHDF5DataInfo(di)
1187  variable err = HDF5DatasetInfo(fileID, datasetname, 0, di)
1188  if (err != 0)
1189  print "error accessing detector/data"
1190  return -1
1191  endif
1192  if (di.ndims < 2)
1193  print "error: rank of dataset < 2"
1194  return -2
1195  endif
1196 
1197  // nx and nz are the image dimensions
1198  variable idx, idy, idz, idt
1199  idx = di.ndims - 1
1200  idy = di.ndims - 2
1201  idz = -1
1202  idt = -1
1203 
1204  variable nx, ny, nz, nt
1205  nx = di.dims[idx]
1206  ny = di.dims[idy]
1207  nz = 1
1208  nt = 1
1209 
1210  // find additional dimensions, ignore singletons
1211  variable id
1212  for (id = idy - 1; (id >= 0) && (nz == 1); id -= 1)
1213  if (di.dims[id] > 1)
1214  idz = id
1215  nz = di.dims[id]
1216  endif
1217  endfor
1218  for (id = idz - 1; (id >= 0) && (nt == 1); id -= 1)
1219  if (di.dims[id] > 1)
1220  idt = id
1221  nt = di.dims[id]
1222  endif
1223  endfor
1224  // default values if dimensions are not present in dataset
1225  if (idz < 0)
1226  idz = idx + 1
1227  idt = idz + 1
1228  elseif (idt < 0)
1229  idt = idx + 1
1230  endif
1231  variable nzt = nz * nt
1232  variable izt
1233 
1234  // load data image by image
1235  HDF5MakeHyperslabWave(GetDataFolder(1) + "slab", max(di.ndims, 4))
1236  wave slab
1237  slab[][%Start] = 0
1238  slab[][%Stride] = 1
1239  slab[][%Count] = 1
1240  slab[][%Block] = 1
1241  slab[idx][%Block] = nx
1242  slab[idy][%Block] = ny
1243 
1244  // set up multi threading
1245  if (nthreads < 0)
1246  nthreads = ThreadProcessorCount
1247  endif
1248  if (nthreads > 0)
1249  variable threadGroupID = ThreadGroupCreate(nthreads)
1250  variable ithread
1251  for (ithread = 0; ithread < nthreads; ithread += 1)
1252  ThreadStart threadGroupID, ithread, reduce_slab_worker(reduction_func)
1253  endfor
1254  else
1255  make /n=(nzt) /df /free processing_folders
1256  endif
1257 
1258  if (progress)
1259  display_progress_panel("HDF5 Import", "Loading data (step 1 of 2)...", nzt)
1260  endif
1261 
1262  make /n=(nx,ny)/d image_template
1263  setdimlabel 0, -1, AD_Dim0, image_template
1264  setdimlabel 1, -1, AD_Dim1, image_template
1265  adh5_scale(image_template)
1266 
1267  variable iz, it
1268  string dfname
1269  izt = 0
1270  for (iz = 0; iz < nz; iz += 1)
1271  for (it = 0; it < nt; it += 1)
1272  // load hyperslab
1273  slab[idz][%Start] = iz
1274  slab[idt][%Start] = it
1275  dfname = "processing_" + num2str(izt)
1276  newdatafolder /s $dfname
1277  HDF5LoadData /O /Q /Z /SLAB=slab /N=slabdata fileID, datasetname
1278 
1279  // send to processing queue
1280  duplicate image_template, image
1281  variable /g r_index = iz
1282  variable /g s_index = it
1283  string /g func_param = reduction_param
1284 
1285  if (nthreads > 0)
1286  WaveClear image
1287  ThreadGroupPutDF threadGroupID, :
1288  else
1289  processing_folders[izt] = GetDataFolderDFR()
1290  make /n=1/d profile1, profile2
1291  wave slabdata
1292  variable /g func_result
1293  func_result = reduce_slab_image(slabdata, image, profile1, profile2, reduction_func, func_param)
1294  WaveClear slabdata, image, profile1, profile2
1295  setdatafolder ::
1296  endif
1297 
1298  izt += 1
1299  // progress window
1300  if (progress)
1301  if (update_progress_panel(izt))
1302  result = -4// user abort
1303  break
1304  endif
1305  endif
1306  endfor
1307  endfor
1308 
1309  killwaves /z slab, image_template
1310  if (progress)
1311  update_progress_panel(0, message="Processing data (step 2 of 2)...")
1312  endif
1313 
1314  dfref dfr
1315  for (izt = 0; (izt < nzt) && (result == 0); izt += 1)
1316  if (nthreads > 0)
1317  do
1318  dfr = ThreadGroupGetDFR(threadGroupID, 1000)
1319  if (DatafolderRefStatus(dfr) != 0)
1320  break
1321  endif
1322  if (progress)
1323  if (update_progress_panel(izt))
1324  result = -4// user abort
1325  break
1326  endif
1327  endif
1328  while (1)
1329  else
1330  dfr = processing_folders[izt]
1331  if (progress)
1332  if (update_progress_panel(izt))
1333  result = -4// user abort
1334  break
1335  endif
1336  endif
1337  endif
1338 
1339  if (result != 0)
1340  break
1341  endif
1342 
1343  nvar rr = dfr:r_index
1344  nvar ss = dfr:s_index
1345  nvar func_result = dfr:func_result
1346  wave profile1 = dfr:profile1
1347  wave profile2 = dfr:profile2
1348 
1349  if (func_result == 0)
1350  if (izt == 0)
1351  make /n=(dimsize(profile1, 0), nz, nt)/d/o data1
1352  make /n=(dimsize(profile2, 0), nz, nt)/d/o data2
1353  setdimlabel 0, -1, $getdimlabel(profile1, 0, -1), data1
1354  setdimlabel 0, -1, $getdimlabel(profile2, 0, -1), data2
1355  setscale /p x dimoffset(profile1, 0), dimdelta(profile1, 0), waveunits(profile1, 0), data1
1356  setscale /p x dimoffset(profile2, 0), dimdelta(profile2, 0), waveunits(profile2, 0), data2
1357  setscale d 0, 0, waveunits(profile1, -1), data1
1358  setscale d 0, 0, waveunits(profile2, -1), data2
1359  endif
1360  data1[][rr][ss] = profile1[p]
1361  data2[][rr][ss] = profile2[p]
1362  else
1363  result = -3// dimension reduction error
1364  break
1365  endif
1366  endfor
1367 
1368  if (nthreads > 0)
1369  variable tstatus = ThreadGroupRelease(threadGroupID)
1370  if (tstatus == -2)
1371  result = -5// thread did not terminate properly
1372  endif
1373  else
1374  for (izt = 0; izt < nzt; izt += 1)
1375  KillDataFolder /Z processing_folders[izt]
1376  endfor
1377  endif
1378 
1379  if (result == 0)
1380  if (nz == 1)
1381  redimension /n=(dimsize(data1,0)) data1
1382  redimension /n=(dimsize(data2,0)) data2
1383  elseif (nt == 1)
1384  redimension /n=(dimsize(data1,0),nz) data1
1385  redimension /n=(dimsize(data2,0),nz) data2
1386  setdimlabel 1, -1, AD_DimN, data1
1387  setdimlabel 1, -1, AD_DimN, data2
1388  else
1389  setdimlabel 1, -1, AD_DimN, data1
1390  setdimlabel 1, -1, AD_DimN, data2
1391  setdimlabel 2, -1, AD_DimX, data1
1392  setdimlabel 2, -1, AD_DimX, data2
1393  endif
1394  endif
1395  if (progress)
1396  kill_progress_panel()
1397  endif
1398 
1399  #else
1400  Abort "HDF5 XOP not loaded."
1401  #endif
1402  return result
1403 };
1404 
1405 threadsafe static variable reduce_slab_worker(funcref reduction_func){
1406  funcref adh5_default_reduction reduction_func
1407  do
1408  // wait for job from main thread
1409  do
1410  dfref dfr = ThreadGroupGetDFR(0, 1000)
1411  if (DataFolderRefStatus(dfr) == 0)
1412  if (GetRTError(2))
1413  return 0// no more jobs
1414  endif
1415  else
1416  break
1417  endif
1418  while (1)
1419 
1420  // get input data
1421  wave slabdata = dfr:slabdata
1422  wave image = dfr:image
1423  svar func_param = dfr:func_param
1424  nvar rr = dfr:r_index
1425  nvar ss = dfr:s_index
1426 
1427  // do the work
1428  newdatafolder /s outDF
1429  make /n=1/d profile1, profile2
1430  variable /g r_index = rr
1431  variable /g s_index = ss
1432  variable /g func_result
1433  func_result = reduce_slab_image(slabdata, image, profile1, profile2, reduction_func, func_param)
1434 
1435  // send output to queue and clean up
1436  WaveClear slabdata, image, profile1, profile2
1437  ThreadGroupPutDF 0, :
1438  KillDataFolder dfr
1439  while (1)
1440 
1441  return 0
1442 };
1443 
1444 threadsafe static variable reduce_slab_image(wave slabdata, wave image, wave profile1, wave profile2, funcref reduction_func, string reduction_param){
1445  wave slabdata
1446  wave image
1447  wave profile1
1448  wave profile2
1449  funcref adh5_default_reduction reduction_func
1450  string reduction_param
1451 
1452  switch (WaveDims(slabdata))
1453  case 2:
1454  image = slabdata[q][p]
1455  break
1456  case 3:
1457  image = slabdata[0][q][p]
1458  break
1459  case 4:
1460  image = slabdata[0][0][q][p]
1461  break
1462  endswitch
1463 
1464  return reduction_func(image, profile1, profile2, reduction_param)
1465 };
1466 
1479 variable adh5_loadattr_all(variable fileID, string attributespath){
1480  variable fileID
1481  string attributespath
1482 
1483  string datasetname
1484  string datawavename
1485 
1486  // avoid compilation error if HDF5 XOP has not been loaded
1487  #if Exists("HDF5LoadData")
1488 
1489  // datasets in NDAttributes group
1490  HDF5ListGroup /F /TYPE=2 fileID, attributespath
1491  string h5datasets = S_HDF5ListGroup
1492  HDF5ListAttributes /TYPE=1 /Z fileID, attributespath
1493  string h5attributes = S_HDF5ListAttributes
1494 
1495  variable nds = ItemsInList(h5datasets, ";")
1496  variable na = ItemsInList(h5attributes, ";")
1497  variable ids
1498  variable idest = 0
1499  variable n_attr
1500  string s_attr
1501  string s_source
1502 
1503  make /n=(nds+na) /t /o IN, ID, IV, IU
1504 
1505  for (ids = 0; ids < nds; ids += 1)
1506  datasetname = StringFromList(ids, h5datasets, ";")
1507  HDF5LoadData /O/Q fileID, datasetname
1508  if (v_flag == 0)
1509  datawavename = StringFromList(0, s_wavenames)
1510  else
1511  datawavename = ""
1512  endif
1513  HDF5LoadData /A="source"/O/Q/TYPE=2 fileID, datasetname
1514  if (v_flag == 0)
1515  wave /t source
1516  s_source = source[0]
1517  else
1518  s_source = ""
1519  endif
1520  read_attribute_info(datawavename, s_source, idest)
1521  endfor
1522 
1523  // attributes of NDAttributes group
1524  if (v_flag == 0)
1525  nds = ItemsInList(h5attributes, ";")
1526  else
1527  nds = 0
1528  endif
1529  for (ids = 0; ids < nds; ids += 1)
1530  datasetname = StringFromList(ids, h5attributes, ";")
1531  HDF5LoadData /A=datasetname/O/Q/TYPE=1 fileID, attributespath
1532  if (v_flag == 0)
1533  datawavename = StringFromList(0, s_wavenames)
1534  read_attribute_info(datawavename, "", idest)// we don't get the source of these attributes
1535  endif
1536  endfor
1537 
1538  redimension /n=(idest) IN, ID, IV, IU
1539  sort {IN, ID}, IN, ID, IV, IU
1540 
1541  killwaves /z source
1542  #else
1543  Abort "HDF5 XOP not loaded."
1544  #endif
1545 
1546 };
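
After an import, each attribute is available as a single-point wave in the attr sub-folder, and the text waves IN, ID, IV, IU give a sorted source/name/value/unit overview. For example, with the hypothetical nickname folder from above:

    edit root:psh_160315_032:attr:ID, root:psh_160315_032:attr:IV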
1547 
1561 static variable read_attribute_info(string datawavename, string source, variable* idest){
1562  string datawavename// name of the attribute wave in the current folder.
1563  // can be text or numeric.
1564  string source
1565  // source identifier (EPICS name) of the attribute.
1566  variable &idest
1567  // destination index in IN, ID, IV, IU where the results are written.
1568  // the variable is incremented if data was written, otherwise it is left unchanged.
1569  // make sure IN, ID, IV, IU have at least idest + 1 elements.
1570 
1571  wave /t IN
1572  wave /t ID
1573  wave /t IV
1574  wave /t IU
1575 
1576  variable n_attr
1577  string s_attr
1578 
1579  if (exists(datawavename) == 1)
1580  if (strlen(source) > 0)
1581  Note $datawavename, "PV=" + source
1582  endif
1583  switch(WaveType($datawavename, 1))
1584  case 1:// numeric
1585  wave w_attr = $datawavename
1586  n_attr = numpnts(w_attr)
1587  sprintf s_attr, "%.12g", w_attr[0]
1588  break
1589  case 2:// text
1590  wave /t wt_attr = $datawavename
1591  n_attr = numpnts(wt_attr)
1592  s_attr = wt_attr[0]
1593  break
1594  default:// unknown
1595  n_attr = 0
1596  endswitch
1597  if (n_attr == 1)
1598  IN[idest] = source
1599  ID[idest] = datawavename
1600  IV[idest] = s_attr
1601  IU[idest] = ""// we don't get the units
1602  idest += 1
1603  endif
1604  endif
1605 };
1606 
1614 variable adh5_scale_scienta(wave data){
1615  wave data
1616 
1617  dfref saveDF = GetDataFolderDFR()
1618 
1619  dfref dataDF = GetWavesDataFolderDFR(data)
1620  dfref attrDF = GetAttrDataFolderDFR(data)
1621 
1622  wave /SDFR=attrDF LensMode
1623  wave /SDFR=attrDF /Z ChannelBegin, ChannelEnd
1624  wave /SDFR=attrDF /Z SliceBegin, SliceEnd
1625 
1626  variable EDim, ADim
1627  variable ELow, EHigh, ALow, AHigh
1628  string EUnit, AUnit
1629 
1630  // which dimension is angle and which one is energy?
1631  strswitch(GetDimLabel(data, 0, -1))
1632  case "AD_Dim0":
1633  EDim = 0
1634  break
1635  case "AD_Dim1":
1636  EDim = 1
1637  break
1638  default:
1639  EDim = -1
1640  endswitch
1641  strswitch(GetDimLabel(data, 1, -1))
1642  case "AD_Dim0":
1643  ADim = 0
1644  break
1645  case "AD_Dim1":
1646  ADim = 1
1647  break
1648  default:
1649  ADim = -1
1650  endswitch
1651 
1652  // defaults (point scaling)
1653  if (EDim >= 0)
1654  ELow = dimoffset(data, EDim)
1655  EHigh = dimoffset(data, EDim) + dimdelta(data, EDim) * (dimsize(data, EDim) - 1)
1656  EUnit = "eV"
1657  endif
1658  if (ADim >= 0)
1659  ALow = dimoffset(data, ADim)
1660  AHigh = dimoffset(data, ADim) + dimdelta(data, ADim) * (dimsize(data, ADim) - 1)
1661  AUnit = "arb."
1662  endif
1663 
1664  // lens mode can give more detail
1665  if (waveexists(LensMode) && (numpnts(LensMode) >= 1))
1666  switch(LensMode[0])
1667  case 1:// Angular45
1668  ALow = -45/2
1669  AHigh = +45/2
1670  AUnit = "°"
1671  break
1672  case 2:// Angular60
1673  ALow = -60/2
1674  AHigh = +60/2
1675  AUnit = "°"
1676  break
1677  endswitch
1678  endif
1679 
1680  // best option if scales are explicit in separate waves
1681  if (waveexists(ChannelBegin) && waveexists(ChannelEnd) && (numpnts(ChannelBegin) >= 1) && (numpnts(ChannelEnd) >= 1))
1682  ELow = ChannelBegin[0]
1683  EHigh = ChannelEnd[0]
1684  endif
1685  if (waveexists(SliceBegin) && waveexists(SliceEnd) && (numpnts(SliceBegin) >= 1) && (numpnts(SliceEnd) >= 1))
1686  ALow = SliceBegin[0]
1687  AHigh = SliceEnd[0]
1688  endif
1689 
1690  // apply new scales
1691  switch(EDim)
1692  case 0:
1693  setscale /i x ELow, EHigh, EUnit, data
1694  break
1695  case 1:
1696  setscale /i y ELow, EHigh, EUnit, data
1697  break
1698  endswitch
1699  switch(ADim)
1700  case 0:
1701  setscale /i x ALow, AHigh, AUnit, data
1702  break
1703  case 1:
1704  setscale /i y ALow, AHigh, AUnit, data
1705  break
1706  endswitch
1707 
1708  setscale d 0, 0, "arb.", data
1709 
1710  setdatafolder saveDF
1711 };
1712 
1719 variable adh5_scale_scan(wave data){
1720  wave data
1721 
1722  dfref saveDF = GetDataFolderDFR()
1723 
1724  dfref dataDF = GetWavesDataFolderDFR(data)
1725  wave /SDFR=dataDF AcquisitionMode, DetectorMode, EnergyMode
1726 
1727  wave /SDFR=dataDF /z Scan1Active, Scan2Active
1728  wave /SDFR=dataDF /t /z Scan1Positioner1, Scan1Readback1
1729  wave /SDFR=dataDF /t /z Scan1Positioner2, Scan1Readback2
1730  wave /SDFR=dataDF /t /z Scan2Positioner1, Scan2Readback1
1731  wave /SDFR=dataDF /t /z Scan2Positioner2, Scan2Readback2
1732 
1733  // TODO : search the data folder for positioner waves,
1734  // i.e. waves with the PV name corresponding to Scan1Positioner1 in their wave note.
1735  wave /z zscale
1736 
1737  strswitch(GetDimLabel(data, 0, -1))
1738  case "AD_DimN":
1739  setscale /i x zscale[0], zscale[numpnts(zscale)-1], "", data
1740  break
1741  endswitch
1742  strswitch(GetDimLabel(data, 1, -1))
1743  case "AD_DimN":
1744  setscale /i y zscale[0], zscale[numpnts(zscale)-1], "", data
1745  break
1746  endswitch
1747  strswitch(GetDimLabel(data, 2, -1))
1748  case "AD_DimN":
1749  setscale /i z zscale[0], zscale[numpnts(zscale)-1], "", data
1750  break
1751  endswitch
1752 
1753  setdatafolder saveDF
1754 };
1755 
string adh5_load_preview(string ANickName, string APathName, string AFileName, variable load_data=defaultValue, variable load_attr=defaultValue)
load a single image from an HDF5 file created by the Area Detector software.
variable kill_progress_panel()
variable display_progress_panel(string title, string message, variable progress_max)
string adh5_load_info(string APathName, string AFileName)
load descriptive info from an HDF5 file created by the Area Detector software.
variable ad_load_dialog(string APathName)
load area detector data files selected in a file dialog window
threadsafe wave ad_profile_x_w(wave dataset, variable q1, variable q2, wave destwave, variable noavg=defaultValue)
1D cut through 2D dataset along X dimension, existing destination wave.
static variable read_attribute_info(string datawavename, string source, variable *idest)
sub-function of adh5_loadattr_all.
static dfr GetAttrDataFolderDFR(wave data)
find the attributes data folder of an area detector dataset.
threadsafe variable adh5_setup_profile(wave image, wave profile, variable dim)
set up a one-dimensional wave for a line profile based on a 2D original wave.
static threadsafe variable reduce_slab_image(wave slabdata, wave image, wave profile1, wave profile2, funcref reduction_func, string reduction_param)
variable adh5_scale_scienta(wave data)
set the energy and angle scales of an area detector dataset from the Scienta analyser.
string adh5_test_reduction_func(wave source, wave dest1, wave dest2, funcref reduction_func, string reduction_param)
wrapper function for testing reduction functions from the command line.
variable adh5_loadattr_all(variable fileID, string attributespath)
load an NDAttributes group from an open HDF5 file into the current data folder.
variable adh5_scale_scan(wave data)
scales the extra dimensions of an area detector dataset according to the EPICS scan ...
threadsafe variable adh5_default_reduction(wave source, wave dest1, wave dest2, string *param)
function prototype for adh5_load_reduced_detector
string ad_suggest_foldername(string filename, variable ignoredate=defaultValue, string sourcename=defaultValue, variable unique=defaultValue)
generate the name of a data folder based on a file name.
threadsafe wave ad_profile_y_w(wave dataset, variable p1, variable p2, wave destwave, variable noavg=defaultValue)
1D cut through 2D dataset along Y dimension, existing destination wave.
string adh5_load_complete(string ANickName, string APathName, string AFileName, variable load_data=defaultValue, variable load_attr=defaultValue)
import everything from an HDF5 file created by the Area Detector software.
variable adh5_load_reduced_detector(variable fileID, string detectorpath, funcref reduction_func, string reduction_param, variable progress=defaultValue, variable nthreads=defaultValue)
load a reduced detector dataset from the open HDF5 file.
string adh5_load_reduced(string ANickName, string APathName, string AFileName, funcref reduction_func, string reduction_param, variable load_data=defaultValue, variable load_attr=defaultValue, variable progress=defaultValue)
load and reduce a dataset from an HDF5 file created by the Area Detector software. ...
variable adh5_redim(wave data)
redimension a multi-dimensional area detector array loaded from HDF5.
variable adh5_load_detector_slabs(variable fileID, string detectorpath, variable progress=defaultValue)
load the detector dataset from the open HDF5 file.
variable adh5_scale(wave data, string source=defaultValue)
set the dimension scales of an area detector dataset.
variable update_progress_panel(variable progress, string message=defaultValue, variable progress_max=defaultValue)
variable adh5_load_detector(variable fileID, string detectorpath)
load the detector dataset from the open HDF5 file.
static variable BeforeFileOpenHook(variable refNum, string fileName, string path, string type, string creator, variable kind)
callback function for drag&drop of HDF5 files into Igor.
string adh5_list_reduction_funcs()
get a list of functions which can be used as reduction functions.
variable adh5_load_detector_image(variable fileID, string detectorpath, variable dim2start, variable dim2count, variable dim3start, variable dim3count)
load a single image from the detector dataset of the open HDF5 file
static threadsafe variable reduce_slab_worker(funcref reduction_func)