downstripped code ...

ebner 2014-09-24 14:03:36 +02:00
parent 3fe26723ef
commit 74aae47436
3 changed files with 745 additions and 726 deletions

HDF5Reader.java

@@ -94,131 +94,28 @@ public class HDF5Reader implements PlugIn {
inFile = new H5File(directory + name, H5File.READ);
inFile.open();
// Parse the file
Group rootNode = (Group) ((javax.swing.tree.DefaultMutableTreeNode) inFile.getRootNode()).getUserObject();
List<Dataset> varList = getDatasets(rootNode, new ArrayList<Dataset>());
List<Dataset> datasets = HDF5Utilities.getDatasets(inFile);
List<Dataset> selectedDatasets = selectDatasets(datasets);
GenericDialog gd = new GenericDialog("Variable Name Selection");
gd.addMessage("Please select variables to be loaded.\n");
if (varList.size() < 1) {
IJ.error("The file did not contain variables. (broken?)");
inFile.close();
return;
} else if (varList.size() > 1000) {
logger.info("#######");
for(Dataset d: varList){
logger.info(d.getFullName());
}
logger.info("#######");
/*-----------------------------------------------------------------
* FIXME: quick and dirty hack for files with more than 1000
* datasets
*-----------------------------------------------------------------*/
gd = new GenericDialog("Variable Name Selection");
gd.addMessage("Too many variables in your file! " + "(More than 1000)\n\n" + "Please enter the full name of your desired dataset.");
gd.addStringField("dataset name", "");
gd.showDialog();
if (gd.wasCanceled()) {
return;
}
String dsName = gd.getNextString();
varList = new ArrayList<Dataset>();
try {
HObject ds = inFile.get(dsName);
if (ds != null && ds instanceof Dataset) {
varList.add((Dataset) ds);
gd.addCheckbox("single variable", true);
} else {
IJ.error("The file does not contain a variable with name " + "`" + dsName + "`!");
inFile.close();
return;
}
} catch (Exception e) {
IJ.error("The file does not contain a variable with name " + "`" + dsName + "`!");
inFile.close();
return;
}
} else {
String[] varSelections = new String[varList.size()];
boolean[] defaultValues = new boolean[varList.size()];
for (int i = 0; i < varList.size(); i++) {
Dataset var = varList.get(i);
int rank = var.getRank();
String title = rank + "D: " + var.getFullName() + " " + var.getDatatype().getDatatypeDescription() + "( ";
long[] extent = var.getDims();
for (int d = 0; d < rank; ++d) {
if (d != 0)
title += "x";
title += extent[d];
}
title += ")";
varSelections[i] = title;
defaultValues[i] = false;
}
logger.info("addcheckboxgroup with " + varList.size() + " rows");
gd.addCheckboxGroup(varList.size(), 1, varSelections, defaultValues);
addScrollBars(gd);
gd.showDialog();
if (gd.wasCanceled()) {
return;
}
}
// varList should have size=0 if only grouping is wanted
// use old style
for (int i = 0; i < varList.size(); ++i) {
if (gd.getNextBoolean()) {
Dataset var = varList.get(i);
for (Dataset var : selectedDatasets) {
// Read dataset attributes and properties
String datasetName = var.getName();
int rank = var.getRank();
Datatype datatype = var.getDatatype();
Datatype datatypeIfUnsupported = null;
long[] extent = var.getDims(); // Extent in px (level,row,col)
logger.info("Reading Variable: " + var.getName());
logger.info(" Rank = " + rank + ", Data-type = " + datatype.getDatatypeDescription());
System.out.print(" Extent in px (level,row,col):");
for (int d = 0; d < rank; ++d)
System.out.print(" " + extent[d]);
logger.info("");
IJ.showStatus("Reading Variable: " + var.getName() + " (" + extent[0] + " slices)");
logger.info("Reading dataset: " + datasetName + " Rank: " + rank + " Type: " + datatype.getDatatypeDescription());
Attribute elemsize_att = HDF5Utilities.getAttributes(var).get("element_size_um");
double[] elem_sizes = new double[3];
if (elemsize_att == null) {
elem_sizes[0] = 1.0;
elem_sizes[1] = 1.0;
elem_sizes[2] = 1.0;
} else {
logger.info("Reading element_size_um");
Object tmp = elemsize_att.getValue();
if (tmp instanceof float[]) {
elem_sizes[0] = ((float[]) tmp)[0];
elem_sizes[1] = ((float[]) tmp)[1];
elem_sizes[2] = ((float[]) tmp)[2];
} else if (tmp instanceof double[]) {
elem_sizes[0] = ((double[]) tmp)[0];
elem_sizes[1] = ((double[]) tmp)[1];
elem_sizes[2] = ((double[]) tmp)[2];
} else {
String title = "Error Reading Element Size";
String msg = "The element_size_um attribute has " + "wrong format!\n" + "Setting to default size of (1,1,1)...";
new ij.gui.MessageDialog(null, title, msg);
elem_sizes[0] = 1.0;
elem_sizes[1] = 1.0;
elem_sizes[2] = 1.0;
}
}
logger.info(" Element-Size in um (level,row,col): " + elem_sizes[0] + ", " + elem_sizes[1] + ", " + elem_sizes[2]);
double pixelWidth = 1.0;
double pixelHeight = 1.0;
double pixelDepth = 1.0;
IJ.showStatus("Reading Variable: " + datasetName + " (" + extent[0] + " slices)");
// nice gadget to update the progress bar
long progressDivisor = extent[0] / 50; // we assume 50 process steps
if (progressDivisor < 1)
progressDivisor = 1;
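The progress-divisor idiom above throttles ImageJ progress updates to roughly 50 per dataset, whatever the slice count. A minimal sketch of the pattern (loop body and variable names are illustrative only):

long nSlices = extent[0];                         // number of slices to read
long progressDivisor = Math.max(1, nSlices / 50); // aim for ~50 updates
for (long lev = 0; lev < nSlices; ++lev) {
    if (lev % progressDivisor == 0) {
        IJ.showProgress((double) lev / nSlices);
    }
    // ... read slice `lev` and append it to the stack ...
}
IJ.showProgress(1.0);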
@@ -305,16 +202,16 @@ public class HDF5Reader implements PlugIn {
// select hyperslab for lev
// start[1] = lev;
// Object slice = var.read();
int startIdx = (int) ((volIDX * singleVolumeSize * 3) + (lev * stackSize * 3));
// long numElements = stackSize * 3;
int endIdx = (int) (startIdx + stackSize * 3 - 1);
copyPixels3(datatypeIfUnsupported, extent, stack, wholeDataset, size, startIdx, endIdx);
}
}
IJ.showProgress(1.f);
ImagePlus imp = new ImagePlus(directory + name + " " + var.getName(), stack);
ImagePlus imp = new ImagePlus(directory + name + " " + datasetName, stack);
// new for hyperstack
int nChannels = 3;
int nSlices = (int) extent[1];
@@ -327,18 +224,9 @@ public class HDF5Reader implements PlugIn {
imp.setDimensions(nChannels, nSlices, nFrames);
imp = new CompositeImage(imp, CompositeImage.COMPOSITE);
imp.setOpenAsHyperStack(true);
// imp = imp.createHyperStack(directory + name + " "
// + var.getName(),
// nChannels,
// nSlices,
// nFrames,32);
// imp.setStack(stack,nChannels,nSlices,nFrames);
imp.getCalibration().pixelDepth = elem_sizes[0];
imp.getCalibration().pixelHeight = elem_sizes[1];
imp.getCalibration().pixelWidth = elem_sizes[2];
// getMinMax();
// imp.setDisplayRange(0,229);
imp.getCalibration().pixelDepth = pixelDepth;
imp.getCalibration().pixelHeight = pixelHeight;
imp.getCalibration().pixelWidth = pixelWidth;
imp.resetDisplayRange();
imp.show();
} else if (rank == 4) {
@@ -392,15 +280,16 @@ public class HDF5Reader implements PlugIn {
// start[0] = lev;
// Object slice = var.read();
int startIdx = (int) (lev * stackSize);
// long numElements = stackSize;
int endIdx = (int) (startIdx + stackSize - 1);
// Object slice = extractSubarray(wholeDataset, startIdx, numElements);
int size = (int) (extent[2] * extent[1]);
copyPixel1(extent, stack, wholeDataset, startIdx, endIdx, size);
copyPixels1(extent, stack, wholeDataset, startIdx, endIdx, size);
}
ImagePlus imp = new ImagePlus(directory + name + " " + var.getName(), stack);
ImagePlus imp = new ImagePlus(directory + name + " " + datasetName, stack);
// new for hyperstack
int nChannels = 3;
int nSlices = (int) extent[0];
@@ -408,12 +297,9 @@ public class HDF5Reader implements PlugIn {
imp.setDimensions(nChannels, nSlices, nFrames);
imp = new CompositeImage(imp, CompositeImage.COMPOSITE);
imp.setOpenAsHyperStack(true);
imp.getCalibration().pixelDepth = elem_sizes[0];
imp.getCalibration().pixelHeight = elem_sizes[1];
imp.getCalibration().pixelWidth = elem_sizes[2];
// getMinMax();
imp.getCalibration().pixelDepth = pixelDepth;
imp.getCalibration().pixelHeight = pixelHeight;
imp.getCalibration().pixelWidth = pixelWidth;
imp.resetDisplayRange();
imp.show();
imp.updateStatusbarValue();
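All of the display branches in this method share one ImageJ idiom: wrap the filled ImageStack in an ImagePlus, declare the channel/slice/frame layout, promote it to a CompositeImage, and copy the calibration over. A condensed sketch of that shared pattern (all variables assumed to be computed earlier in the branch):

ImagePlus imp = new ImagePlus(directory + name + " " + datasetName, stack);
imp.setDimensions(nChannels, nSlices, nFrames);   // product must equal stack size
imp = new CompositeImage(imp, CompositeImage.COMPOSITE);
imp.setOpenAsHyperStack(true);
imp.getCalibration().pixelDepth = pixelDepth;     // calibration in user units
imp.getCalibration().pixelHeight = pixelHeight;
imp.getCalibration().pixelWidth = pixelWidth;
imp.resetDisplayRange();
imp.show();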
@@ -470,16 +356,16 @@ public class HDF5Reader implements PlugIn {
// select hyperslab for lev
// start[1] = lev;
// Object slice = var.read();
int startIdx = (int) ((volIDX * singleVolumeSize) + (lev * stackSize));
int endIdx = (int) (startIdx + stackSize - 1);
// long numElements = stackSize;
convertDatatypesAndSlice(datatypeIfUnsupported, stack, wholeDataset, startIdx, endIdx);
}
}
IJ.showProgress(1.f);
ImagePlus imp = new ImagePlus(directory + name + " " + var.getName(), stack);
ImagePlus imp = new ImagePlus(directory + name + " " + datasetName, stack);
// new for hyperstack
int nChannels = 1;
int nSlices = (int) extent[1];
@@ -494,12 +380,9 @@ public class HDF5Reader implements PlugIn {
imp.setDimensions(nChannels, nSlices, nFrames);
imp.setOpenAsHyperStack(true);
imp.getCalibration().pixelDepth = elem_sizes[0];
imp.getCalibration().pixelHeight = elem_sizes[1];
imp.getCalibration().pixelWidth = elem_sizes[2];
// getMinMax();
// imp.setDisplayRange(0,229);
imp.getCalibration().pixelDepth = pixelDepth;
imp.getCalibration().pixelHeight = pixelHeight;
imp.getCalibration().pixelWidth = pixelWidth;
imp.resetDisplayRange();
imp.show();
}
@@ -538,10 +421,10 @@ public class HDF5Reader implements PlugIn {
int size = (int) (extent[1] * extent[0]);
// ugly but working: copy pixel by pixel
copyPixels(extent, stack, slice, size);
copyPixels2(extent, stack, slice, size);
IJ.showProgress(1.f);
ImagePlus imp = new ImagePlus(directory + name + " " + var.getName(), stack);
ImagePlus imp = new ImagePlus(directory + name + " " + datasetName, stack);
// new for hyperstack
int nChannels = 3;
int nSlices = 1;
@@ -549,11 +432,9 @@ public class HDF5Reader implements PlugIn {
imp.setDimensions(nChannels, nSlices, nFrames);
imp = new CompositeImage(imp, CompositeImage.COMPOSITE);
imp.setOpenAsHyperStack(true);
imp.getCalibration().pixelDepth = elem_sizes[0];
imp.getCalibration().pixelHeight = elem_sizes[1];
imp.getCalibration().pixelWidth = elem_sizes[2];
// getMinMax();
imp.getCalibration().pixelDepth = pixelDepth;
imp.getCalibration().pixelHeight = pixelHeight;
imp.getCalibration().pixelWidth = pixelWidth;
imp.resetDisplayRange();
imp.show();
imp.updateStatusbarValue();
@@ -600,17 +481,16 @@ public class HDF5Reader implements PlugIn {
// start[0] = lev;
// Object slice = var.read();
int startIdx = (int) (lev * stackSize);
int endIdx = (int) (startIdx + stackSize - 1);
// long numElements = stackSize;
convertDatatypesAndSlice(datatypeIfUnsupported, stack, wholeDataset, startIdx, endIdx);
}
IJ.showProgress(1.f);
ImagePlus imp = new ImagePlus(directory + name + " " + var.getName(), stack);
imp.getCalibration().pixelDepth = elem_sizes[0];
imp.getCalibration().pixelHeight = elem_sizes[1];
imp.getCalibration().pixelWidth = elem_sizes[2];
// getMinMax();
ImagePlus imp = new ImagePlus(directory + name + " " + datasetName, stack);
imp.getCalibration().pixelDepth = pixelDepth;
imp.getCalibration().pixelHeight = pixelHeight;
imp.getCalibration().pixelWidth = pixelWidth;
imp.resetDisplayRange();
imp.show();
imp.updateStatusbarValue();
@@ -697,7 +577,7 @@ public class HDF5Reader implements PlugIn {
stack.addSlice(null, slice);
}
IJ.showProgress(1.f);
ImagePlus imp = new ImagePlus(directory + name + " " + var.getName(), stack);
ImagePlus imp = new ImagePlus(directory + name + " " + datasetName, stack);
imp.getProcessor().resetMinAndMax();
imp.show();
@@ -716,7 +596,6 @@ public class HDF5Reader implements PlugIn {
IJ.showStatus("Variable Dimension " + rank + " not supported");
}
}
}
} catch (Exception e) {
logger.log(Level.WARNING, "Error while opening '" + directory + name + "'", e);
@@ -724,22 +603,99 @@ public class HDF5Reader implements PlugIn {
} catch (OutOfMemoryError o) {
IJ.outOfMemory("Load HDF5");
}
// make sure the file is closed after working with it
// FIXME: should happen in catch-part, too!
finally{
try {
if (inFile != null){
inFile.close();
}
} catch (HDF5Exception err) {
System.err.println("Error while closing '" + directory + name + "'");
System.err.println(err);
IJ.showStatus("Error closing file.");
}
}
IJ.showProgress(1.0);
}
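For reference, the element_size_um handling buried in the hunks above reduces to this: the attribute, when present, carries three calibration values (level, row, column) as either float[] or double[], and any other shape falls back to (1,1,1). A compact sketch using the getAttributes helper introduced by this commit:

Attribute att = HDF5Utilities.getAttributes(var).get("element_size_um");
double[] elemSizes = { 1.0, 1.0, 1.0 };           // default calibration
if (att != null) {
    Object value = att.getValue();
    if (value instanceof float[]) {
        float[] f = (float[]) value;
        elemSizes = new double[] { f[0], f[1], f[2] };
    } else if (value instanceof double[]) {
        elemSizes = ((double[]) value).clone();
    }                                             // otherwise keep (1,1,1)
}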
/**
* Selection of the datasets to visualize
*
* @param datasets Datasets found in the file
* @return List of datasets to visualize. If nothing is selected, the list will be empty
* @throws HDF5Exception
*/
private List<Dataset> selectDatasets(List<Dataset> datasets) throws HDF5Exception {
List<Dataset> selectedDatasets = new ArrayList<>();
GenericDialog gd = new GenericDialog("Variable Name Selection");
gd.addMessage("Please select variables to be loaded.\n");
if (datasets.size() < 1) {
IJ.error("The file does not contain datasets");
} else if (datasets.size() > 1000) {
logger.info("#######");
for(Dataset d: datasets){
logger.info(d.getFullName());
}
logger.info("#######");
gd = new GenericDialog("Variable Name Selection");
gd.addMessage("There are lots of datasets in your file (check the log output which datasets are available)! Please enter the full path of the dataset to be displayed");
gd.addStringField("Dataset", "");
gd.showDialog();
if (!gd.wasCanceled()) {
String dsName = gd.getNextString();
for(Dataset d: datasets){
if(d.getFullName().equals(dsName)){
selectedDatasets.add(d);
}
}
if(selectedDatasets.isEmpty()){
IJ.error("The file does not contain a variable with name " + "`" + dsName + "`!");
}
}
} else {
String[] varSelections = new String[datasets.size()];
boolean[] defaultValues = new boolean[datasets.size()];
for (int i = 0; i < datasets.size(); i++) {
Dataset var = datasets.get(i);
int rank = var.getRank();
String title = rank + "D: " + var.getFullName() + " " + var.getDatatype().getDatatypeDescription() + "( ";
long[] extent = var.getDims();
for (int d = 0; d < rank; ++d) {
if (d != 0){
title += "x";
}
title += extent[d];
}
title += ")";
varSelections[i] = title;
defaultValues[i] = false;
}
logger.info("Add checkbox group with " + datasets.size() + " rows");
gd.addCheckboxGroup(datasets.size(), 1, varSelections, defaultValues);
addScrollBars(gd);
gd.showDialog();
if (!gd.wasCanceled()) {
// Get selected datasets
for (int i = 0; i < datasets.size(); ++i) {
if (gd.getNextBoolean()) {
selectedDatasets.add(datasets.get(i));
}
}
}
}
return selectedDatasets;
}
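selectDatasets follows the standard ij.gui.GenericDialog protocol: widgets are added in order, and after showDialog() the getNext* methods return the values in that same insertion order. A minimal standalone sketch of the checkbox variant used here:

GenericDialog gd = new GenericDialog("Example");
gd.addCheckbox("first option", false);
gd.addCheckbox("second option", true);
gd.showDialog();
if (!gd.wasCanceled()) {
    boolean first = gd.getNextBoolean();          // read back in insertion order
    boolean second = gd.getNextBoolean();
}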
/**
* @param datatypeIfUnsupported
* @param stack
@@ -945,7 +901,7 @@ public class HDF5Reader implements PlugIn {
* @param slice
* @param size
*/
private void copyPixels(long[] extent, ImageStack stack, Object slice, int size) {
private void copyPixels2(long[] extent, ImageStack stack, Object slice, int size) {
if (slice instanceof byte[]) {
byte[] tmp = (byte[]) slice;
byte[] rChannel = new byte[size];
@@ -1060,7 +1016,7 @@ public class HDF5Reader implements PlugIn {
* @param endIdx
* @param size
*/
private void copyPixel1(long[] extent, ImageStack stack, Object wholeDataset, int startIdx, int endIdx, int size) {
private void copyPixels1(long[] extent, ImageStack stack, Object wholeDataset, int startIdx, int endIdx, int size) {
if (wholeDataset instanceof byte[]) {
byte[] tmp = Arrays.copyOfRange((byte[]) wholeDataset, startIdx, endIdx);
byte[] rChannel = new byte[size];
@@ -1166,32 +1122,6 @@ public class HDF5Reader implements PlugIn {
}
}
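The copyPixels* helpers renamed in the hunks above all center on one operation: de-interleaving a pixel-interleaved color buffer into per-channel planes before the planes are pushed onto the ImageStack. The essence for byte data, as a hypothetical standalone helper (channel order assumed to be r,g,b):

private static byte[][] deinterleaveRGB(byte[] interleaved, int size) {
    byte[] r = new byte[size];
    byte[] g = new byte[size];
    byte[] b = new byte[size];
    for (int i = 0; i < size; i++) {
        r[i] = interleaved[3 * i];                // red plane
        g[i] = interleaved[3 * i + 1];            // green plane
        b[i] = interleaved[3 * i + 2];            // blue plane
    }
    return new byte[][] { r, g, b };
}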
/**
* Recursively get list of all datasets in file
* @param group Group to search for datasets
* @param datasets List of datasets
* @return List of datasets or null if group is null
*/
private List<Dataset> getDatasets(Group group, List<Dataset> datasets) {
if (group == null){
return datasets;
}
for (HObject o: group.getMemberList()) {
if (o instanceof Dataset) {
((Dataset) o).init();
datasets.add((Dataset) o);
} else if (o instanceof Group) {
datasets = (getDatasets((Group) o, datasets));
}
}
return datasets;
}
private float[] convertDoubleToFloat(double[] dataIn) {
float[] dataOut = new float[dataIn.length];
for (int index = 0; index < dataIn.length; index++) {

HDF5Utilities.java

@@ -1,11 +1,19 @@
package ch.psi.imagej.hdf5;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.logging.Logger;
import javax.swing.tree.DefaultMutableTreeNode;
import ncsa.hdf.object.Attribute;
import ncsa.hdf.object.Dataset;
import ncsa.hdf.object.FileFormat;
import ncsa.hdf.object.Group;
import ncsa.hdf.object.HObject;
import ncsa.hdf.object.h5.H5File;
public class HDF5Utilities {
@@ -36,4 +44,182 @@ public class HDF5Utilities {
return attributes;
}
/**
* TODO: to be replaced by a simple regular expression
*
* Retrieve relative group descriptor for given descriptor
*
* Example:
* The group descriptor of /test/one/two/three is test/one/two
*
* @param descriptor Fully qualified descriptor
* @return Group descriptor
*/
public static String getGroupDescriptor(String descriptor) {
String groupName = descriptor;
// Trim leading and trailing slashes
while (groupName.charAt(0) == '/') {
groupName = groupName.substring(1);
}
while (groupName.charAt(groupName.length() - 1) == '/') {
groupName = groupName.substring(0, groupName.length() - 1);
}
int posOfLastSlash = groupName.lastIndexOf('/');
if (posOfLastSlash == -1)
return null;
else
return groupName.substring(0, posOfLastSlash);
}
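The TODO above is plausible: both trims plus the final split can be expressed with two replaceAll calls. A possible regex-based equivalent (a sketch, not part of the commit; name hypothetical):

public static String getGroupDescriptorRegex(String descriptor) {
    // strip leading/trailing slashes, then drop the last path segment
    String trimmed = descriptor.replaceAll("^/+", "").replaceAll("/+$", "");
    int pos = trimmed.lastIndexOf('/');
    return (pos == -1) ? null : trimmed.substring(0, pos);
}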
/**
* TODO: to be replaced by a simple regular expression
*
* Get relative dataset descriptor
*
* Example:
*
* /a/b/c/d/ returns d
*
* @param descriptor Fully qualified descriptor
* @return relative dataset descriptor
*/
public static String getDataSetDescriptor(String descriptor) {
String dataSetName = descriptor;
// Trim leading and trailing slashes
while (dataSetName.charAt(0) == '/') {
dataSetName = dataSetName.substring(1);
}
while (dataSetName.charAt(dataSetName.length() - 1) == '/') {
dataSetName = dataSetName.substring(0, dataSetName.length() - 1);
}
int posOfLastSlash = dataSetName.lastIndexOf('/');
if (posOfLastSlash == -1)
return dataSetName;
else
return dataSetName.substring(posOfLastSlash + 1);
}
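The same regex treatment covers the dataset counterpart (again a hypothetical sketch):

public static String getDataSetDescriptorRegex(String descriptor) {
    String trimmed = descriptor.replaceAll("^/+", "").replaceAll("/+$", "");
    return trimmed.substring(trimmed.lastIndexOf('/') + 1); // whole string if no slash
}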
/**
* Creates group recursively relative to the given base group
*
* @param groupRelativName relative group to be created
* @param group Base group - if null create group relative to /
* @param file File handle
* @return The created group (or the existing group if it was already present), or null if creation failed
*/
public static Group createGroup(String groupRelativName, Group group, FileFormat file) {
if (groupRelativName == null || file == null)
return null;
if (group == null){
group = (Group) ((DefaultMutableTreeNode) file.getRootNode()).getUserObject();
}
// Trim leading and trailing slashes
while (groupRelativName.charAt(0) == '/') {
groupRelativName = groupRelativName.substring(1);
}
while (groupRelativName.charAt(groupRelativName.length() - 1) == '/') {
groupRelativName = groupRelativName.substring(0, groupRelativName.length() - 1);
}
int posOfSlash = groupRelativName.indexOf('/');
if (posOfSlash == -1) {
try {
Group newGroup;
String newGroupName;
if (group.isRoot()){
newGroupName = "/" + groupRelativName;
}
else{
newGroupName = group.getFullName() + "/" + groupRelativName;
}
newGroup = (Group) file.get(newGroupName);
if (newGroup == null){
newGroup = file.createGroup(newGroupName, group);
}
return newGroup;
} catch (Exception e) {
return null;
}
} else {
String subgroupRelativName = groupRelativName.substring(posOfSlash);
String currentGroup = groupRelativName.substring(0, posOfSlash);
logger.info("Create: " + currentGroup);
logger.info("Call back for: " + subgroupRelativName);
try {
Group newGroup;
String newGroupName;
if (group.isRoot()){
newGroupName = "/" + currentGroup;
}
else {
newGroupName = group.getFullName() + "/" + currentGroup;
}
logger.info("try opening: " + newGroupName);
newGroup = (Group) file.get(newGroupName);
if (newGroup == null) {
newGroup = file.createGroup(newGroupName, group);
}
return createGroup(subgroupRelativName, newGroup, file);
} catch (Exception e) {
return null;
}
}
// never reached
}
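Hypothetical usage of createGroup, assuming an open, writable FileFormat handle outFile: intermediate groups are created on demand, existing ones are reused, and null signals failure.

// creates /volumes, /volumes/run1 and /volumes/run1/images as needed
Group images = HDF5Utilities.createGroup("volumes/run1/images", null, outFile);
if (images == null) {
    IJ.error("Could not create group");   // createGroup returns null on any error
}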
/**
* Get all datasets of a file
* @param file File to search for datasets
* @return List of all datasets in the file
*/
public static List<Dataset> getDatasets(H5File file) {
Group rootNode = (Group) ((javax.swing.tree.DefaultMutableTreeNode) file.getRootNode()).getUserObject();
List<Dataset> datasets = getDatasets(rootNode);
return datasets;
}
/**
* Get all datasets of a group
* @param group Group to search for datasets
* @return List of datasets. If group is null, the list will be empty
*/
public static List<Dataset> getDatasets(Group group){
List<Dataset> datasets = new ArrayList<>();
return getDatasets(group, datasets);
}
/**
* Recursively get list of all datasets in file
* @param group Group to search for datasets
* @param datasets List of datasets
* @return List of datasets. If group is null, the list will be empty
*/
public static List<Dataset> getDatasets(Group group, List<Dataset> datasets) {
if (group == null){
return datasets;
}
for (HObject o: group.getMemberList()) {
if (o instanceof Dataset) {
((Dataset) o).init();
datasets.add((Dataset) o);
} else if (o instanceof Group) {
datasets = (getDatasets((Group) o, datasets));
}
}
return datasets;
}
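Typical use of the new getDatasets entry points, e.g. to list every dataset path in a file (path hypothetical; open() and close() throw checked exceptions in the real API, omitted here for brevity):

H5File file = new H5File("/tmp/example.h5", H5File.READ);
file.open();                                      // throws Exception
for (Dataset d : HDF5Utilities.getDatasets(file)) {
    System.out.println(d.getFullName());
}
file.close();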
}

HDF5Writer.java

@@ -28,8 +28,6 @@ import ij.gui.*;
import java.util.logging.Logger;
import javax.swing.tree.DefaultMutableTreeNode;
import ncsa.hdf.object.*; // the common object package
import ncsa.hdf.object.h5.*; // the HDF5 implementation
import ncsa.hdf.hdf5lib.exceptions.HDF5Exception;
@@ -144,12 +142,14 @@ public class HDF5Writer implements PlugInFilter {
IJ.showProgress(f, nFrames);
for (int c = 0; c < nChannels; c++) {
String fullName = makeDataSetName(formatTokens, f, c);
String dataSetName = getDataSetDescriptor(fullName);
logger.info("dataset name: " + dataSetName);
String groupName = getGroupDescriptor(fullName);
logger.info("group name: " + groupName);
String dataSetName = HDF5Utilities.getDataSetDescriptor(fullName);
String groupName = HDF5Utilities.getGroupDescriptor(fullName);
logger.info("group name: " + groupName + " dataset name: " + dataSetName);
// ensure group exists
Group group = createGroupRecursive(groupName, null, outFile);
Group group = HDF5Utilities.createGroup(groupName, null, outFile);
// create data set
Dataset dataset = null;
// select hyperslabs
@@ -258,13 +258,12 @@ public class HDF5Writer implements PlugInFilter {
logger.info("writing data to variable: " + varName);
String dataSetName = getDataSetDescriptor(varName);
logger.info("dataset name: " + dataSetName);
String groupName = getGroupDescriptor(varName);
logger.info("group name: " + groupName);
String dataSetName = HDF5Utilities.getDataSetDescriptor(varName);
String groupName = HDF5Utilities.getGroupDescriptor(varName);
logger.info("group name: " + groupName + " dataset name: " + dataSetName);
// ensure group exists
Group group = createGroupRecursive(groupName, null, outFile);
Group group = HDF5Utilities.createGroup(groupName, null, outFile);
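After this change both writer code paths share the same three-step preamble, sketched here with a hypothetical dataset path:

String fullName = "/exported/t0/channel0";                         // hypothetical
String dataSetName = HDF5Utilities.getDataSetDescriptor(fullName); // "channel0"
String groupName = HDF5Utilities.getGroupDescriptor(fullName);     // "exported/t0"
Group group = HDF5Utilities.createGroup(groupName, null, outFile); // ensure group exists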
@@ -390,102 +389,6 @@ public class HDF5Writer implements PlugInFilter {
}
private static Group createGroupRecursive(String groupRelativName, Group group, FileFormat file) {
if (groupRelativName == null || file == null)
return null;
if (group == null)
group = (Group) ((DefaultMutableTreeNode) file.getRootNode()).getUserObject();
while (groupRelativName.charAt(0) == '/') {
// trim leading slash
groupRelativName = groupRelativName.substring(1);
}
while (groupRelativName.charAt(groupRelativName.length() - 1) == '/') {
// trim last slash
groupRelativName = groupRelativName.substring(0, groupRelativName.length() - 2);
}
int posOfSlash = groupRelativName.indexOf('/');
if (posOfSlash == -1) {
try {
Group newGroup;
String newGroupName;
if (group.isRoot())
newGroupName = "/" + groupRelativName;
else
newGroupName = group.getFullName() + "/" + groupRelativName;
newGroup = (Group) file.get(newGroupName);
if (newGroup == null)
newGroup = file.createGroup(newGroupName, group);
return newGroup;
} catch (Exception e) {
return null;
}
} else {
String subgroupRelativName = groupRelativName.substring(posOfSlash);
String currentGroup = groupRelativName.substring(0, posOfSlash);
logger.info("Create: " + currentGroup);
logger.info("Call back for: " + subgroupRelativName);
try {
Group newGroup;
String newGroupName;
if (group.isRoot())
newGroupName = "/" + currentGroup;
else
newGroupName = group.getFullName() + "/" + currentGroup;
logger.info("try opening: " + newGroupName);
newGroup = (Group) file.get(newGroupName);
if (newGroup == null)
newGroup = file.createGroup(newGroupName, group);
return createGroupRecursive(subgroupRelativName, newGroup, file);
} catch (Exception e) {
return null;
}
}
// never come here
}
private static String getGroupDescriptor(String absName) {
String groupName = absName;
while (groupName.charAt(0) == '/') {
// trim leading slash
groupName = groupName.substring(1);
}
while (groupName.charAt(groupName.length() - 1) == '/') {
// trim last slash
groupName = groupName.substring(0, groupName.length() - 2);
}
int posOfLastSlash = groupName.lastIndexOf('/');
if (posOfLastSlash == -1)
return null;
else
return groupName.substring(0, posOfLastSlash);
}
private static String getDataSetDescriptor(String absName) {
String dataSetName = absName;
while (dataSetName.charAt(0) == '/') {
// trim leading slash
dataSetName = dataSetName.substring(1);
}
while (dataSetName.charAt(dataSetName.length() - 1) == '/') {
// trim last slash
dataSetName = dataSetName.substring(0, dataSetName.length() - 2);
}
int posOfLastSlash = dataSetName.lastIndexOf('/');
if (posOfLastSlash == -1)
return dataSetName;
else
return dataSetName.substring(posOfLastSlash + 1);
}
long[] findOptimalChunksize(int Rank, long[] dataDims) {
long[] best_chunksize = new long[Rank];
int maxChunkVol = 262144;