diff --git a/.classpath b/.classpath deleted file mode 100644 index bd7877d..0000000 --- a/.classpath +++ /dev/null @@ -1,28 +0,0 @@ - - - - - - - - - - - - - - - - - - - - - - - - - - - - diff --git a/.gitignore b/.gitignore index 786f8d5..4b16010 100644 --- a/.gitignore +++ b/.gitignore @@ -4,3 +4,4 @@ imagej.hdf5.iml /target .gradle build +out/ diff --git a/.project b/.project deleted file mode 100644 index bdfa458..0000000 --- a/.project +++ /dev/null @@ -1,29 +0,0 @@ - - - ch.psi.imagej.hdf5 - - - - - - org.eclipse.jdt.core.javabuilder - - - - - org.eclipse.m2e.core.maven2Builder - - - - - ch.acanda.eclipse.pmd.builder.PMDBuilder - - - - - - org.eclipse.jdt.core.javanature - org.eclipse.m2e.core.maven2Nature - ch.acanda.eclipse.pmd.builder.PMDNature - - diff --git a/.settings/org.eclipse.core.resources.prefs b/.settings/org.eclipse.core.resources.prefs deleted file mode 100644 index 5b781ec..0000000 --- a/.settings/org.eclipse.core.resources.prefs +++ /dev/null @@ -1,3 +0,0 @@ -eclipse.preferences.version=1 -encoding//src/main/java=UTF-8 -encoding//src/test/java=UTF-8 diff --git a/.settings/org.eclipse.jdt.core.prefs b/.settings/org.eclipse.jdt.core.prefs deleted file mode 100644 index ec4300d..0000000 --- a/.settings/org.eclipse.jdt.core.prefs +++ /dev/null @@ -1,5 +0,0 @@ -eclipse.preferences.version=1 -org.eclipse.jdt.core.compiler.codegen.targetPlatform=1.7 -org.eclipse.jdt.core.compiler.compliance=1.7 -org.eclipse.jdt.core.compiler.problem.forbiddenReference=warning -org.eclipse.jdt.core.compiler.source=1.7 diff --git a/.settings/org.eclipse.m2e.core.prefs b/.settings/org.eclipse.m2e.core.prefs deleted file mode 100644 index f897a7f..0000000 --- a/.settings/org.eclipse.m2e.core.prefs +++ /dev/null @@ -1,4 +0,0 @@ -activeProfiles= -eclipse.preferences.version=1 -resolveWorkspaceProjects=true -version=1 diff --git a/build.gradle b/build.gradle index 86d6b87..9ccdcc5 100644 --- a/build.gradle +++ b/build.gradle @@ -2,7 +2,7 @@ apply plugin: 'java' //apply plugin: 'maven' group = 'ch.psi' -version = '0.12.0' +version = '0.13.0' description = """""" @@ -10,10 +10,13 @@ sourceCompatibility = 1.8 targetCompatibility = 1.8 - repositories { - mavenCentral() - maven { url "http://artifacts.psi.ch/artifactory/libs-releases" } + mavenCentral() + maven { url "http://artifacts.psi.ch/artifactory/libs-releases" } + maven { url "http://maven.imagej.net/content/repositories/public/" } + flatDir { + dirs 'lib' + } } // define a provided scope @@ -24,14 +27,20 @@ configurations { dependencies { - compile group: 'hdf5', name: 'hdf', version:'2.10.0' - compile group: 'hdf5', name: 'hdfobj', version:'2.10.0' - compile group: 'hdf5', name: 'hdf5', version:'2.10.0' - compile group: 'hdf5', name: 'hdf5obj', version:'2.10.0' - compile group: 'org.slf4j', name: 'slf4j-api', version:'1.7.6' - testCompile group: 'junit', name: 'junit', version:'4.11' + compile name: 'sis-jhdf5-19.04.0' + compile name: 'sis-base-18.09.0' + compile name: 'commons-io-2.6' +// compile name: 'sis-base-18.09.0' +// compile group: 'cisd', name: 'jhdf5', version: '14.12.6' +// compile group: 'hdf5', name: 'hdf', version:'2.10.0' +// compile group: 'hdf5', name: 'hdfobj', version:'2.10.0' +// compile group: 'hdf5', name: 'hdf5', version:'2.10.0' +// compile group: 'hdf5', name: 'hdf5obj', version:'2.10.0' + compile group: 'org.slf4j', name: 'slf4j-api', version: '1.7.6' + testCompile group: 'junit', name: 'junit', version: '4.11' - provided group: 'net.imagej', name: 'ij', version:'1.53e' + + provided group: 'net.imagej', name: 'ij', 
version: '1.53e' } task fatJar(type: Jar) { @@ -41,12 +50,11 @@ task fatJar(type: Jar) { // 'Main-Class': 'ch.psi.caview.CaView' // } archiveBaseName = 'HDF5_Viewer' - from { (configurations.compile- configurations.provided).collect { it.isDirectory() ? it : zipTree(it) } } + from { (configurations.compile - configurations.provided).collect { it.isDirectory() ? it : zipTree(it) } } with jar -// { exclude group: "net.imagej", name:'ij'} } -// -//task distributionZip(type: Zip, dependsOn: [jar]) { + +//task distributionZip(type: Zip, dependsOn: [fatJar]) { // baseName "${project.group}-jhdf5" // // from('targets/dist') { diff --git a/lib/commons-io-2.6.jar b/lib/commons-io-2.6.jar new file mode 100644 index 0000000..00556b1 Binary files /dev/null and b/lib/commons-io-2.6.jar differ diff --git a/lib/sis-base-18.09.0.jar b/lib/sis-base-18.09.0.jar new file mode 100644 index 0000000..2af84b5 Binary files /dev/null and b/lib/sis-base-18.09.0.jar differ diff --git a/lib/sis-jhdf5-19.04.0.jar b/lib/sis-jhdf5-19.04.0.jar new file mode 100644 index 0000000..458827d Binary files /dev/null and b/lib/sis-jhdf5-19.04.0.jar differ diff --git a/pom.xml b/pom.xml deleted file mode 100644 index 170cfb9..0000000 --- a/pom.xml +++ /dev/null @@ -1,77 +0,0 @@ - - 4.0.0 - ch.psi - imagej.hdf5 - 0.12.0 - - - - gov.nih.imagej - imagej - 1.46 - provided - - - - hdf5 - hdf - 2.10.0 - - - hdf5 - hdfobj - 2.10.0 - - - hdf5 - hdf5 - 2.10.0 - - - hdf5 - hdf5obj - 2.10.0 - - - - org.slf4j - slf4j-api - 1.7.6 - - - - junit - junit - 4.11 - test - - - - - - - maven-compiler-plugin - 2.3.2 - - UTF-8 - 1.7 - 1.7 - - - - maven-assembly-plugin - 2.4 - - HDF5_Viewer-${pom.version} - false - - - src/main/assembly/assembly_jar.xml - src/main/assembly/assembly.xml - - - - - - \ No newline at end of file diff --git a/settings.xml b/settings.xml deleted file mode 100644 index d91b178..0000000 --- a/settings.xml +++ /dev/null @@ -1,44 +0,0 @@ - - - - - - - - false - - central - libs-releases - http://artifacts.psi.ch/artifactory/libs-releases - - - - snapshots - libs-snapshots - http://artifacts.psi.ch/artifactory/libs-snapshots - - - - - - false - - central - libs-releases - http://artifacts.psi.ch/artifactory/libs-releases - - - - snapshots - libs-releases - http://artifacts.psi.ch/artifactory/libs-releases - - - artifactory - - - - artifactory - - \ No newline at end of file diff --git a/src/main/java/ch/psi/imagej/hdf5/DatasetSelection.java b/src/main/java/ch/psi/imagej/hdf5/DatasetSelection.java index f2313d3..1ce0118 100644 --- a/src/main/java/ch/psi/imagej/hdf5/DatasetSelection.java +++ b/src/main/java/ch/psi/imagej/hdf5/DatasetSelection.java @@ -3,7 +3,7 @@ package ch.psi.imagej.hdf5; import java.util.ArrayList; import java.util.List; -import ncsa.hdf.object.Dataset; +import hdf.object.Dataset; public class DatasetSelection { diff --git a/src/main/java/ch/psi/imagej/hdf5/HDF5Reader.java b/src/main/java/ch/psi/imagej/hdf5/HDF5Reader.java index 29a17ec..11f9a71 100644 --- a/src/main/java/ch/psi/imagej/hdf5/HDF5Reader.java +++ b/src/main/java/ch/psi/imagej/hdf5/HDF5Reader.java @@ -1,5 +1,9 @@ package ch.psi.imagej.hdf5; +import hdf.hdf5lib.exceptions.HDF5Exception; +import hdf.object.Dataset; +import hdf.object.Datatype; +import hdf.object.h5.H5File; import ij.IJ; import ij.ImagePlus; import ij.CompositeImage; @@ -14,10 +18,6 @@ import java.util.*; import java.util.logging.Level; import java.util.logging.Logger; -import ncsa.hdf.object.*; -import ncsa.hdf.object.h5.*; -import ncsa.hdf.hdf5lib.exceptions.HDF5Exception; - public 
class HDF5Reader implements PlugIn { private static final Logger logger = Logger.getLogger(HDF5Reader.class.getName()); @@ -139,7 +139,7 @@ public class HDF5Reader implements PlugIn { int numberOfDimensions = var.getRank(); long[] dimensions= var.getDims(); - logger.info("Reading dataset: " + datasetName + " Dimensions: " + numberOfDimensions + " Type: " + datatype.getDatatypeDescription()); + logger.info("Reading dataset: " + datasetName + " Dimensions: " + numberOfDimensions + " Type: " + datatype.getDescription()); // Read dataset diff --git a/src/main/java/ch/psi/imagej/hdf5/HDF5Utilities.java b/src/main/java/ch/psi/imagej/hdf5/HDF5Utilities.java index 1767fce..3b419b0 100644 --- a/src/main/java/ch/psi/imagej/hdf5/HDF5Utilities.java +++ b/src/main/java/ch/psi/imagej/hdf5/HDF5Utilities.java @@ -11,40 +11,40 @@ import java.util.regex.Pattern; import javax.swing.tree.DefaultMutableTreeNode; -import ncsa.hdf.object.Attribute; -import ncsa.hdf.object.Dataset; -import ncsa.hdf.object.FileFormat; -import ncsa.hdf.object.Group; -import ncsa.hdf.object.HObject; -import ncsa.hdf.object.h5.H5File; +import hdf.object.Attribute; +import hdf.object.Dataset; +import hdf.object.FileFormat; +import hdf.object.Group; +import hdf.object.HObject; +import hdf.object.h5.H5File; public class HDF5Utilities { private static final Logger logger = Logger.getLogger(HDF5Utilities.class.getName()); - /** - * Get attributes from object - * @param object Object to retrieve the attributes from - * @return Map of attributes or null if an error occurred while retrieving the attributes or the passed object is null - */ - public static Map getAttributes(HObject object) { - Objects.requireNonNull(object); - - Map attributes = new HashMap<>(); - try{ - for(Object m: object.getMetadata()){ - if(m instanceof Attribute){ - attributes.put(((Attribute) m).getName(), (Attribute) m); - } - } - } - catch(Exception e){ - logger.warning("Unable to retrieve metadata from object"); - return null; - } - - return attributes; - } +// /** +// * Get attributes from object +// * @param object Object to retrieve the attributes from +// * @return Map of attributes or null if an error occurred while retrieving the attributes or the passed object is null +// */ +// public static Map getAttributes(HObject object) { +// Objects.requireNonNull(object); +// +// Map attributes = new HashMap<>(); +// try{ +// for(Object m: object.getMetadata()){ +// if(m instanceof Attribute){ +// attributes.put(((Attribute) m).getName(), (Attribute) m); +// } +// } +// } +// catch(Exception e){ +// logger.warning("Unable to retrieve metadata from object"); +// return null; +// } +// +// return attributes; +// } /** @@ -85,7 +85,7 @@ public class HDF5Utilities { * @return */ public static Group createGroup( FileFormat file, String groupName) { - return createGroup(file, (Group) ((DefaultMutableTreeNode) file.getRootNode()).getUserObject(), groupName); + return createGroup(file, (Group) file.getRootObject(), groupName); } /** @@ -100,7 +100,7 @@ public class HDF5Utilities { Objects.requireNonNull(groupName); if (group == null){ - group = (Group) ((DefaultMutableTreeNode) file.getRootNode()).getUserObject(); + group = (Group) file.getRootObject(); } Group ngroup = group; @@ -130,7 +130,7 @@ public class HDF5Utilities { * @return */ public static List getDatasets(H5File file) { - Group rootNode = (Group) ((javax.swing.tree.DefaultMutableTreeNode) file.getRootNode()).getUserObject(); + Group rootNode = (Group) file.getRootObject(); List datasets = getDatasets(rootNode); 
return datasets; } diff --git a/src/main/java/ch/psi/imagej/hdf5/HDF5Writer.java b/src/main/java/ch/psi/imagej/hdf5/HDF5Writer.java index eb8508c..52a75f7 100644 --- a/src/main/java/ch/psi/imagej/hdf5/HDF5Writer.java +++ b/src/main/java/ch/psi/imagej/hdf5/HDF5Writer.java @@ -1,6 +1,12 @@ package ch.psi.imagej.hdf5; +import hdf.object.Dataset; +import hdf.object.Datatype; +import hdf.object.FileFormat; +import hdf.object.Group; +import hdf.object.h5.H5Datatype; +import hdf.object.h5.H5File; import ij.*; import ij.io.*; import ij.plugin.filter.PlugInFilter; @@ -10,9 +16,9 @@ import ij.gui.*; import java.util.logging.Level; import java.util.logging.Logger; -import ncsa.hdf.object.*; // the common object package -import ncsa.hdf.object.h5.*; // the HDF5 implementation -import ncsa.hdf.hdf5lib.exceptions.HDF5Exception; +//import hdf.object.*; // the common object package +//import hdf.object.h5.*; // the HDF5 implementation +import hdf.hdf5lib.exceptions.HDF5Exception; public class HDF5Writer implements PlugInFilter { @@ -60,18 +66,22 @@ public class HDF5Writer implements PlugInFilter { int imgColorType = imp.getType(); Datatype type = null; - if (imgColorType == ImagePlus.GRAY8) { - logger.info(" bit depth: " + imgColorDepth + ", type: GRAY8"); - type = new H5Datatype(Datatype.CLASS_CHAR, Datatype.NATIVE, Datatype.NATIVE, Datatype.SIGN_NONE); - } else if (imgColorType == ImagePlus.GRAY16) { - logger.info(" bit depth: " + imgColorDepth + ", type: GRAY16"); - type = new H5Datatype(Datatype.CLASS_INTEGER, 2, Datatype.NATIVE, Datatype.SIGN_NONE); - } else if (imgColorType == ImagePlus.GRAY32) { - logger.info(" bit depth: " + imgColorDepth + ", type: GRAY32"); - type = new H5Datatype(Datatype.CLASS_FLOAT, Datatype.NATIVE, Datatype.NATIVE, -1); - } else if (imgColorType == ImagePlus.COLOR_RGB) { - logger.info(" bit depth: " + imgColorDepth + ", type: COLOR_RGB"); - type = new H5Datatype(Datatype.CLASS_CHAR, Datatype.NATIVE, Datatype.NATIVE, Datatype.SIGN_NONE); + try { + if (imgColorType == ImagePlus.GRAY8) { + logger.info(" bit depth: " + imgColorDepth + ", type: GRAY8"); + type = new H5Datatype(Datatype.CLASS_CHAR, Datatype.NATIVE, Datatype.NATIVE, Datatype.SIGN_NONE); + } else if (imgColorType == ImagePlus.GRAY16) { + logger.info(" bit depth: " + imgColorDepth + ", type: GRAY16"); + type = new H5Datatype(Datatype.CLASS_INTEGER, 2, Datatype.NATIVE, Datatype.SIGN_NONE); + } else if (imgColorType == ImagePlus.GRAY32) { + logger.info(" bit depth: " + imgColorDepth + ", type: GRAY32"); + type = new H5Datatype(Datatype.CLASS_FLOAT, Datatype.NATIVE, Datatype.NATIVE, -1); + } else if (imgColorType == ImagePlus.COLOR_RGB) { + logger.info(" bit depth: " + imgColorDepth + ", type: COLOR_RGB"); + type = new H5Datatype(Datatype.CLASS_CHAR, Datatype.NATIVE, Datatype.NATIVE, Datatype.SIGN_NONE); + } + } catch (Exception e){ + throw new RuntimeException("Unable to create dataset", e); } if (imp.getOpenAsHyperStack() || imp.isHyperStack()) { diff --git a/src/main/java/ch/psi/imagej/hdf5/SelectionPanel.java b/src/main/java/ch/psi/imagej/hdf5/SelectionPanel.java index f61f40a..6b49c2f 100644 --- a/src/main/java/ch/psi/imagej/hdf5/SelectionPanel.java +++ b/src/main/java/ch/psi/imagej/hdf5/SelectionPanel.java @@ -14,7 +14,7 @@ import javax.swing.JPanel; import javax.swing.JScrollPane; import javax.swing.ScrollPaneConstants; -import ncsa.hdf.object.Dataset; +import hdf.object.Dataset; import javax.swing.JTextField; import java.awt.FlowLayout; diff --git a/src/main/java/ch/psi/imagej/hdf5/VirtualStackHDF5.java 
b/src/main/java/ch/psi/imagej/hdf5/VirtualStackHDF5.java index ed648a6..7c64893 100644 --- a/src/main/java/ch/psi/imagej/hdf5/VirtualStackHDF5.java +++ b/src/main/java/ch/psi/imagej/hdf5/VirtualStackHDF5.java @@ -3,8 +3,8 @@ package ch.psi.imagej.hdf5; import java.util.logging.Level; import java.util.logging.Logger; -import ncsa.hdf.object.Dataset; -import ncsa.hdf.object.h5.H5File; +import hdf.object.Dataset; +import hdf.object.h5.H5File; import ij.ImageStack; import ij.process.ByteProcessor; import ij.process.ColorProcessor; diff --git a/src/main/java/hdf/object/Attribute.java b/src/main/java/hdf/object/Attribute.java new file mode 100644 index 0000000..921a868 --- /dev/null +++ b/src/main/java/hdf/object/Attribute.java @@ -0,0 +1,1459 @@ +/***************************************************************************** + * Copyright by The HDF Group. * + * Copyright by the Board of Trustees of the University of Illinois. * + * All rights reserved. * + * * + * This file is part of the HDF Java Products distribution. * + * The full copyright notice, including terms governing use, modification, * + * and redistribution, is contained in the files COPYING and Copyright.html. * + * COPYING can be found at the root of the source code distribution tree. * + * Or, see https://support.hdfgroup.org/products/licenses.html * + * If you do not have access to either file, you may request a copy from * + * help@hdfgroup.org. * + ****************************************************************************/ + +package hdf.object; + +import java.lang.reflect.Array; +import java.math.BigInteger; +import java.util.Arrays; +import java.util.Collection; +import java.util.HashMap; +import java.util.List; +import java.util.Map; +import java.util.Vector; + +import hdf.hdf5lib.H5; +import hdf.hdf5lib.HDF5Constants; +import hdf.hdf5lib.HDFNativeData; +import hdf.hdf5lib.exceptions.HDF5Exception; +import hdf.object.h5.H5Datatype; + +/** + * An attribute is a (name, value) pair of metadata attached to a primary data object such as a + * dataset, group or named datatype. + *

+ * Like a dataset, an attribute has a name, datatype and dataspace. + * + *

+ * For more details on attributes, see the HDF5 + * User's Guide. + *

+ * + * The following code is an example of an attribute with a 1D integer array of two elements. + * + *

+ * // Example of creating a new attribute
+ * // The name of the new attribute
+ * String name = "Data range";
+ * // Creating an unsigned 1-byte integer datatype
+ * Datatype type = new Datatype(Datatype.CLASS_INTEGER, // class
+ *                              1,                      // size in bytes
+ *                              Datatype.ORDER_LE,      // byte order
+ *                              Datatype.SIGN_NONE);    // unsigned
+ * // 1-D array of size two
+ * long[] dims = {2};
+ * // The value of the attribute
+ * int[] value = {0, 255};
+ * // Create a new attribute attached to an object parentObj
+ * Attribute dataRange = new Attribute(parentObj, name, type, dims);
+ * // Set the attribute value
+ * dataRange.setValue(value);
+ * // See FileFormat.writeAttribute() for how to attach an attribute to an object.
+ * @see hdf.object.FileFormat#writeAttribute(HObject, Attribute, boolean)
+ * 
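A minimal end-to-end sketch of attaching such an attribute, reusing name, type, dims and value from the example above; the file name, the path "/images" and the omitted error handling are illustrative assumptions, not part of this patch:

    H5File file = new H5File("example.h5", FileFormat.WRITE);   // assumed file name
    file.open();
    HObject target = file.get("/images");                       // assumed existing object
    Attribute dataRange = new Attribute(target, name, type, dims, value); // value set at construction
    file.writeAttribute(target, dataRange, false);              // false: attribute does not exist yet
    file.close();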
+ * + * + * For an atomic datatype, the value of an Attribute will be a 1D array of integers, floats and + * strings. For a compound datatype, it will be a 1D array of strings with field members separated + * by a comma. For example, "{0, 10.5}, {255, 20.0}, {512, 30.0}" is a compound attribute of {int, + * float} of three data points. + * + * @see hdf.object.Datatype + * + * @version 2.0 4/2/2018 + * @author Peter X. Cao, Jordan T. Henderson + */ +public class Attribute extends Dataset implements DataFormat, CompoundDataFormat { + + private static final long serialVersionUID = 2072473407027648309L; + + private static final org.slf4j.Logger log = org.slf4j.LoggerFactory.getLogger(Attribute.class); + + /** The HObject to which this Attribute is attached */ + protected HObject parentObject; + + /** additional information and properties for the attribute */ + private transient Map properties; + + /** + * Flag to indicate is the original unsigned C data is converted. + */ + protected boolean unsignedConverted; + + /** Flag to indicate if the attribute data is a single scalar point */ + protected final boolean isScalar; + + /** Fields for Compound datatype attributes */ + + /** + * A list of names of all compound fields including nested fields. + *

+ * The nested names are separated by CompoundDS.SEPARATOR. For example, if + * compound attribute "A" has the following nested structure, + * + *

+     * A --> m01
+     * A --> m02
+     * A --> nest1 --> m11
+     * A --> nest1 --> m12
+     * A --> nest1 --> nest2 --> m21
+     * A --> nest1 --> nest2 --> m22
+     * i.e.
+     * A = { m01, m02, nest1{m11, m12, nest2{ m21, m22}}}
+     * 
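A sketch of how one of the nested names above flattens, assuming the CompoundDS.SEPARATOR constant ("\u0095") defined later in this patch; the bracket-like characters in the flatNameList text below are that separator rendered literally:

    String flatM21 = "nest1" + CompoundDS.SEPARATOR + "nest2" + CompoundDS.SEPARATOR + "m21";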
+ * + * The flatNameList of compound attribute "A" will be {m01, m02, nest1[m11, + * nest1[m12, nest1[nest2[m21, nest1[nest2[m22} + * + */ + private List flatNameList; + + /** + * A list of datatypes of all compound fields including nested fields. + */ + private List flatTypeList; + + /** + * The number of members of the compound attribute. + */ + protected int numberOfMembers = 0; + + /** + * The names of the members of the compound attribute. + */ + protected String[] memberNames = null; + + /** + * Array containing the total number of elements of the members of this compound + * attribute. + *

+ * For example, a compound attribute COMP has members of A, B and C as + * + *

+     *     COMP {
+     *         int A;
+     *         float B[5];
+     *         double C[2][3];
+     *     }
+     * 
+ * + * memberOrders is an integer array of {1, 5, 6} to indicate that member A has + * one element, member B has 5 elements, and member C has 6 elements. + */ + protected int[] memberOrders = null; + + /** + * The dimension sizes of each member. + *

+ * The i-th element of the Object[] is an integer array (int[]) that contains + * the dimension sizes of the i-th member. + */ + protected transient Object[] memberDims = null; + + /** + * The datatypes of the compound attribute's members. + */ + protected Datatype[] memberTypes = null; + + /** + * The array to store flags to indicate if a member of this compound attribute + * is selected for read/write. + *

+ * If a member is selected, read and write operations apply only to that member. + * Applications such as HDFView will only display the selected members of the + * compound attribute. + * + *

+     * For example, if a compound attribute has four members
+     *     String[] memberNames = {"X", "Y", "Z", "TIME"};
+     * and
+     *     boolean[] isMemberSelected = {true, false, false, true};
+     * members "X" and "TIME" are selected for read and write.
+     * 
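A short sketch of driving that selection from code, assuming attr is an initialized compound Attribute with the four members above:

    attr.setAllMemberSelection(false); // clear the default all-selected state
    attr.selectMember(0);              // select "X"
    attr.selectMember(3);              // select "TIME"
    Object values = attr.read();       // the read honors the member selection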
+ */ + protected boolean[] isMemberSelected = null; + + /** + * Create an attribute with the specified name, datatype and dimension sizes. + * + * For a scalar attribute, the dimension size can be either an array of size one + * or null, and the rank can be either 1 or 0. Attribute is a general class + * and is independent of file format, e.g., the implementation of attribute + * applies to both HDF4 and HDF5. + *

+ * The following example creates a string attribute with the name "CLASS" and + * value "IMAGE". + * + *

+     * long[] attrDims = { 1 };
+     * String attrName = "CLASS";
+     * String[] classValue = { "IMAGE" };
+     * Datatype attrType = null;
+     * try {
+     *     attrType = new H5Datatype(Datatype.CLASS_STRING, classValue[0].length() + 1, Datatype.NATIVE, Datatype.NATIVE);
+     * }
+     * catch (Exception ex) {}
+     * Attribute attr = new Attribute(parentObj, attrName, attrType, attrDims);
+     * attr.setValue(classValue);
+     * 
+ * + * @param parentObj + * the HObject to which this Attribute is attached. + * @param attrName + * the name of the attribute. + * @param attrType + * the datatype of the attribute. + * @param attrDims + * the dimension sizes of the attribute, null for scalar attribute + * + * @see hdf.object.Datatype + */ + public Attribute(HObject parentObj, String attrName, Datatype attrType, long[] attrDims) { + this(parentObj, attrName, attrType, attrDims, null); + } + + /** + * Create an attribute with a specific name and value. + * + * For a scalar attribute, the dimension size can be either an array of size one + * or null, and the rank can be either 1 or 0. Attribute is a general class + * and is independent of file format, e.g., the implementation of attribute + * applies to both HDF4 and HDF5. + *

+ * The following example creates a string attribute with the name "CLASS" and + * value "IMAGE". + * + *

+     * long[] attrDims = { 1 };
+     * String attrName = "CLASS";
+     * String[] classValue = { "IMAGE" };
+     * Datatype attrType = null;
+     * try {
+     *     attrType = new H5Datatype(Datatype.CLASS_STRING, classValue[0].length() + 1, Datatype.NATIVE, Datatype.NATIVE);
+     * }
+     * catch (Exception ex) {}
+     * Attribute attr = new Attribute(parentObj, attrName, attrType, attrDims, classValue);
+     * 
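The value can also be supplied at construction and persisted through write(Object), shown further down in this class; a sketch, assuming parentObj is an object in an open HDF5 file:

    Attribute attr = new Attribute(parentObj, attrName, attrType, attrDims, classValue);
    attr.write(classValue); // delegates to the parent object's writeMetadata()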
+ * + * @param parentObj + * the HObject to which this Attribute is attached. + * @param attrName + * the name of the attribute. + * @param attrType + * the datatype of the attribute. + * @param attrDims + * the dimension sizes of the attribute, null for scalar attribute + * @param attrValue + * the value of the attribute, null if no value + * + * @see hdf.object.Datatype + */ + @SuppressWarnings({ "rawtypes", "unchecked", "deprecation" }) + public Attribute(HObject parentObj, String attrName, Datatype attrType, long[] attrDims, Object attrValue) { + super((parentObj == null) ? null : parentObj.getFileFormat(), attrName, + (parentObj == null) ? null : parentObj.getFullName(), null); + + this.parentObject = parentObj; + + datatype = attrType; + if (attrDims == null) { + rank = 1; + dims = new long[] { 1 }; + isScalar = true; + } + else { + dims = attrDims; + rank = dims.length; + isScalar = false; + } + + data = attrValue; + properties = new HashMap(); + + unsignedConverted = false; + + selectedDims = new long[rank]; + startDims = new long[rank]; + selectedStride = new long[rank]; + + log.trace("attrName={}, attrType={}, attrValue={}, rank={}, isUnsigned={}, isScalar={}", + attrName, getDatatype().getDescription(), data, rank, getDatatype().isUnsigned(), isScalar); + + resetSelection(); + } + + /* + * (non-Javadoc) + * + * @see hdf.object.HObject#open() + */ + @Override + public long open() { + log.trace("open(): start"); + + if (parentObject == null) { + log.debug("open(): attribute's parent object is null"); + log.trace("open(): exit"); + return -1; + } + + long aid = -1; + long pObjID = -1; + + try { + pObjID = parentObject.open(); + if (pObjID >= 0) { + if (this.getFileFormat().isThisType(FileFormat.getFileFormat(FileFormat.FILE_TYPE_HDF5))) { + log.trace("open(): FILE_TYPE_HDF5"); + if (H5.H5Aexists(pObjID, getName())) + aid = H5.H5Aopen(pObjID, getName(), HDF5Constants.H5P_DEFAULT); + } + else if (this.getFileFormat().isThisType(FileFormat.getFileFormat(FileFormat.FILE_TYPE_HDF4))) { + log.trace("open(): FILE_TYPE_HDF4"); + /* + * TODO: Get type of HDF4 object this is attached to and retrieve attribute info. + */ + } + else if (this.getFileFormat().isThisType(FileFormat.getFileFormat(FileFormat.FILE_TYPE_NC3))) { + log.trace("open(): FILE_TYPE_NC3"); + /* + * TODO: Get type of netcdf3 object this is attached to and retrieve attribute info. + */ + } + } + + log.trace("open(): aid={}", aid); + } + catch (Exception ex) { + log.debug("open(): Failed to open attribute {}: ", getName(), ex); + aid = -1; + } + finally { + parentObject.close(pObjID); + } + + log.trace("open(): finish"); + + return aid; + } + + /* + * (non-Javadoc) + * + * @see hdf.object.HObject#close(int) + */ + @Override + public void close(long aid) { + log.trace("close(): start"); + + if (aid >= 0) { + if (this.getFileFormat().isThisType(FileFormat.getFileFormat(FileFormat.FILE_TYPE_HDF5))) { + log.trace("close(): FILE_TYPE_HDF5"); + try { + H5.H5Aclose(aid); + } + catch (HDF5Exception ex) { + log.debug("close(): H5Aclose({}) failure: ", aid, ex); + } + } + else if (this.getFileFormat().isThisType(FileFormat.getFileFormat(FileFormat.FILE_TYPE_HDF4))) { + log.trace("close(): FILE_TYPE_HDF4"); + /* + * TODO: Get type of HDF4 object this is attached to and close attribute. + */ + } + else if (this.getFileFormat().isThisType(FileFormat.getFileFormat(FileFormat.FILE_TYPE_NC3))) { + log.trace("close(): FILE_TYPE_NC3"); + /* + * TODO: Get type of netcdf3 object this is attached to and close attribute. 
+ */ + } + } + + log.trace("close(): finish"); + } + + @Override + public void init() { + log.trace("init(): start"); + + if (inited) { + resetSelection(); + log.trace("init(): Attribute already inited"); + log.trace("init(): finish"); + return; + } + + if (this.getFileFormat().isThisType(FileFormat.getFileFormat(FileFormat.FILE_TYPE_HDF5))) { + long aid = -1; + long tid = -1; + int tclass = -1; + flatNameList = new Vector<>(); + flatTypeList = new Vector<>(); + long[] memberTIDs = null; + + log.trace("init(): FILE_TYPE_HDF5"); + aid = open(); + if (aid >= 0) { + try { + tid = H5.H5Aget_type(aid); + tclass = H5.H5Tget_class(tid); + + long tmptid = 0; + + // Handle ARRAY and VLEN types by getting the base type + if (tclass == HDF5Constants.H5T_ARRAY || tclass == HDF5Constants.H5T_VLEN) { + try { + tmptid = tid; + tid = H5.H5Tget_super(tmptid); + log.trace("init(): H5T_ARRAY or H5T_VLEN class old={}, new={}", tmptid, tid); + } + catch (Exception ex) { + log.debug("init(): H5T_ARRAY or H5T_VLEN H5Tget_super({}) failure: ", tmptid, ex); + tid = -1; + } + finally { + try { + H5.H5Tclose(tmptid); + } + catch (HDF5Exception ex) { + log.debug("init(): H5Tclose({}) failure: ", tmptid, ex); + } + } + } + + if (H5.H5Tget_class(tid) == HDF5Constants.H5T_COMPOUND) { + // initialize member information + H5Datatype.extractCompoundInfo((H5Datatype) getDatatype(), "", flatNameList, flatTypeList); + numberOfMembers = flatNameList.size(); + log.trace("init(): numberOfMembers={}", numberOfMembers); + + memberNames = new String[numberOfMembers]; + memberTIDs = new long[numberOfMembers]; + memberTypes = new Datatype[numberOfMembers]; + memberOrders = new int[numberOfMembers]; + isMemberSelected = new boolean[numberOfMembers]; + memberDims = new Object[numberOfMembers]; + + for (int i = 0; i < numberOfMembers; i++) { + isMemberSelected[i] = true; + memberTIDs[i] = flatTypeList.get(i).createNative(); + + try { + memberTypes[i] = flatTypeList.get(i); + } + catch (Exception ex) { + log.debug("init(): failed to create datatype for member[{}]: ", i, ex); + memberTypes[i] = null; + } + + memberNames[i] = flatNameList.get(i); + memberOrders[i] = 1; + memberDims[i] = null; + log.trace("init()[{}]: memberNames[{}]={}, memberTIDs[{}]={}, memberTypes[{}]={}", i, i, + memberNames[i], i, memberTIDs[i], i, memberTypes[i]); + + try { + tclass = H5.H5Tget_class(memberTIDs[i]); + } + catch (HDF5Exception ex) { + log.debug("init(): H5Tget_class({}) failure: ", memberTIDs[i], ex); + } + + if (tclass == HDF5Constants.H5T_ARRAY) { + int n = H5.H5Tget_array_ndims(memberTIDs[i]); + long mdim[] = new long[n]; + H5.H5Tget_array_dims(memberTIDs[i], mdim); + int idim[] = new int[n]; + for (int j = 0; j < n; j++) + idim[j] = (int) mdim[j]; + memberDims[i] = idim; + tmptid = H5.H5Tget_super(memberTIDs[i]); + memberOrders[i] = (int) (H5.H5Tget_size(memberTIDs[i]) / H5.H5Tget_size(tmptid)); + try { + H5.H5Tclose(tmptid); + } + catch (HDF5Exception ex) { + log.debug("init(): memberTIDs[{}] H5Tclose(tmptid {}) failure: ", i, tmptid, ex); + } + } + } // (int i=0; i 2) { + // // hdf-java 2.5 version: 3D dataset is arranged in the order of + // [frame][height][width] by default + // selectedIndex[1] = rank-1; // width, the fastest dimension + // selectedIndex[0] = rank-2; // height + // selectedIndex[2] = rank-3; // frames + + // + // (5/4/09) Modified the default dimension order. See bug#1379 + // We change the default order to the following. 
In most situations, + // users want to use the natural order of + // selectedIndex[0] = 0 + // selectedIndex[1] = 1 + // selectedIndex[2] = 2 + // Most NPOESS data is in the order above. + + selectedIndex[0] = 0; // width, the fastest dimension + selectedIndex[1] = 1; // height + selectedIndex[2] = 2; // frames + + selectedDims[selectedIndex[0]] = dims[selectedIndex[0]]; + selectedDims[selectedIndex[1]] = dims[selectedIndex[1]]; + selectedDims[selectedIndex[2]] = dims[selectedIndex[2]]; + } + + log.trace("resetSelection(): finish"); + } + + /** + * Set a property for the attribute. + * + * @param key the attribute Map key + * @param value the attribute Map value + */ + public void setProperty(String key, Object value) + { + properties.put(key, value); + } + + /** + * Get a property for a given key. + * + * @param key the attribute Map key + * + * @return the property + */ + public Object getProperty(String key) + { + return properties.get(key); + } + + /** + * Get all property keys. + * + * @return the Collection of property keys + */ + public Collection getPropertyKeys() + { + return properties.keySet(); + } + + /** + * @return true if the data is a single scalar point; otherwise, returns + * false. + */ + public boolean isScalar() { + return isScalar; + } + + @Override + public Object read() throws Exception, OutOfMemoryError { + log.trace("read(): start"); + if (!inited) init(); + + /* + * TODO: For now, convert a compound Attribute's data (String[]) into a List for + * convenient processing + */ + if (getDatatype().isCompound() && !(data instanceof List)) { + List valueList = Arrays.asList((String[]) data); + + data = valueList; + } + + log.trace("read(): finish"); + return data; + } + + @Override + public void write(Object buf) throws Exception { + log.trace("write(): start"); + + if (!buf.equals(data)) + setData(buf); + + if (!inited) init(); + + if (parentObject == null) { + log.debug("write(): parent object is null; nowhere to write attribute to"); + log.debug("write(): finish"); + return; + } + + ((MetaDataContainer) getParentObject()).writeMetadata(this); + + log.trace("write(): finish"); + } + + /** + * Returns the number of members of the compound attribute. + * + * @return the number of members of the compound attribute. + */ + @Override + public int getMemberCount() { + return numberOfMembers; + } + + /** + * Returns the number of selected members of the compound attribute. + * + * Selected members are the compound fields which are selected for read/write. + *

+ * For example, in a compound datatype of {int A, float B, char[] C}, users can + * choose to retrieve only {A, C} from the attribute. In this case, + * getSelectedMemberCount() returns two. + * + * @return the number of selected members. + */ + @Override + public int getSelectedMemberCount() { + int count = 0; + + if (isMemberSelected != null) { + for (int i = 0; i < isMemberSelected.length; i++) { + if (isMemberSelected[i]) { + count++; + } + } + } + + log.trace("getSelectedMemberCount(): count of selected members={}", count); + + return count; + } + + /** + * Returns the names of the members of the compound attribute. The names of + * compound members are stored in an array of Strings. + *

+ * For example, for a compound datatype of {int A, float B, char[] C} + * getMemberNames() returns {"A", "B", "C"}. + * + * @return the names of compound members. + */ + @Override + public String[] getMemberNames() { + return memberNames; + } + + /** + * Returns an array of the names of the selected members of the compound attribute. + * + * @return an array of the names of the selected members of the compound attribute. + */ + public final String[] getSelectedMemberNames() { + if (isMemberSelected == null) { + log.debug("getSelectedMemberNames(): isMemberSelected array is null"); + log.trace("getSelectedMemberNames(): finish"); + return memberNames; + } + + int idx = 0; + String[] names = new String[getSelectedMemberCount()]; + for (int i = 0; i < isMemberSelected.length; i++) { + if (isMemberSelected[i]) { + names[idx++] = memberNames[i]; + } + } + + return names; + } + + /** + * Checks if a member of the compound attribute is selected for read/write. + * + * @param idx + * the index of compound member. + * + * @return true if the i-th member is selected; otherwise returns false. + */ + @Override + public boolean isMemberSelected(int idx) { + if ((isMemberSelected != null) && (isMemberSelected.length > idx)) { + return isMemberSelected[idx]; + } + + return false; + } + + /** + * Selects the i-th member for read/write. + * + * @param idx + * the index of compound member. + */ + @Override + public void selectMember(int idx) { + if ((isMemberSelected != null) && (isMemberSelected.length > idx)) { + isMemberSelected[idx] = true; + } + } + + /** + * Selects/deselects all members. + * + * @param selectAll + * The indicator to select or deselect all members. If true, all + * members are selected for read/write. If false, no member is + * selected for read/write. + */ + @Override + public void setAllMemberSelection(boolean selectAll) { + if (isMemberSelected == null) { + return; + } + + for (int i = 0; i < isMemberSelected.length; i++) { + isMemberSelected[i] = selectAll; + } + } + + /** + * Returns array containing the total number of elements of the members of the + * compound attribute. + *

+ * For example, a compound attribute COMP has members of A, B and C as + * + *

+     *     COMP {
+     *         int A;
+     *         float B[5];
+     *         double C[2][3];
+     *     }
+     * 
+ * + * getMemberOrders() will return an integer array of {1, 5, 6} to indicate that + * member A has one element, member B has 5 elements, and member C has 6 + * elements. + * + * @return the array containing the total number of elements of the members of + * the compound attribute. + */ + @Override + public int[] getMemberOrders() { + return memberOrders; + } + + /** + * Returns array containing the total number of elements of the selected members + * of the compound attribute. + * + *

+ * For example, a compound attribute COMP has members of A, B and C as + * + *

+     *     COMP {
+     *         int A;
+     *         float B[5];
+     *         double C[2][3];
+     *     }
+     * 
+ * + * If A and B are selected, getSelectedMemberOrders() returns an array of {1, 5} + * + * @return array containing the total number of elements of the selected members + * of the compound attribute. + */ + @Override + public int[] getSelectedMemberOrders() { + log.trace("getSelectedMemberOrders(): start"); + + if (isMemberSelected == null) { + log.debug("getSelectedMemberOrders(): isMemberSelected array is null"); + log.trace("getSelectedMemberOrders(): finish"); + return memberOrders; + } + + int idx = 0; + int[] orders = new int[getSelectedMemberCount()]; + for (int i = 0; i < isMemberSelected.length; i++) { + if (isMemberSelected[i]) { + orders[idx++] = memberOrders[i]; + } + } + + log.trace("getSelectedMemberOrders(): finish"); + + return orders; + } + + /** + * Returns the dimension sizes of the i-th member. + *

+ * For example, a compound attribute COMP has members of A, B and C as + * + *

+     *     COMP {
+     *         int A;
+     *         float B[5];
+     *         double C[2][3];
+     *     }
+     * 
+ * + * getMemberDims(2) returns an array of {2, 3}, while getMemberDims(1) returns + * an array of {5}, and getMemberDims(0) returns null. + * + * @param i + * the i-th member + * + * @return the dimension sizes of the i-th member, null if the compound member + * is not an array. + */ + @Override + public int[] getMemberDims(int i) { + if (memberDims == null) { + return null; + } + + return (int[]) memberDims[i]; + } + + /** + * Returns an array of datatype objects of compound members. + *

+ * Each member of a compound attribute has its own datatype. The datatype of a + * member can be atomic or another compound datatype (nested compound). The + * datatype objects are set up at init(). + *

+ * + * @return the array of datatype objects of the compound members. + */ + @Override + public Datatype[] getMemberTypes() { + return memberTypes; + } + + /** + * Returns an array of datatype objects of selected compound members. + * + * @return an array of datatype objects of selected compound members. + */ + @Override + public Datatype[] getSelectedMemberTypes() { + log.trace("getSelectedMemberTypes(): start"); + + if (isMemberSelected == null) { + log.debug("getSelectedMemberTypes(): isMemberSelected array is null"); + log.trace("getSelectedMemberTypes(): finish"); + return memberTypes; + } + + int idx = 0; + Datatype[] types = new Datatype[getSelectedMemberCount()]; + for (int i = 0; i < isMemberSelected.length; i++) { + if (isMemberSelected[i]) { + types[idx++] = memberTypes[i]; + } + } + + log.trace("getSelectedMemberTypes(): finish"); + + return types; + } + + + @SuppressWarnings("rawtypes") + @Override + public List getMetadata() throws Exception { + throw new UnsupportedOperationException("Attribute:getMetadata Unsupported operation."); + } + + @Override + public void writeMetadata(Object metadata) throws Exception { + throw new UnsupportedOperationException("Attribute:writeMetadata Unsupported operation."); + } + + @Override + public void removeMetadata(Object metadata) throws Exception { + throw new UnsupportedOperationException("Attribute:removeMetadata Unsupported operation."); + } + + @Override + public void updateMetadata(Object metadata) throws Exception { + throw new UnsupportedOperationException("Attribute:updateMetadata Unsupported operation."); + } + + @Override + public boolean hasAttribute() { + return false; + } + + @Override + public final Datatype getDatatype() { + return datatype; + } + + @Override + public byte[] readBytes() throws Exception { + throw new UnsupportedOperationException("Attribute:readBytes Unsupported operation."); + } + + @Override + public Dataset copy(Group pgroup, String name, long[] dims, Object data) throws Exception { + throw new UnsupportedOperationException("Attribute:copy Unsupported operation."); + } + + /** + * Returns whether this Attribute is equal to the specified HObject by comparing + * various properties. + * + * @param obj + * The object + * + * @return true if the object is equal + */ + @Override + public boolean equals(Object obj) { + if (obj == null) + return false; + + // checking if both the object references are + // referring to the same object. + if (this == obj) + return true; + if (obj instanceof Attribute) { + if (!this.getFullName().equals(((Attribute) obj).getFullName())) + return false; + + if (!this.getFileFormat().equals(((Attribute) obj).getFileFormat())) + return false; + + if (!Arrays.equals(this.getDims(), ((DataFormat) obj).getDims())) + return false; + + return (this.getParentObject().equals(((Attribute) obj).getParentObject())); + } + return false; + } + + @Override + public int hashCode() { + + // We are returning the OID as a hashcode value. + return super.hashCode(); + } + + /** + * Returns a string representation of the data value of the attribute. For + * example, "0, 255". + *

+ * For a compound datatype, it will be a 1D array of strings with field + * members separated by the delimiter. For example, + * "{0, 10.5}, {255, 20.0}, {512, 30.0}" is a compound attribute of {int, + * float} of three data points. + *

+ * + * @param delimiter + * The delimiter used to separate individual data points. It + * can be a comma, semicolon, tab or space. For example, + * toString(",") will separate data by commas. + * + * @return the string representation of the data values. + */ + public String toString(String delimiter) { + return toString(delimiter, -1); + } + + /** + * Returns a string representation of the data value of the attribute. For + * example, "0, 255". + *

+ * For a compound datatype, it will be a 1D array of strings with field + * members separated by the delimiter. For example, + * "{0, 10.5}, {255, 20.0}, {512, 30.0}" is a compound attribute of {int, + * float} of three data points. + *

+ * + * @param delimiter + * The delimiter used to separate individual data points. It + * can be a comma, semicolon, tab or space. For example, + * toString(",") will separate data by commas. + * @param maxItems + * The maximum number of Array values to return + * + * @return the string representation of the data values. + */ + public String toString(String delimiter, int maxItems) { + log.trace("toString(): start"); + + if (data == null) { + log.debug("toString(): value is null"); + log.trace("toString(): finish"); + return null; + } + + Class valClass = data.getClass(); + + if (!valClass.isArray()) { + log.trace("toString(): finish - not array"); + String strValue = data.toString(); + if (maxItems > 0 && strValue.length() > maxItems) { + // truncate the extra characters + strValue = strValue.substring(0, maxItems); + } + return strValue; + } + + // attribute value is an array + StringBuilder sb = new StringBuilder(); + int n = Array.getLength(data); + if ((maxItems > 0) && (n > maxItems)) + n = maxItems; + + log.trace("toString: is_enum={} is_unsigned={} Array.getLength={}", getDatatype().isEnum(), + getDatatype().isUnsigned(), n); + + if (getDatatype().isEnum()) { + String cname = valClass.getName(); + char dname = cname.charAt(cname.lastIndexOf('[') + 1); + log.trace("toString: is_enum with cname={} dname={}", cname, dname); + + Map map = this.getDatatype().getEnumMembers(); + String theValue = null; + switch (dname) { + case 'B': + byte[] barray = (byte[]) data; + short sValue = barray[0]; + theValue = String.valueOf(sValue); + if (map.containsKey(theValue)) { + sb.append(map.get(theValue)); + } + else + sb.append(sValue); + for (int i = 1; i < n; i++) { + sb.append(delimiter); + sValue = barray[i]; + theValue = String.valueOf(sValue); + if (map.containsKey(theValue)) { + sb.append(map.get(theValue)); + } + else + sb.append(sValue); + } + break; + case 'S': + short[] sarray = (short[]) data; + int iValue = sarray[0]; + theValue = String.valueOf(iValue); + if (map.containsKey(theValue)) { + sb.append(map.get(theValue)); + } + else + sb.append(iValue); + for (int i = 1; i < n; i++) { + sb.append(delimiter); + iValue = sarray[i]; + theValue = String.valueOf(iValue); + if (map.containsKey(theValue)) { + sb.append(map.get(theValue)); + } + else + sb.append(iValue); + } + break; + case 'I': + int[] iarray = (int[]) data; + long lValue = iarray[0]; + theValue = String.valueOf(lValue); + if (map.containsKey(theValue)) { + sb.append(map.get(theValue)); + } + else + sb.append(lValue); + for (int i = 1; i < n; i++) { + sb.append(delimiter); + lValue = iarray[i]; + theValue = String.valueOf(lValue); + if (map.containsKey(theValue)) { + sb.append(map.get(theValue)); + } + else + sb.append(lValue); + } + break; + case 'J': + long[] larray = (long[]) data; + Long l = larray[0]; + theValue = Long.toString(l); + if (map.containsKey(theValue)) { + sb.append(map.get(theValue)); + } + else + sb.append(theValue); + for (int i = 1; i < n; i++) { + sb.append(delimiter); + l = larray[i]; + theValue = Long.toString(l); + if (map.containsKey(theValue)) { + sb.append(map.get(theValue)); + } + else + sb.append(theValue); + } + break; + default: + sb.append(Array.get(data, 0)); + for (int i = 1; i < n; i++) { + sb.append(delimiter); + sb.append(Array.get(data, i)); + } + break; + } + } + else if (getDatatype().isUnsigned()) { + String cname = valClass.getName(); + char dname = cname.charAt(cname.lastIndexOf('[') + 1); + log.trace("toString: is_unsigned with cname={} dname={}", cname, dname); + + switch (dname) { 
+ case 'B': + byte[] barray = (byte[]) data; + short sValue = barray[0]; + if (sValue < 0) { + sValue += 256; + } + sb.append(sValue); + for (int i = 1; i < n; i++) { + sb.append(delimiter); + sValue = barray[i]; + if (sValue < 0) { + sValue += 256; + } + sb.append(sValue); + } + break; + case 'S': + short[] sarray = (short[]) data; + int iValue = sarray[0]; + if (iValue < 0) { + iValue += 65536; + } + sb.append(iValue); + for (int i = 1; i < n; i++) { + sb.append(delimiter); + iValue = sarray[i]; + if (iValue < 0) { + iValue += 65536; + } + sb.append(iValue); + } + break; + case 'I': + int[] iarray = (int[]) data; + long lValue = iarray[0]; + if (lValue < 0) { + lValue += 4294967296L; + } + sb.append(lValue); + for (int i = 1; i < n; i++) { + sb.append(delimiter); + lValue = iarray[i]; + if (lValue < 0) { + lValue += 4294967296L; + } + sb.append(lValue); + } + break; + case 'J': + long[] larray = (long[]) data; + Long l = larray[0]; + String theValue = Long.toString(l); + if (l < 0) { + l = (l << 1) >>> 1; + BigInteger big1 = new BigInteger("9223372036854775808"); // 2^65 + BigInteger big2 = new BigInteger(l.toString()); + BigInteger big = big1.add(big2); + theValue = big.toString(); + } + sb.append(theValue); + for (int i = 1; i < n; i++) { + sb.append(delimiter); + l = larray[i]; + theValue = Long.toString(l); + if (l < 0) { + l = (l << 1) >>> 1; + BigInteger big1 = new BigInteger("9223372036854775808"); // 2^65 + BigInteger big2 = new BigInteger(l.toString()); + BigInteger big = big1.add(big2); + theValue = big.toString(); + } + sb.append(theValue); + } + break; + default: + String strValue = Array.get(data, 0).toString(); + if (maxItems > 0 && strValue.length() > maxItems) { + // truncate the extra characters + strValue = strValue.substring(0, maxItems); + } + sb.append(strValue); + for (int i = 1; i < n; i++) { + sb.append(delimiter); + strValue = Array.get(data, i).toString(); + if (maxItems > 0 && strValue.length() > maxItems) { + // truncate the extra characters + strValue = strValue.substring(0, maxItems); + } + sb.append(strValue); + } + break; + } + } + else { + log.trace("toString: not enum or unsigned"); + Object value = Array.get(data, 0); + String strValue; + + if (value == null) { + strValue = "null"; + } + else { + strValue = value.toString(); + } + + if (maxItems > 0 && strValue.length() > maxItems) { + // truncate the extra characters + strValue = strValue.substring(0, maxItems); + } + sb.append(strValue); + + for (int i = 1; i < n; i++) { + sb.append(delimiter); + value = Array.get(data, i); + + if (value == null) { + strValue = "null"; + } + else { + strValue = value.toString(); + } + + if (maxItems > 0 && strValue.length() > maxItems) { + // truncate the extra characters + strValue = strValue.substring(0, maxItems); + } + sb.append(strValue); + } + } + + log.trace("toString: finish"); + return sb.toString(); + } + + /** + * Given an array of bytes representing a compound Datatype and a start index + * and length, converts len number of bytes into the correct Object type and + * returns it. 
+ * + * @param data + * The byte array representing the data of the compound Datatype + * @param data_type + * The type of data to convert the bytes to + * @param start + * The start index of the bytes to get + * @param len + * The number of bytes to convert + * @return The converted type of the bytes + */ + private Object convertCompoundByteMember(byte[] data, long data_type, long start, long len) { + Object currentData = null; + + try { + long typeClass = H5.H5Tget_class(data_type); + + if (typeClass == HDF5Constants.H5T_INTEGER) { + long size = H5.H5Tget_size(data_type); + + currentData = HDFNativeData.byteToInt((int) start, (int) (len / size), data); + } + else if (typeClass == HDF5Constants.H5T_FLOAT) { + currentData = HDFNativeData.byteToDouble((int) start, 1, data); + } + } + catch (Exception ex) { + log.debug("convertCompoundByteMember(): conversion failure: ", ex); + } + + return currentData; + } +} diff --git a/src/main/java/hdf/object/CompoundDS.java b/src/main/java/hdf/object/CompoundDS.java new file mode 100644 index 0000000..0bd61cd --- /dev/null +++ b/src/main/java/hdf/object/CompoundDS.java @@ -0,0 +1,445 @@ +/***************************************************************************** + * Copyright by The HDF Group. * + * Copyright by the Board of Trustees of the University of Illinois. * + * All rights reserved. * + * * + * This file is part of the HDF Java Products distribution. * + * The full copyright notice, including terms governing use, modification, * + * and redistribution, is contained in the files COPYING and Copyright.html. * + * COPYING can be found at the root of the source code distribution tree. * + * Or, see https://support.hdfgroup.org/products/licenses.html * + * If you do not have access to either file, you may request a copy from * + * help@hdfgroup.org. * + ****************************************************************************/ + +package hdf.object; + +/** + * A CompoundDS is a dataset with compound datatype. + *

+ * A compound datatype is an aggregation of one or more datatypes. Each member + * of a compound type has a name which is unique within that type, and a + * datatype for that member in a compound datum. Compound datatypes can be nested, + * i.e., members of a compound datatype can themselves be other compound datatypes. + *

+ * For more details on compound datatypes, + * see the HDF5 User's Guide. + *

+ * Since Java cannot handle C-structured compound data, data in a compound dataset + * is loaded into a Java List. Each element of the list is a data array that + * corresponds to a compound field. The data is read/written by compound field. + *

+ * For example, if compound dataset "comp" has the following nested structure, + * and member datatypes + * + *

+ * comp --> m01 (int)
+ * comp --> m02 (float)
+ * comp --> nest1 --> m11 (char)
+ * comp --> nest1 --> m12 (String)
+ * comp --> nest1 --> nest2 --> m21 (long)
+ * comp --> nest1 --> nest2 --> m22 (double)
+ * 
+ * + * The data object is a Java list of six arrays: {int[], float[], char[], + * String[], long[] and double[]}. + * + * + * @version 1.1 9/4/2007 + * @author Peter X. Cao + */ +public abstract class CompoundDS extends Dataset implements CompoundDataFormat { + private static final long serialVersionUID = -4880399929644095662L; + + private static final org.slf4j.Logger log = org.slf4j.LoggerFactory.getLogger(CompoundDS.class); + + /** + * A single character to separate the names of nested compound fields. An + * extended ASCII character, 0x95, is used to avoid common characters in + * compound names. + */ + public static final String SEPARATOR = "\u0095"; + + /** + * The number of members of the compound dataset. + */ + protected int numberOfMembers; + + /** + * The names of members of the compound dataset. + */ + protected String[] memberNames; + + /** + * Array containing the total number of elements of the members of + * this compound dataset. + *

+ * For example, a compound dataset COMP has members of A, B and C as + * + *

+     *     COMP {
+     *         int A;
+     *         float B[5];
+     *         double C[2][3];
+     *     }
+     * 
+ * + * memberOrders is an integer array of {1, 5, 6} to indicate that member A + * has one element, member B has 5 elements, and member C has 6 elements. + */ + protected int[] memberOrders; + + /** + * The dimension sizes of each member. + *

+ * The i-th element of the Object[] is an integer array (int[]) that + * contains the dimension sizes of the i-th member. + */ + protected transient Object[] memberDims; + + /** + * The datatypes of compound members. + */ + protected Datatype[] memberTypes; + + /** + * The array to store flags to indicate if a member of this compound + * dataset is selected for read/write. + *

+ * If a member is selected, read and write operations apply only to that member. + * Applications such as HDFView will only display the selected members of + * the compound dataset. + * + *

+     * For example, if a compound dataset has four members
+     *     String[] memberNames = {"X", "Y", "Z", "TIME"};
+     * and
+     *     boolean[] isMemberSelected = {true, false, false, true};
+     * members "X" and "TIME" are selected for read and write.
+     * 
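As with the Attribute counterpart above, a sketch of a partial read, assuming dset is an initialized concrete compound dataset (for example an H5CompoundDS); per the class comment the result is a List holding one array per selected field:

    dset.setAllMemberSelection(false);
    dset.selectMember(0); // "X"
    dset.selectMember(3); // "TIME"
    java.util.List<?> fields = (java.util.List<?>) dset.read();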
+ */ + protected boolean[] isMemberSelected; + + /** + * Constructs a CompoundDS object with the given file, dataset name and path. + *

+ * The dataset object represents an existing dataset in the file. For + * example, new H5CompoundDS(file, "dset1", "/g0/") constructs a dataset + * object that corresponds to the dataset, "dset1", at group "/g0/". + *

+ * This object is usually constructed at FileFormat.open(), which loads the + * file structure and object information into memory. It is rarely used + * elsewhere. + * + * @param theFile + * the file that contains the dataset. + * @param dsName + * the name of the CompoundDS, e.g. "compDS". + * @param dsPath + * the full path of the CompoundDS, e.g. "/g1". + */ + public CompoundDS(FileFormat theFile, String dsName, String dsPath) { + this(theFile, dsName, dsPath, null); + } + + /** + * @deprecated Not for public use in the future.
+ * Using {@link #CompoundDS(FileFormat, String, String)} + * + * @param theFile + * the file that contains the dataset. + * @param dsName + * the name of the CompoundDS, e.g. "compDS". + * @param dsPath + * the full path of the CompoundDS, e.g. "/g1". + * @param oid + * the oid of the CompoundDS. + */ + @Deprecated + public CompoundDS(FileFormat theFile, String dsName, String dsPath, long[] oid) { + super(theFile, dsName, dsPath, oid); + + numberOfMembers = 0; + memberNames = null; + isMemberSelected = null; + memberTypes = null; + } + + /** + * Returns the number of members of the compound dataset. + * + * @return the number of members of the compound dataset. + */ + @Override + public final int getMemberCount() { + return numberOfMembers; + } + + /** + * Returns the number of selected members of the compound dataset. + * + * Selected members are the compound fields which are selected for + * read/write. + *

+ * For example, in a compound datatype of {int A, float B, char[] C}, + * users can choose to retrieve only {A, C} from the dataset. In this + * case, getSelectedMemberCount() returns two. + * + * @return the number of selected members. + */ + @Override + public final int getSelectedMemberCount() { + int count = 0; + + if (isMemberSelected != null) { + for (int i = 0; i < isMemberSelected.length; i++) { + if (isMemberSelected[i]) { + count++; + } + } + } + log.trace("count of selected members={}", count); + + return count; + } + + /** + * Returns the names of the members of the compound dataset. The names of + * compound members are stored in an array of Strings. + *

+ * For example, for a compound datatype of {int A, float B, char[] C} + * getMemberNames() returns {"A", "B", "C"}. + * + * @return the names of compound members. + */ + @Override + public final String[] getMemberNames() { + return memberNames; + } + + /** + * Returns an array of the names of the selected members of the compound dataset. + * + * @return an array of the names of the selected members of the compound dataset. + */ + public final String[] getSelectedMemberNames() { + if (isMemberSelected == null) { + log.debug("getSelectedMemberNames(): isMemberSelected array is null"); + log.trace("getSelectedMemberNames(): finish"); + return memberNames; + } + + int idx = 0; + String[] names = new String[getSelectedMemberCount()]; + for (int i = 0; i < isMemberSelected.length; i++) { + if (isMemberSelected[i]) { + names[idx++] = memberNames[i]; + } + } + + return names; + } + + /** + * Checks if a member of the compound dataset is selected for read/write. + * + * @param idx + * the index of compound member. + * + * @return true if the i-th member is selected; otherwise returns false. + */ + @Override + public final boolean isMemberSelected(int idx) { + if ((isMemberSelected != null) && (isMemberSelected.length > idx)) { + return isMemberSelected[idx]; + } + else { + return false; + } + } + + /** + * Selects the i-th member for read/write. + * + * @param idx + * the index of compound member. + */ + @Override + public final void selectMember(int idx) { + if ((isMemberSelected != null) && (isMemberSelected.length > idx)) { + isMemberSelected[idx] = true; + } + } + + /** + * Selects/deselects all members. + * + * @param selectAll + * The indicator to select or deselect all members. If true, all + * members are selected for read/write. If false, no member is + * selected for read/write. + */ + @Override + public final void setAllMemberSelection(boolean selectAll) { + if (isMemberSelected == null) { + return; + } + + for (int i = 0; i < isMemberSelected.length; i++) { + isMemberSelected[i] = selectAll; + } + } + + /** + * Returns array containing the total number of elements of the members of + * the compound dataset. + *

+ * For example, a compound dataset COMP has members of A, B and C as + * + *

+     *     COMP {
+     *         int A;
+     *         float B[5];
+     *         double C[2][3];
+     *     }
+     * 
+ * + * getMemberOrders() will return an integer array of {1, 5, 6} to indicate + * that member A has one element, member B has 5 elements, and member C has + * 6 elements. + * + * @return the array containing the total number of elements of the members + * of compound. + */ + @Override + public final int[] getMemberOrders() { + return memberOrders; + } + + /** + * Returns array containing the total number of elements of the selected + * members of the compound dataset. + * + *

+ * For example, a compound dataset COMP has members of A, B and C as + * + *

+     *     COMP {
+     *         int A;
+     *         float B[5];
+     *         double C[2][3];
+     *     }
+     * 
+ * + * If A and B are selected, getSelectedMemberOrders() returns an array of + * {1, 5} + * + * @return array containing the total number of elements of the selected + * members of compound. + */ + @Override + public final int[] getSelectedMemberOrders() { + log.trace("getSelectedMemberOrders(): start"); + + if (isMemberSelected == null) { + log.debug("getSelectedMemberOrders(): isMemberSelected array is null"); + log.trace("getSelectedMemberOrders(): finish"); + return memberOrders; + } + + int idx = 0; + int[] orders = new int[getSelectedMemberCount()]; + for (int i = 0; i < isMemberSelected.length; i++) { + if (isMemberSelected[i]) { + orders[idx++] = memberOrders[i]; + } + } + + log.trace("getSelectedMemberOrders(): finish"); + + return orders; + } + + /** + * Returns the dimension sizes of the i-th member. + *

+ * For example, a compound dataset COMP has members of A, B and C as + * + *

+     *     COMP {
+     *         int A;
+     *         float B[5];
+     *         double C[2][3];
+     *     }
+     * 
+ * + * getMemberDims(2) returns an array of {2, 3}, while getMemberDims(1) + * returns an array of {5}, and getMemberDims(0) returns null. + * + * @param i the i-th member + * + * @return the dimension sizes of the i-th member, null if the compound + * member is not an array. + */ + @Override + public final int[] getMemberDims(int i) { + if (memberDims == null) { + return null; + } + return (int[]) memberDims[i]; + } + + /** + * Returns an array of datatype objects of compound members. + *

+ * Each member of a compound dataset has its own datatype. The datatype of a + * member can be atomic or another compound datatype (nested compound). + * Sub-classes set up the datatype objects at init(). + *

+ * + * @return the array of datatype objects of the compound members. + */ + @Override + public final Datatype[] getMemberTypes() { + return memberTypes; + } + + /** + * Returns an array of datatype objects of selected compound members. + * + * @return an array of datatype objects of selected compound members. + */ + @Override + public final Datatype[] getSelectedMemberTypes() { + log.trace("getSelectedMemberTypes(): start"); + + if (isMemberSelected == null) { + log.debug("getSelectedMemberTypes(): isMemberSelected array is null"); + log.trace("getSelectedMemberTypes(): finish"); + return memberTypes; + } + + int idx = 0; + Datatype[] types = new Datatype[getSelectedMemberCount()]; + for (int i = 0; i < isMemberSelected.length; i++) { + if (isMemberSelected[i]) { + types[idx++] = memberTypes[i]; + } + } + + log.trace("getSelectedMemberTypes(): finish"); + + return types; + } + + /** + * @deprecated Not implemented for compound dataset. + */ + @Deprecated + @Override + public Dataset copy(Group pgroup, String name, long[] dims, Object data) + throws Exception { + throw new UnsupportedOperationException( + "Writing a subset of a compound dataset to a new dataset is not implemented."); + } +} diff --git a/src/main/java/hdf/object/CompoundDataFormat.java b/src/main/java/hdf/object/CompoundDataFormat.java new file mode 100644 index 0000000..a42cad2 --- /dev/null +++ b/src/main/java/hdf/object/CompoundDataFormat.java @@ -0,0 +1,184 @@ +/***************************************************************************** + * Copyright by The HDF Group. * + * Copyright by the Board of Trustees of the University of Illinois. * + * All rights reserved. * + * * + * This file is part of the HDF Java Products distribution. * + * The full copyright notice, including terms governing use, modification, * + * and redistribution, is contained in the files COPYING and Copyright.html. * + * COPYING can be found at the root of the source code distribution tree. * + * Or, see https://support.hdfgroup.org/products/licenses.html * + * If you do not have access to either file, you may request a copy from * + * help@hdfgroup.org. * + ****************************************************************************/ + +package hdf.object; + +/** + * An interface that provides general operations for data with a Compound + * datatype. For example, getting the names, dataspaces or datatypes of the + * members of the Compound datatype. + *

+ * + * @see hdf.object.HObject + * + * @version 1.0 5/3/2018 + * @author Jordan T. Henderson + */ +public interface CompoundDataFormat extends DataFormat { + + /** + * Returns the number of members of the compound data object. + * + * @return the number of members of the compound data object. + */ + public abstract int getMemberCount(); + + /** + * Returns the number of selected members of the compound data object. + * + * Selected members are the compound fields which are selected for read/write. + *

+ * For example, in a compound datatype of {int A, float B, char[] C}, users can + * choose to retrieve only {A, C} from the data object. In this case, + * getSelectedMemberCount() returns two. + * + * @return the number of selected members. + */ + public abstract int getSelectedMemberCount(); + + /** + * Returns the names of the members of the compound data object. The names of + * compound members are stored in an array of Strings. + *

+ * For example, for a compound datatype of {int A, float B, char[] C} + * getMemberNames() returns {"A", "B", "C"}. + * + * @return the names of compound members. + */ + public abstract String[] getMemberNames(); + + /** + * Returns an array of the names of the selected compound members. + * + * @return an array of the names of the selected compound members. + */ + public abstract String[] getSelectedMemberNames(); + + /** + * Checks if a member of the compound data object is selected for read/write. + * + * @param idx + * the index of compound member. + * + * @return true if the i-th member is selected; otherwise returns false. + */ + public abstract boolean isMemberSelected(int idx); + + /** + * Selects the i-th member for read/write. + * + * @param idx + * the index of compound member. + */ + public abstract void selectMember(int idx); + + /** + * Selects/deselects all members. + * + * @param selectAll + * The indicator to select or deselect all members. If true, all + * members are selected for read/write. If false, no member is + * selected for read/write. + */ + public abstract void setAllMemberSelection(boolean selectAll); + + /** + * Returns array containing the total number of elements of the members of the + * compound data object. + *

+ * For example, a compound dataset COMP has members of A, B and C as + * + *

+     *     COMP {
+     *         int A;
+     *         float B[5];
+     *         double C[2][3];
+     *     }
+     * 
+ * + * getMemberOrders() will return an integer array of {1, 5, 6} to indicate that + * member A has one element, member B has 5 elements, and member C has 6 + * elements. + * + * @return the array containing the total number of elements of the members of + * the compound data object. + */ + public abstract int[] getMemberOrders(); + + /** + * Returns array containing the total number of elements of the selected members + * of the compound data object. + * + *

+ * For example, a compound dataset COMP has members of A, B and C as + * + *

+     *     COMP {
+     *         int A;
+     *         float B[5];
+     *         double C[2][3];
+     *     }
+     * 
+ * + * If A and B are selected, getSelectedMemberOrders() returns an array of {1, 5} + * + * @return array containing the total number of elements of the selected members + * of the compound data object. + */ + public abstract int[] getSelectedMemberOrders(); + + /** + * Returns the dimension sizes of the i-th member. + *

+ * For example, a compound dataset COMP has members of A, B and C as + * + *

+     *     COMP {
+     *         int A;
+     *         float B[5];
+     *         double C[2][3];
+     *     }
+     * 
+ * + * getMemberDims(2) returns an array of {2, 3}, while getMemberDims(1) returns + * an array of {5}, and getMemberDims(0) returns null. + * + * @param i + * the i-th member + * + * @return the dimension sizes of the i-th member, null if the compound member + * is not an array. + */ + public abstract int[] getMemberDims(int i); + + /** + * Returns an array of datatype objects of the compound members. + *

+ * Each member of a compound data object has its own datatype. The datatype of a + * member can be atomic or another compound datatype (nested compound). The + * datatype objects are set up at init(). + *

+ * + * @return the array of datatype objects of the compound members. + */ + public abstract Datatype[] getMemberTypes(); + + /** + * Returns an array of datatype objects of the selected compound members. + * + * @return an array of datatype objects of the selected compound members. + */ + public abstract Datatype[] getSelectedMemberTypes(); + +} diff --git a/src/main/java/hdf/object/DataFormat.java b/src/main/java/hdf/object/DataFormat.java new file mode 100644 index 0000000..40704f8 --- /dev/null +++ b/src/main/java/hdf/object/DataFormat.java @@ -0,0 +1,366 @@ +/***************************************************************************** + * Copyright by The HDF Group. * + * Copyright by the Board of Trustees of the University of Illinois. * + * All rights reserved. * + * * + * This file is part of the HDF Java Products distribution. * + * The full copyright notice, including terms governing use, modification, * + * and redistribution, is contained in the files COPYING and Copyright.html. * + * COPYING can be found at the root of the source code distribution tree. * + * Or, see https://support.hdfgroup.org/products/licenses.html * + * If you do not have access to either file, you may request a copy from * + * help@hdfgroup.org. * + ****************************************************************************/ + +package hdf.object; + +/** + * An interface that provides general I/O operations for object data. For + * example, reading data content from the file into memory or writing data + * content from memory into the file. + *

+ * + * @see hdf.object.HObject + * + * @version 1.0 4/2/2018 + * @author Jordan T. Henderson + */ +public interface DataFormat { + public abstract boolean isInited(); + + public abstract void init(); + + /** + * Retrieves the object's data from the file. + * + * @return the object's data. + * + * @throws Exception + * if the data can not be retrieved + */ + public abstract Object getData() throws Exception, OutOfMemoryError; + + /** + * Sets the data buffer of this data object in memory. + * + * @param data + * the data to write. + */ + public abstract void setData(Object data); + + /** + * Clears the current data buffer in memory and forces the next read() to load + * the data from file. + *

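+ * A short, hypothetical sketch of the intended usage (the "dataObject" name is
+ * an assumption):
+ *
+     * dataObject.getSelectedDims()[0] = 1; // change the selection
+     * dataObject.clearData();              // invalidate the cached buffer
+     * Object fresh = dataObject.getData(); // forces a new read() from file
+ *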
+ * The function read() loads data from file into memory only if the data has not + * already been read. If data is already in memory, read() just returns the memory buffer. + * Sometimes we want to force read() to re-read data from file. For example, + * when the selection is changed, we need to re-read the data. + * + * @see #getData() + * @see #read() + */ + public abstract void clearData(); + + /** + * Reads the data from file. + *

+ * read() reads the data from file to a memory buffer and returns the memory + * buffer. The dataset object does not hold the memory buffer. To store the + * memory buffer in the dataset object, one must call getData(). + *

+ * By default, the whole dataset is read into memory. Users can also select + * a subset to read. Subsetting is done in an implicit way. + * + * @return the data read from file. + * + * @see #getData() + * + * @throws Exception + * if object can not be read + * @throws OutOfMemoryError + * if memory is exhausted + */ + public abstract Object read() throws Exception, OutOfMemoryError; + + /** + * Writes a memory buffer to the object in the file. + * + * @param buf + * the data to write + * + * @throws Exception + * if data can not be written + */ + public abstract void write(Object buf) throws Exception; + + /** + * Writes the current memory buffer to the object in the file. + * + * @throws Exception + * if data can not be written + */ + public abstract void write() throws Exception; + + /** + * Converts the data values of this data object to appropriate Java integers if + * they are unsigned integers. + * + * @see hdf.object.Dataset#convertToUnsignedC(Object) + * @see hdf.object.Dataset#convertFromUnsignedC(Object, Object) + * + * @return the converted data buffer. + */ + public Object convertFromUnsignedC(); + + /** + * Converts Java integer data values of this data object back to unsigned C-type + * integer data if they are unsigned integers. + * + * @see hdf.object.Dataset#convertToUnsignedC(Object) + * @see hdf.object.Dataset#convertToUnsignedC(Object, Object) + * + * @return the converted data buffer. + */ + public Object convertToUnsignedC(); + + /** + * Returns the fill values for the data object. + * + * @return the fill values for the data object. + */ + public abstract Object getFillValue(); + + /** + * Returns the datatype of the data object. + * + * @return the datatype of the data object. + */ + public abstract Datatype getDatatype(); + + /** + * Returns the rank (number of dimensions) of the data object. It returns a + * negative number if it failed to retrieve the dimension information from + * the file. + * + * @return the number of dimensions of the data object. + */ + public abstract int getRank(); + + /** + * Returns the array that contains the dimension sizes of the data value of + * the data object. It returns null if it failed to retrieve the dimension + * information from the file. + * + * @return the dimension sizes of the data object. + */ + public abstract long[] getDims(); + + + /**************************************************************** + * * The following four definitions are used for data subsetting. * * + ****************************************************************/ + + /** + * Returns the dimension sizes of the selected subset. + *

+ * The SelectedDims is the number of data points of the selected subset. + * Applications can use this array to change the size of selected subset. + * + * The selected size must be less than or equal to the current dimension size. + * Combined with the starting position, selected sizes and stride, the subset of + * a rectangle selection is fully defined. + *

+ * For example, if a 4 X 5 dataset is as follows: + * + *

+     *     0,  1,  2,  3,  4
+     *    10, 11, 12, 13, 14
+     *    20, 21, 22, 23, 24
+     *    30, 31, 32, 33, 34
+     * long[] dims = {4, 5};
+     * long[] startDims = {1, 2};
+     * long[] selectedDims = {3, 3};
+     * long[] selectedStride = {1, 1};
+     * then the following subset is selected by the startDims and selectedDims
+     *     12, 13, 14
+     *     22, 23, 24
+     *     32, 33, 34
+     * 
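+ * A hypothetical sketch that produces the selection above (the "dataObject"
+ * name is an assumption):
+ *
+     * long[] start = dataObject.getStartDims();
+     * long[] selected = dataObject.getSelectedDims();
+     * start[0] = 1;    start[1] = 2;    // begin at row 1, column 2
+     * selected[0] = 3; selected[1] = 3; // take a 3 x 3 block
+ *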
+ * + * @return the dimension sizes of the selected subset. + */ + public abstract long[] getSelectedDims(); + + /** + * Returns the starting position of a selected subset. + *

+ * Applications can use this array to change the starting position of a + * selection. Combined with the selected dimensions, selected sizes and stride, + * the subset of a rectangle selection is fully defined. + *

+ * For example, if a 4 X 5 dataset is as follows: + * + *

+     *     0,  1,  2,  3,  4
+     *    10, 11, 12, 13, 14
+     *    20, 21, 22, 23, 24
+     *    30, 31, 32, 33, 34
+     * long[] dims = {4, 5};
+     * long[] startDims = {1, 2};
+     * long[] selectedDims = {3, 3};
+     * long[] selectedStride = {1, 1};
+     * then the following subset is selected by the startDims and selectedDims
+     *     12, 13, 14
+     *     22, 23, 24
+     *     32, 33, 34
+     * 
+ * + * @return the starting position of a selected subset. + */ + public abstract long[] getStartDims(); + + /** + * Returns the selectedStride of the selected dataset. + *

+ * Applications can use this array to change how many elements to move in each + * dimension. + * + * Combined with the starting position and selected sizes, the subset of a + * rectangle selection is defined. + *

+ * For example, if a 4 X 5 dataset is as follows: + * + *

+     *     0,  1,  2,  3,  4
+     *    10, 11, 12, 13, 14
+     *    20, 21, 22, 23, 24
+     *    30, 31, 32, 33, 34
+     * long[] dims = {4, 5};
+     * long[] startDims = {0, 0};
+     * long[] selectedDims = {2, 2};
+     * long[] selectedStride = {2, 3};
+     * then the following subset is selected by the startDims and selectedDims
+     *     0,   3
+     *     20, 23
+     * 
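+ * A hypothetical sketch that produces the strided selection above (the
+ * "dataObject" name is an assumption):
+ *
+     * long[] start = dataObject.getStartDims();
+     * long[] stride = dataObject.getStride();
+     * long[] count = dataObject.getSelectedDims();
+     * start[0] = 0;  start[1] = 0;  // begin at the upper-left corner
+     * stride[0] = 2; stride[1] = 3; // every other row, every third column
+     * count[0] = 2;  count[1] = 2;  // a 2 x 2 result
+ *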
+ * + * @return the selectedStride of the selected dataset. + */ + public abstract long[] getStride(); + + /** + * Returns the indices of display order. + *

+ * + * selectedIndex[] is provided for two purposes: + *

  1. selectedIndex[] is used to indicate the order of dimensions for display. + * selectedIndex[0] is for the row, selectedIndex[1] is for the column and + * selectedIndex[2] for the depth. + *


  2. selectedIndex[] is also used to select dimensions for display for + * datasets with three or more dimensions. We assume that applications such as + * HDFView can only display data values up to three dimensions (2D + * spreadsheet/image with a third dimension which the 2D spreadsheet/image is + * selected from). For datasets with more than three dimensions, we need + * selectedIndex[] to tell applications which three dimensions are chosen for + * display.
    + * For example, for a four dimension dataset, if selectedIndex[] = {1, 2, 3}, + * then dim[1] is selected as row index, dim[2] is selected as column index and + * dim[3] is selected as depth index. dim[0] is not selected. Its location is + * fixed at 0 by default. + *
+ * + * @return the array of the indices of display order. + */ + public int[] getSelectedIndex(); + + /************************************************************************** + * * The following two definitions are used primarily for GUI applications. * * + **************************************************************************/ + + /** + * Returns the dimension size of the vertical axis. + * + *

+ * This function is used by GUI applications such as HDFView. GUI applications + * display a dataset in a 2D table or 2D image. The display order is specified + * by the index array of selectedIndex as follows: + *

+ *
selectedIndex[0] -- height
+ *
The vertical axis
+ *
selectedIndex[1] -- width
+ *
The horizontal axis
+ *
selectedIndex[2] -- depth
+ *
The depth axis is used for 3 or more dimensional datasets.
+ *
+ * Applications can use getSelectedIndex() to access and change the display + * order. For example, in a 2D dataset of 200x50 (dim0=200, dim1=50), the + * following code will set the height=200 and width=50. + * + *
+     * int[] selectedIndex = dataset.getSelectedIndex();
+     * selectedIndex[0] = 0;
+     * selectedIndex[1] = 1;
+     * 
+ * + * @see #getSelectedIndex() + * @see #getWidth() + * + * @return the size of dimension of the vertical axis. + */ + public long getHeight(); + + /** + * Returns the dimension size of the horizontal axis. + * + *

+ * This function is used by GUI applications such as HDFView. GUI applications + * display a dataset in a 2D table or 2D image. The display order is specified by + * the index array of selectedIndex as follows: + *

+ *
selectedIndex[0] -- height
+ *
The vertical axis
+ *
selectedIndex[1] -- width
+ *
The horizontal axis
+ *
selectedIndex[2] -- depth
+ *
The depth axis, which is used for 3 or more dimension datasets.
+ *
+ * Applications can use getSelectedIndex() to access and change the display + * order. For example, in a 2D dataset of 200x50 (dim0=200, dim1=50), the + * following code will set the height=200 and width=50. + * + *
+     * int[] selectedIndex = dataset.getSelectedIndex();
+     * selectedIndex[0] = 0;
+     * selectedIndex[1] = 1;
+     * 
+ * + * @see #getSelectedIndex() + * @see #getHeight() + * + * @return the size of dimension of the horizontal axis. + */ + public long getWidth(); + + /** + * Returns the string representation of compression information. + *

+ * For example, "SZIP: Pixels per block = 8: H5Z_FILTER_CONFIG_DECODE_ENABLED". + * + * @return the string representation of compression information. + */ + public abstract String getCompression(); + + /** + * Get runtime Class of the original data buffer if converted. + * + * @return the Class of the original data buffer + */ + @SuppressWarnings("rawtypes") + public abstract Class getOriginalClass(); +} diff --git a/src/main/java/hdf/object/Dataset.java b/src/main/java/hdf/object/Dataset.java new file mode 100644 index 0000000..0e6f487 --- /dev/null +++ b/src/main/java/hdf/object/Dataset.java @@ -0,0 +1,1297 @@ +/***************************************************************************** + * Copyright by The HDF Group. * + * Copyright by the Board of Trustees of the University of Illinois. * + * All rights reserved. * + * * + * This file is part of the HDF Java Products distribution. * + * The full copyright notice, including terms governing use, modification, * + * and redistribution, is contained in the files COPYING and Copyright.html. * + * COPYING can be found at the root of the source code distribution tree. * + * Or, see https://support.hdfgroup.org/products/licenses.html * + * If you do not have access to either file, you may request a copy from * + * help@hdfgroup.org. * + ****************************************************************************/ + +package hdf.object; + +import java.lang.reflect.Array; +import java.util.List; + +/** + * The abstract class provides general APIs to create and manipulate dataset + * objects, and retrieve dataset properties, datatype and dimension sizes. + *

+ * This class provides two convenient functions, read()/write(), to read/write + * data values. Reading/writing data may take many library calls if we use the + * library APIs directly. The read() and write() functions hide all the details of + * these calls from users. + *

+ * For more details on datasets, + * see the HDF5 User's Guide + *

+ * + * @see hdf.object.ScalarDS + * @see CompoundDS + * + * @version 1.1 9/4/2007 + * @author Peter X. Cao + */ +public abstract class Dataset extends HObject implements MetaDataContainer, DataFormat { + private static final long serialVersionUID = -3360885430038261178L; + + private static final org.slf4j.Logger log = org.slf4j.LoggerFactory.getLogger(Dataset.class); + + /** + * The memory buffer that holds the raw data array of the dataset. + */ + protected transient Object data; + + /** + * The number of dimensions of the dataset. + */ + protected int rank; + + /** + * The current dimension sizes of the dataset + */ + protected long[] dims; + + /** + * The max dimension sizes of the dataset + */ + protected long[] maxDims; + + /** + * Array that contains the number of data points selected (for read/write) + * in each dimension. + *

+ * The selected size must be less than or equal to the current dimension size. + * A subset of a rectangle selection is defined by the starting position and + * selected sizes. + *

+ * For example, if a 4 X 5 dataset is as follows: + * + *

+     *     0,  1,  2,  3,  4
+     *    10, 11, 12, 13, 14
+     *    20, 21, 22, 23, 24
+     *    30, 31, 32, 33, 34
+     * long[] dims = {4, 5};
+     * long[] startDims = {1, 2};
+     * long[] selectedDims = {3, 3};
+     * then the following subset is selected by the startDims and selectedDims above:
+     *     12, 13, 14
+     *     22, 23, 24
+     *     32, 33, 34
+     * 
+ */ + protected long[] selectedDims; + + /** + * The starting position of each dimension of a selected subset. With both + * the starting position and selected sizes, the subset of a rectangle + * selection is fully defined. + */ + protected long[] startDims; + + /** + * Array that contains the indices of the dimensions selected for display. + *

+ * selectedIndex[] is provided for two purposes: + *

  1. + * selectedIndex[] is used to indicate the order of dimensions for display, + * i.e. selectedIndex[0] = row, selectedIndex[1] = column and + * selectedIndex[2] = depth. For example, for a four dimension dataset, if + * selectedIndex[] is {1, 2, 3}, then dim[1] is selected as row index, + * dim[2] is selected as column index and dim[3] is selected as depth index. + *
  2. + * selectedIndex[] is also used to select dimensions for display for + * datasets with three or more dimensions. We assume that applications such + * as HDFView can only display data up to three dimensions (a 2D + * spreadsheet/image with a third dimension that the 2D spreadsheet/image is + * cut from). For datasets with more than three dimensions, we need + * selectedIndex[] to store which three dimensions are chosen for display. + * For example, for a four dimension dataset, if selectedIndex[] = {1, 2, 3}, + * then dim[1] is selected as row index, dim[2] is selected as column index + * and dim[3] is selected as depth index. dim[0] is not selected. Its + * location is fixed at 0 by default. + *
+ */ + protected final int[] selectedIndex; + + /** + * The number of elements to move from the start location in each dimension. + * For example, if selectedStride[0] = 2, every other data point is selected + * along dim[0]. + */ + protected long[] selectedStride; + + /** + * The array of dimension sizes for a chunk. + */ + protected long[] chunkSize; + + /** The compression information. */ + protected StringBuilder compression; + public static final String COMPRESSION_GZIP_TXT = "GZIP: level = "; + + /** The filters information. */ + protected StringBuilder filters; + + /** The storage layout information. */ + protected StringBuilder storageLayout; + + /** The storage information. */ + protected StringBuilder storage; + + /** The datatype object of the dataset. */ + protected Datatype datatype; + + /** + * Array of strings that represent the dimension names. It is null if dimension names do not exist. + */ + protected String[] dimNames; + + /** Flag to indicate if the byte[] array is converted to strings */ + protected boolean convertByteToString = true; + + /** Flag to indicate if data values are loaded into memory. */ + protected boolean isDataLoaded = false; + + /** Flag to indicate if this dataset has been initialized */ + protected boolean inited = false; + + /** The number of data points in the memory buffer. */ + protected long nPoints = 1; + + /** + * The data buffer that contains the raw data directly reading from file + * (before any data conversion). + */ + protected transient Object originalBuf = null; + + /** + * The array that holds the converted data of unsigned C-type integers. + *

+ * For example, suppose that the original data is an array of unsigned + * 16-bit short integers. Since Java does not support unsigned integers, the + * data is converted to an array of 32-bit signed integers. In that case, the + * converted buffer is the array of 32-bit signed integers. + */ + protected transient Object convertedBuf = null; + + /** + * Constructs a Dataset object with a given file, name and path. + * + * @param theFile + * the file that contains the dataset. + * @param dsName + * the name of the Dataset, e.g. "dset1". + * @param dsPath + * the full group path of this Dataset, e.g. "/arrays/". + */ + public Dataset(FileFormat theFile, String dsName, String dsPath) { + this(theFile, dsName, dsPath, null); + } + + /** + * @deprecated Not for public use in the future.
+ * Using {@link #Dataset(FileFormat, String, String)} + * + * @param theFile + * the file that contains the dataset. + * @param dsName + * the name of the Dataset, e.g. "dset1". + * @param dsPath + * the full group path of this Dataset, e.g. "/arrays/". + * @param oid + * the oid of this Dataset. + */ + @Deprecated + public Dataset(FileFormat theFile, String dsName, String dsPath, long[] oid) { + super(theFile, dsName, dsPath, oid); + + datatype = null; + rank = -1; + data = null; + dims = null; + maxDims = null; + selectedDims = null; + startDims = null; + selectedStride = null; + chunkSize = null; + compression = new StringBuilder("NONE"); + filters = new StringBuilder("NONE"); + storageLayout = new StringBuilder("NONE"); + storage = new StringBuilder("NONE"); + dimNames = null; + + selectedIndex = new int[3]; + selectedIndex[0] = 0; + selectedIndex[1] = 1; + selectedIndex[2] = 2; + } + + /** + * Clears memory held by the dataset, such as the data buffer. + */ + @SuppressWarnings("rawtypes") + public void clear() { + if (data != null) { + if (data instanceof List) { + ((List) data).clear(); + } + data = null; + originalBuf = null; + convertedBuf = null; + } + isDataLoaded = false; + } + + /** + * Returns the rank (number of dimensions) of the dataset. + * + * @return the number of dimensions of the dataset. + */ + @Override + public final int getRank() { + if (!inited) + init(); + + return rank; + } + + /** + * Returns the array that contains the dimension sizes of the dataset. + * + * @return the dimension sizes of the dataset. + */ + @Override + public final long[] getDims() { + if (!inited) + init(); + + return dims; + } + + /** + * Returns the array that contains the max dimension sizes of the dataset. + * + * @return the max dimension sizes of the dataset. + */ + public final long[] getMaxDims() { + if (!inited) init(); + + if (maxDims == null) return dims; + + return maxDims; + } + + /** + * Returns the dimension sizes of the selected subset. + *

+ * The SelectedDims is the number of data points of the selected subset. + * Applications can use this array to change the size of selected subset. + * + * The selected size must be less than or equal to the current dimension size. + * Combined with the starting position, selected sizes and stride, the + * subset of a rectangle selection is fully defined. + *

+ * For example, if a 4 X 5 dataset is as follows: + * + *

+     *     0,  1,  2,  3,  4
+     *    10, 11, 12, 13, 14
+     *    20, 21, 22, 23, 24
+     *    30, 31, 32, 33, 34
+     * long[] dims = {4, 5};
+     * long[] startDims = {1, 2};
+     * long[] selectedDims = {3, 3};
+     * long[] selectedStride = {1, 1};
+     * then the following subset is selected by the startDims and selectedDims
+     *     12, 13, 14
+     *     22, 23, 24
+     *     32, 33, 34
+     * 
+ * + * @return the dimension sizes of the selected subset. + */ + @Override + public final long[] getSelectedDims() { + if (!inited) init(); + + return selectedDims; + } + + /** + * Returns the starting position of a selected subset. + *

+ * Applications can use this array to change the starting position of a + * selection. Combined with the selected dimensions, selected sizes and + * stride, the subset of a rectangle selection is fully defined. + *

+ * For example, if a 4 X 5 dataset is as follows: + * + *

+     *     0,  1,  2,  3,  4
+     *    10, 11, 12, 13, 14
+     *    20, 21, 22, 23, 24
+     *    30, 31, 32, 33, 34
+     * long[] dims = {4, 5};
+     * long[] startDims = {1, 2};
+     * long[] selectedDims = {3, 3};
+     * long[] selectedStride = {1, 1};
+     * then the following subset is selected by the startDims and selectedDims
+     *     12, 13, 14
+     *     22, 23, 24
+     *     32, 33, 34
+     * 
+ * + * @return the starting position of a selected subset. + */ + @Override + public final long[] getStartDims() { + if (!inited) init(); + + return startDims; + } + + /** + * Returns the selectedStride of the selected dataset. + *

+ * Applications can use this array to change how many elements to move in + * each dimension. + * + * Combined with the starting position and selected sizes, the subset of a + * rectangle selection is defined. + *

+ * For example, if a 4 X 5 dataset is as follows: + * + *

+     *     0,  1,  2,  3,  4
+     *    10, 11, 12, 13, 14
+     *    20, 21, 22, 23, 24
+     *    30, 31, 32, 33, 34
+     * long[] dims = {4, 5};
+     * long[] startDims = {0, 0};
+     * long[] selectedDims = {2, 2};
+     * long[] selectedStride = {2, 3};
+     * then the following subset is selected by the startDims and selectedDims
+     *     0,   3
+     *     20, 23
+     * 
+ * + * @return the selectedStride of the selected dataset. + */ + @Override + public final long[] getStride() { + if (!inited) init(); + + if (rank <= 0) { + return null; + } + + if (selectedStride == null) { + selectedStride = new long[rank]; + for (int i = 0; i < rank; i++) { + selectedStride[i] = 1; + } + } + + return selectedStride; + } + + /** + * Sets the flag that indicates if a byte array is converted to a string + * array. + *

+ * In a string dataset, the raw data from file is stored in a byte array. By + * default, this byte array is converted to an array of strings. For a large + * dataset (e.g. more than one million strings), the conversion takes a long + * time and requires a lot of memory space to store the strings. In some + * applications, such a conversion can be delayed. For example, a GUI + * application may convert only the part of the strings that is visible to the + * users, not the entire data array. + *

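+ * A hypothetical sketch of such a delayed conversion:
+ *
+     * dataset.setConvertByteToString(false);
+     * byte[] raw = (byte[]) dataset.getData(); // raw bytes, no String conversion yet
+ *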
+ * setConvertByteToString(boolean b) allows users to set the flag so that + * applications can choose to perform the byte-to-string conversion or not. + * If the flag is set to false, the getData() returns an array of byte + * instead of an array of strings. + * + * @param b + * convert bytes to strings if b is true; otherwise, if false, do + * not convert bytes to strings. + */ + public final void setConvertByteToString(boolean b) { + convertByteToString = b; + } + + /** + * Returns the flag that indicates if a byte array is converted to a string + * array. + * + * @return true if byte array is converted to string; otherwise, returns + * false if there is no conversion. + */ + public final boolean getConvertByteToString() { + return convertByteToString; + } + + /** + * Reads the raw data of the dataset from file to a byte array. + *

+ * readBytes() reads raw data to an array of bytes instead of array of its + * datatype. For example, for a one-dimension 32-bit integer dataset of + * size 5, readBytes() returns a byte array of size 20 instead of an + * int array of 5. + *

+ * readBytes() can be used to copy data from one dataset to another + * efficiently because the raw data is not converted to its native type, it + * saves memory space and CPU time. + * + * @return the byte array of the raw data. + * + * @throws Exception if data can not be read + */ + public abstract byte[] readBytes() throws Exception; + + /** + * Writes the memory buffer of this dataset to file. + * + * @throws Exception if buffer can not be written + */ + @Override + public final void write() throws Exception { + if (data != null) { + write(data); + } + } + + /** + * Creates a new dataset and writes the data buffer to the new dataset. + *

+ * This function allows applications to create a new dataset for a given + * data buffer. For example, users can select a specific interesting part + * from a large image and create a new image with the selection. + *

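+ * A hypothetical sketch (the group, name, sizes and buffer below are
+ * assumptions, not part of this API):
+ *
+     * long[] newDims = {64, 64};
+     * Dataset region = dataset.copy(parentGroup, "region", newDims, buf);
+ *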
+ * The new dataset retains the datatype and dataset creation properties of + * this dataset. + * + * @param pgroup + * the group which the dataset is copied to. + * @param name + * the name of the new dataset. + * @param dims + * the dimension sizes of the the new dataset. + * @param data + * the data values of the subset to be copied. + * + * @return the new dataset. + * + * @throws Exception if dataset can not be copied + */ + public abstract Dataset copy(Group pgroup, String name, long[] dims, Object data) throws Exception; + + @Override + public final boolean isInited() { + return inited; + } + + /** + * Returns the data buffer of the dataset in memory. + *

+ * If data is already loaded into memory, returns the data; otherwise, calls + * read() to read data from file into a memory buffer and returns the memory + * buffer. + *

+ * By default, the whole dataset is read into memory. Users can also select + * a subset to read. Subsetting is done in an implicit way. + *

+ * How to Select a Subset + *

+ * A selection is specified by three arrays: start, stride and count. + *

  1. start: offset of a selection + *
  2. stride: determines how many elements to move in each dimension + *
  3. count: number of elements to select in each dimension + *
+ * getStartDims(), getStride() and getSelectedDims() return the start, + * stride and count arrays respectively. Applications can make a selection + * by changing the values of the arrays. + *

+ * The following example shows how to make a subset. In the example, the + * dataset is a 4-dimensional array of [200][100][50][10], i.e. dims[0]=200; + * dims[1]=100; dims[2]=50; dims[3]=10;
+ * We want to select every other data point in dims[1] and dims[2] + * + *

+     * int rank = dataset.getRank(); // number of dimensions of the dataset
+     * long[] dims = dataset.getDims(); // the dimension sizes of the dataset
+     * long[] selected = dataset.getSelectedDims(); // the selected size of the dataset
+     * long[] start = dataset.getStartDims(); // the offset of the selection
+     * long[] stride = dataset.getStride(); // the stride of the dataset
+     * int[] selectedIndex = dataset.getSelectedIndex(); // the selected dimensions for display
+     *
+     * // select dim1 and dim2 as 2D data for display, and slice through dim0
+     * selectedIndex[0] = 1;
+     * selectedIndex[1] = 2;
+     * selectedIndex[2] = 0;
+     *
+     * // reset the selection arrays
+     * for (int i = 0; i < rank; i++) {
+     *     start[i] = 0;
+     *     selected[i] = 1;
+     *     stride[i] = 1;
+     * }
+     *
+     * // set stride to 2 on dim1 and dim2 so that every other data point is
+     * // selected.
+     * stride[1] = 2;
+     * stride[2] = 2;
+     *
+     * // set the selection size of dim1 and dim2
+     * selected[1] = dims[1] / stride[1];
+     * selected[2] = dims[2] / stride[2];
+     *
+     * // when dataset.getData() is called, the selection above will be used since
+     * // the dimension arrays are passed by reference. Changes of these arrays
+     * // outside the dataset object directly change the values of these array
+     * // in the dataset object.
+     * 
+ *

+ * For ScalarDS, the memory data buffer is a one-dimensional array of byte, + * short, int, float, double or String type based on the datatype of the + * dataset. + *

+ * For CompoundDS, the memory data object is a java.util.List object. Each + * element of the list is a data array that corresponds to a compound field. + *

+ * For example, if compound dataset "comp" has the following nested + * structure, and member datatypes + * + *

+     * comp --> m01 (int)
+     * comp --> m02 (float)
+     * comp --> nest1 --> m11 (char)
+     * comp --> nest1 --> m12 (String)
+     * comp --> nest1 --> nest2 --> m21 (long)
+     * comp --> nest1 --> nest2 --> m22 (double)
+     * 
+ * + * getData() returns a list of six arrays: {int[], float[], char[], + * String[], long[] and double[]}. + * + * @return the memory buffer of the dataset. + * + * @throws Exception if object can not be read + * @throws OutOfMemoryError if memory is exhausted + */ + @Override + public final Object getData() throws Exception, OutOfMemoryError { + log.trace("getData: start"); + if (!isDataLoaded) { + log.trace("getData: read"); + data = read(); // load the data + originalBuf = data; + isDataLoaded = true; + nPoints = 1; + log.trace("getData: selectedDims length={}",selectedDims.length); + for (int j = 0; j < selectedDims.length; j++) { + nPoints *= selectedDims[j]; + } + log.trace("getData: read {}", nPoints); + } + + log.trace("getData: finish"); + return data; + } + + /** + * Not for public use in the future. + *

+ * setData() is not safe to use because it changes the memory buffer + * of the dataset object. Dataset operations such as write/read + * will fail if the buffer type or size is changed. + * + * @param d the object data - must be an array of Objects + */ + @Override + public final void setData(Object d) { + if (!(this instanceof Attribute)) + throw new UnsupportedOperationException("setData: unsupported for non-Attribute objects"); + + log.trace("setData"); + data = d; + } + + /** + * Clears the current data buffer in memory and forces the next read() to load + * the data from file. + *

+ * The function read() loads data from file into memory only if the data has + * not already been read. If data is already in memory, read() just returns the memory + * buffer. Sometimes we want to force read() to re-read data from file. For + * example, when the selection is changed, we need to re-read the data. + * + * @see #getData() + * @see #read() + */ + @Override + public void clearData() { + isDataLoaded = false; + } + + /** + * Returns the dimension size of the vertical axis. + * + *

+ * This function is used by GUI applications such as HDFView. GUI + * applications display a dataset in a 2D table or 2D image. The display + * order is specified by the index array of selectedIndex as follows: + *

+ *
selectedIndex[0] -- height
+ *
The vertical axis
+ *
selectedIndex[1] -- width
+ *
The horizontal axis
+ *
selectedIndex[2] -- depth
+ *
The depth axis is used for 3 or more dimensional datasets.
+ *
+ * Applications can use getSelectedIndex() to access and change the display + * order. For example, in a 2D dataset of 200x50 (dim0=200, dim1=50), the + * following code will set the height=200 and width=50. + * + *
+     * int[] selectedIndex = dataset.getSelectedIndex();
+     * selectedIndex[0] = 0;
+     * selectedIndex[1] = 1;
+     * 
+ * + * @see #getSelectedIndex() + * @see #getWidth() + * + * @return the size of dimension of the vertical axis. + */ + @Override + public final long getHeight() { + if (!inited) init(); + + if ((selectedDims == null) || (selectedIndex == null)) { + return 0; + } + + return selectedDims[selectedIndex[0]]; + } + + /** + * Returns the dimension size of the horizontal axis. + * + *

+ * This function is used by GUI applications such as HDFView. GUI + * applications display a dataset in a 2D table or 2D image. The display order is + * specified by the index array of selectedIndex as follows: + *

+ *
selectedIndex[0] -- height
+ *
The vertical axis
+ *
selectedIndex[1] -- width
+ *
The horizontal axis
+ *
selectedIndex[2] -- depth
+ *
The depth axis, which is used for 3 or more dimension datasets.
+ *
+ * Applications can use getSelectedIndex() to access and change the display + * order. For example, in a 2D dataset of 200x50 (dim0=200, dim1=50), the + * following code will set the height=200 and width=50. + * + *
+     * int[] selectedIndex = dataset.getSelectedIndex();
+     * selectedIndex[0] = 0;
+     * selectedIndex[1] = 1;
+     * 
+ * + * @see #getSelectedIndex() + * @see #getHeight() + * + * @return the size of dimension of the horizontal axis. + */ + @Override + public final long getWidth() { + if (!inited) init(); + + if ((selectedDims == null) || (selectedIndex == null)) { + return 0; + } + + if ((selectedDims.length < 2) || (selectedIndex.length < 2)) { + return 1; + } + + return selectedDims[selectedIndex[1]]; + } + + /** + * Returns the indices of display order. + *

+ * + * selectedIndex[] is provided for two purposes: + *

  1. + * selectedIndex[] is used to indicate the order of dimensions for display. + * selectedIndex[0] is for the row, selectedIndex[1] is for the column and + * selectedIndex[2] for the depth. + *


  2. + * selectedIndex[] is also used to select dimensions for display for + * datasets with three or more dimensions. We assume that applications such + * as HDFView can only display data values up to three dimensions (2D + * spreadsheet/image with a third dimension which the 2D spreadsheet/image + * is selected from). For datasets with more than three dimensions, we need + * selectedIndex[] to tell applications which three dimensions are chosen + * for display.
    + * For example, for a four dimension dataset, if selectedIndex[] = {1, 2, 3}, + * then dim[1] is selected as row index, dim[2] is selected as column index + * and dim[3] is selected as depth index. dim[0] is not selected. Its + * location is fixed at 0 by default. + *
+ * + * @return the array of the indices of display order. + */ + @Override + public final int[] getSelectedIndex() { + if (!inited) init(); + + return selectedIndex; + } + + /** + * Returns the string representation of compression information. + *

+ * For example, + * "SZIP: Pixels per block = 8: H5Z_FILTER_CONFIG_DECODE_ENABLED". + * + * @return the string representation of compression information. + */ + @Override + public final String getCompression() { + if (!inited) init(); + + return compression.toString(); + } + + /** + * Returns the string representation of filter information. + * + * @return the string representation of filter information. + */ + public final String getFilters() { + if (!inited) init(); + + return filters.toString(); + } + + /** + * Returns the string representation of storage layout information. + * + * @return the string representation of storage layout information. + */ + public final String getStorageLayout() { + if (!inited) init(); + + return storageLayout.toString(); + } + + /** + * Returns the string representation of storage information. + * + * @return the string representation of storage information. + */ + public final String getStorage() { + if (!inited) init(); + + return storage.toString(); + } + + /** + * Returns the array that contains the dimension sizes of the chunk of the + * dataset. Returns null if the dataset is not chunked. + * + * @return the array of chunk sizes or returns null if the dataset is not + * chunked. + */ + public final long[] getChunkSize() { + if (!inited) init(); + + return chunkSize; + } + + @Override + public Datatype getDatatype() { + return datatype; + } + + /** + * @deprecated Not for public use in the future.
+ * Using {@link #convertFromUnsignedC(Object, Object)} + * + * @param dataIN the object data + * + * @return the converted object + */ + @Deprecated + public static Object convertFromUnsignedC(Object dataIN) { + return Dataset.convertFromUnsignedC(dataIN, null); + } + + /** + * Converts one-dimension array of unsigned C-type integers to a new array + * of appropriate Java integer in memory. + *

+ * Since Java does not support unsigned integers, values of unsigned C-type + * integers must be converted into their appropriate Java integers. Otherwise, + * the data values will not be displayed correctly. For example, if an unsigned + * C byte, x = 200, is stored into a Java byte y, y will be -56 instead of + * the correct value of 200. + *
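+ * A small, hypothetical illustration of this upgrade:
+ *
+     * byte[] raw = {(byte) 200}; // reads back as -56 in Java
+     * short[] up = (short[]) Dataset.convertFromUnsignedC(raw, null); // {200}
+ *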

+ * Unsigned C integers are upgraded to Java integers according to the
+ * following table (Mapping Unsigned C Integers to Java Integers):
+ *
+     *     Unsigned C Integer      Java Integer
+     *     ------------------      ------------
+     *     unsigned byte           signed short
+     *     unsigned short          signed int
+     *     unsigned int            signed long
+     *     unsigned long           signed long
+ *
+ * NOTE: this conversion cannot deal with unsigned 64-bit integers. + * Therefore, the values of unsigned 64-bit datasets may be wrong in Java + * applications. + *

+ * If memory data of unsigned integers is converted by + * convertFromUnsignedC(), convertToUnsignedC() must be called to convert + * the data back to unsigned C before data is written into file. + * + * @see #convertToUnsignedC(Object, Object) + * + * @param dataIN + * the input 1D array of the unsigned C-type integers. + * @param dataOUT + * the output converted (or upgraded) 1D array of Java integers. + * + * @return the upgraded 1D array of Java integers. + */ + @SuppressWarnings("rawtypes") + public static Object convertFromUnsignedC(Object dataIN, Object dataOUT) { + log.trace("convertFromUnsignedC(): start"); + + if (dataIN == null) { + log.debug("convertFromUnsignedC(): data_in is null"); + log.trace("convertFromUnsignedC(): finish"); + return null; + } + + Class dataClass = dataIN.getClass(); + if (!dataClass.isArray()) { + log.debug("convertFromUnsignedC(): data_in not an array"); + log.trace("convertFromUnsignedC(): finish"); + return null; + } + + if (dataOUT != null) { + Class dataClassOut = dataOUT.getClass(); + if (!dataClassOut.isArray() || (Array.getLength(dataIN) != Array.getLength(dataOUT))) { + log.debug("convertFromUnsignedC(): data_out not an array or does not match data_in size"); + dataOUT = null; + } + } + + String cname = dataClass.getName(); + char dname = cname.charAt(cname.lastIndexOf('[') + 1); + int size = Array.getLength(dataIN); + log.trace("convertFromUnsignedC(): cname={} dname={} size={}", cname, dname, size); + + if (dname == 'B') { + log.debug("convertFromUnsignedC(): Java convert byte to short"); + short[] sdata = null; + if (dataOUT == null) { + sdata = new short[size]; + } + else { + sdata = (short[]) dataOUT; + } + + byte[] bdata = (byte[]) dataIN; + for (int i = 0; i < size; i++) { + sdata[i] = (short) ((bdata[i] + 256) & 0xFF); + } + + dataOUT = sdata; + } + else if (dname == 'S') { + log.debug("convertFromUnsignedC(): Java convert short to int"); + int[] idata = null; + if (dataOUT == null) { + idata = new int[size]; + } + else { + idata = (int[]) dataOUT; + } + + short[] sdata = (short[]) dataIN; + for (int i = 0; i < size; i++) { + idata[i] = (sdata[i] + 65536) & 0xFFFF; + } + + dataOUT = idata; + } + else if (dname == 'I') { + log.debug("convertFromUnsignedC(): Java convert int to long"); + long[] ldata = null; + if (dataOUT == null) { + ldata = new long[size]; + } + else { + ldata = (long[]) dataOUT; + } + + int[] idata = (int[]) dataIN; + for (int i = 0; i < size; i++) { + ldata[i] = (idata[i] + 4294967296L) & 0xFFFFFFFFL; + } + + dataOUT = ldata; + } + else { + dataOUT = dataIN; + log.debug("convertFromUnsignedC(): Java does not support unsigned long"); + } + + log.trace("convertFromUnsignedC(): finish"); + return dataOUT; + } + + /** + * @deprecated Not for public use in the future.
+ * Using {@link #convertToUnsignedC(Object, Object)} + * + * @param dataIN + * the input 1D array of the unsigned C-type integers. + * + * @return the upgraded 1D array of Java integers. + */ + @Deprecated + public static Object convertToUnsignedC(Object dataIN) { + return Dataset.convertToUnsignedC(dataIN, null); + } + + /** + * Converts the array of converted unsigned integers back to unsigned C-type + * integer data in memory. + *

+ * If memory data of unsigned integers is converted by + * convertFromUnsignedC(), convertToUnsignedC() must be called to convert + * the data back to unsigned C before data is written into file. + * + * @see #convertFromUnsignedC(Object, Object) + * + * @param dataIN + * the input array of the Java integer. + * @param dataOUT + * the output array of the unsigned C-type integer. + * + * @return the converted data of unsigned C-type integer array. + */ + @SuppressWarnings("rawtypes") + public static Object convertToUnsignedC(Object dataIN, Object dataOUT) { + log.trace("convertToUnsignedC(): start"); + + if (dataIN == null) { + log.debug("convertToUnsignedC(): data_in is null"); + log.trace("convertToUnsignedC(): finish"); + return null; + } + + Class dataClass = dataIN.getClass(); + if (!dataClass.isArray()) { + log.debug("convertToUnsignedC(): data_in not an array"); + log.trace("convertToUnsignedC(): finish"); + return null; + } + + if (dataOUT != null) { + Class dataClassOut = dataOUT.getClass(); + if (!dataClassOut.isArray() || (Array.getLength(dataIN) != Array.getLength(dataOUT))) { + log.debug("convertToUnsignedC(): data_out not an array or does not match data_in size"); + dataOUT = null; + } + } + + String cname = dataClass.getName(); + char dname = cname.charAt(cname.lastIndexOf('[') + 1); + int size = Array.getLength(dataIN); + log.trace("convertToUnsignedC(): cname={} dname={} size={}", cname, dname, size); + + if (dname == 'S') { + log.debug("convertToUnsignedC(): Java convert short to byte"); + byte[] bdata = null; + if (dataOUT == null) { + bdata = new byte[size]; + } + else { + bdata = (byte[]) dataOUT; + } + short[] sdata = (short[]) dataIN; + for (int i = 0; i < size; i++) { + bdata[i] = (byte) sdata[i]; + } + dataOUT = bdata; + } + else if (dname == 'I') { + log.debug("convertToUnsignedC(): Java convert int to short"); + short[] sdata = null; + if (dataOUT == null) { + sdata = new short[size]; + } + else { + sdata = (short[]) dataOUT; + } + int[] idata = (int[]) dataIN; + for (int i = 0; i < size; i++) { + sdata[i] = (short) idata[i]; + } + dataOUT = sdata; + } + else if (dname == 'J') { + log.debug("convertToUnsignedC(): Java convert long to int"); + int[] idata = null; + if (dataOUT == null) { + idata = new int[size]; + } + else { + idata = (int[]) dataOUT; + } + long[] ldata = (long[]) dataIN; + for (int i = 0; i < size; i++) { + idata[i] = (int) ldata[i]; + } + dataOUT = idata; + } + else { + dataOUT = dataIN; + log.debug("convertToUnsignedC(): Java does not support unsigned long"); + } + + log.trace("convertToUnsignedC(): finish"); + return dataOUT; + } + + /** + * Converts an array of bytes into an array of Strings for a fixed string + * dataset. + *

+ * A C-string is an array of chars while a Java String is an object. When a + * string dataset is read into a Java application, the data is stored in an + * array of Java bytes. byteToString() is used to convert the array of bytes + * into an array of Java strings so that applications can display and modify + * the data content. + *

+ * For example, the content of a two element C string dataset is {"ABC", + * "abc"}. Java applications will read the data into a byte array of {65, + * 66, 67, 97, 98, 99}. byteToString(bytes, 3) returns an array of Java + * Strings with strs[0]="ABC" and strs[1]="abc". + *
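+ * A short, hypothetical sketch of that round trip (values from the example
+ * above):
+ *
+     * byte[] bytes = {65, 66, 67, 97, 98, 99};
+     * String[] strs = Dataset.byteToString(bytes, 3); // {"ABC", "abc"}
+     * byte[] back = Dataset.stringToByte(strs, 3);    // the original bytes
+ *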

+ * If memory data of strings is converted to Java Strings, stringToByte() + * must be called to convert the memory data back to byte array before data + * is written to file. + * + * @see #stringToByte(String[], int) + * + * @param bytes + * the array of bytes to convert. + * @param length + * the length of string. + * + * @return the array of Java String. + */ + public static final String[] byteToString(byte[] bytes, int length) { + log.trace("byteToString(): start"); + + if (bytes == null) { + log.debug("byteToString(): input is null"); + log.trace("byteToString(): finish"); + return null; + } + + int n = bytes.length / length; + log.trace("byteToString(): n={} from length of {}", n, length); + String[] strArray = new String[n]; + String str = null; + int idx = 0; + for (int i = 0; i < n; i++) { + str = new String(bytes, i * length, length); + idx = str.indexOf('\0'); + if (idx >= 0) { + str = str.substring(0, idx); + } + + // trim only the end + int end = str.length(); + while (end > 0 && str.charAt(end - 1) <= '\u0020') + end--; + + strArray[i] = (end <= 0) ? "" : str.substring(0, end); + } + + log.trace("byteToString(): finish"); + return strArray; + } + + /** + * Converts a string array into an array of bytes for a fixed string + * dataset. + *

+ * If memory data of strings is converted to Java Strings, stringToByte() + * must be called to convert the memory data back to byte array before data + * is written to file. + * + * @see #byteToString(byte[] bytes, int length) + * + * @param strings + * the array of string. + * @param length + * the length of string. + * + * @return the array of bytes. + */ + public static final byte[] stringToByte(String[] strings, int length) { + log.trace("stringToByte(): start"); + + if (strings == null) { + log.debug("stringToByte(): input is null"); + log.trace("stringToByte(): finish"); + return null; + } + + int size = strings.length; + byte[] bytes = new byte[size * length]; + log.trace("stringToByte(): size={} length={}", size, length); + StringBuilder strBuff = new StringBuilder(length); + for (int i = 0; i < size; i++) { + // initialize the string with spaces + strBuff.replace(0, length, " "); + + if (strings[i] != null) { + if (strings[i].length() > length) { + strings[i] = strings[i].substring(0, length); + } + strBuff.replace(0, length, strings[i]); + } + + strBuff.setLength(length); + System.arraycopy(strBuff.toString().getBytes(), 0, bytes, length * i, length); + } + + log.trace("stringToByte(): finish"); + + return bytes; + } + + /** + * Returns the array of strings that represent the dimension names. Returns + * null if there is no dimension name. + *

+ * Some datasets have pre-defined names for each dimension such as + * "Latitude" and "Longitude". getDimNames() returns these pre-defined + * names. + * + * @return the names of dimensions, or null if there is no dimension name. + */ + public final String[] getDimNames() { + if (!inited) init(); + + return dimNames; + } + + /** + * Checks if a given datatype is a string. Sub-classes must replace this + * default implementation. + * + * @param tid + * The data type identifier. + * + * @return true if the datatype is a string; otherwise returns false. + */ + public boolean isString(long tid) { + return false; + } + + /** + * Returns the size in bytes of a given datatype. Sub-classes must replace + * this default implementation. + * + * @param tid + * The data type identifier. + * + * @return The size of the datatype + */ + public long getSize(long tid) { + return -1; + } + + /** + * Get Class of the original data buffer if converted. + * + * @return the Class of originalBuf + */ + @Override + @SuppressWarnings("rawtypes") + public final Class getOriginalClass() { + return originalBuf.getClass(); + } + + /* + * Checks if dataset is virtual. Sub-classes must replace + * this default implementation. + * + * @return true if the dataset is virtual; otherwise returns false. + */ + public boolean isVirtual() { + return false; + } + + /* + * Gets the source file name at index if dataset is virtual. Sub-classes must replace + * this default implementation. + * + * @return filename if the dataset is virtual; otherwise returns null. + */ + public String getVirtualFilename(int index) { + return null; + } + + /* + * Gets the number of source files if dataset is virtual. Sub-classes must replace + * this default implementation. + * + * @return the list size if the dataset is virtual; otherwise returns negative. + */ + public int getVirtualMaps() { + return -1; + } +} diff --git a/src/main/java/hdf/object/Datatype.java b/src/main/java/hdf/object/Datatype.java new file mode 100644 index 0000000..31b8567 --- /dev/null +++ b/src/main/java/hdf/object/Datatype.java @@ -0,0 +1,933 @@ +/***************************************************************************** + * Copyright by The HDF Group. * + * Copyright by the Board of Trustees of the University of Illinois. * + * All rights reserved. * + * * + * This file is part of the HDF Java Products distribution. * + * The full copyright notice, including terms governing use, modification, * + * and redistribution, is contained in the files COPYING and Copyright.html. * + * COPYING can be found at the root of the source code distribution tree. * + * Or, see https://support.hdfgroup.org/products/licenses.html * + * If you do not have access to either file, you may request a copy from * + * help@hdfgroup.org. * + ****************************************************************************/ + +package hdf.object; + +import java.util.ArrayList; +import java.util.HashMap; +import java.util.Iterator; +import java.util.List; +import java.util.Map; +import java.util.Map.Entry; + +/** + * Datatype is an abstract class that defines datatype characteristics and APIs for a data type. + *

+ * A datatype has four basic characteristics: class, size, byte order and sign. These + * characteristics are defined in the + * HDF5 User's Guide. + *

+ * These characteristics apply to all the sub-classes. The sub-classes may have different ways to + * describe a datatype. Here we define the native datatype as the datatype used by + * the sub-class. For example, H5Datatype uses a datatype identifier (hid_t) to specify a datatype, + * while NC2Datatype uses a ucar.nc2.DataType object to describe its datatype. "Native" here is different + * from the "native" definition in the HDF5 library. + *

+ * Two functions, createNative() and fromNative(), are defined to convert the general + * characteristics to/from the native datatype. Sub-classes must implement these functions so that + * the conversion will be done correctly. The values of the CLASS member are not identical to HDF5 + * values for a datatype class. + *
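A minimal sketch of that contract, assuming the H5Datatype sub-class (package hdf.object.h5, with the constructor shape used in the examples below):

    import hdf.object.Datatype;
    import hdf.object.h5.H5Datatype;

    public class NativeConversion {
        public static void main(String[] args) throws Exception {
            // General characteristics -> native HDF5 identifier (hid_t).
            H5Datatype dtype = new H5Datatype(Datatype.CLASS_INTEGER, 4,
                    Datatype.NATIVE, Datatype.SIGN_NONE);
            long tid = dtype.createNative(); // caller owns the native id
            dtype.close(tid);                // and must close it
        }
    }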

+ * + * @version 1.1 9/4/2007 + * @author Peter X. Cao + */ +public abstract class Datatype extends HObject implements MetaDataContainer { + + private static final long serialVersionUID = -581324710549963177L; + + private static final org.slf4j.Logger log = org.slf4j.LoggerFactory.getLogger(Datatype.class); + + /** + * The default definition for datatype size, order, and sign. + */ + public static final int NATIVE = -1; + + /** + * See HDF5 User's Guide + */ + public static final int CLASS_NO_CLASS = -1; + + /** + * See HDF5 User's Guide + */ + public static final int CLASS_INTEGER = 0; + + /** + * See HDF5 User's Guide + */ + public static final int CLASS_FLOAT = 1; + + /** + * See HDF5 User's Guide + */ + public static final int CLASS_CHAR = 2; + + /** + * See HDF5 User's Guide + */ + public static final int CLASS_STRING = 3; + + /** + * See HDF5 User's Guide + */ + public static final int CLASS_BITFIELD = 4; + + /** + * See HDF5 User's Guide + */ + public static final int CLASS_OPAQUE = 5; + + /** + * See HDF5 User's Guide + */ + public static final int CLASS_COMPOUND = 6; + + /** + * See HDF5 User's Guide + */ + public static final int CLASS_REFERENCE = 7; + + /** + * See HDF5 User's Guide + */ + public static final int CLASS_ENUM = 8; + + /** + * See HDF5 User's Guide + */ + public static final int CLASS_VLEN = 9; + + /** + * See HDF5 User's Guide + */ + public static final int CLASS_ARRAY = 10; + + /** + * See HDF5 User's Guide + */ + public static final int CLASS_TIME = 11; + + /** + * See HDF5 User's Guide + */ + public static final int ORDER_LE = 0; + + /** + * See HDF5 User's Guide + */ + public static final int ORDER_BE = 1; + + /** + * See HDF5 User's Guide + */ + public static final int ORDER_VAX = 2; + + /** + * See HDF5 User's Guide + */ + public static final int ORDER_NONE = 3; + + /** + * See HDF5 User's Guide + */ + public static final int SIGN_NONE = 0; + + /** + * See HDF5 User's Guide + */ + public static final int SIGN_2 = 1; + + /** + * See HDF5 User's Guide + */ + public static final int NSGN = 2; + + protected String datatypeDescription = null; + + /** + * The class of the datatype. + */ + protected int datatypeClass; + + /** + * The size (in bytes) of the datatype. + */ + protected long datatypeSize; + + /** + * The byte order of the datatype. Valid values are ORDER_LE, ORDER_BE, and + * ORDER_VAX. + */ + protected int datatypeOrder; + + /** + * The sign of the datatype. + */ + protected int datatypeSign; + + /** + * The base datatype of this datatype (null if this datatype is atomic). + */ + protected Datatype baseType; + + /** + * The dimensions of the ARRAY element of an ARRAY datatype. + */ + protected long[] arrayDims; + + /** + * Determines whether this datatype is a variable-length type. + */ + protected boolean isVLEN = false; + protected boolean isVariableStr = false; + + /** + * The (name, value) pairs of enum members. + */ + protected Map enumMembers; + + /** + * The list of names of members of a compound Datatype. + */ + protected List compoundMemberNames; + + /** + * The list of types of members of a compound Datatype. + */ + protected List compoundMemberTypes; + + /** + * The list of offsets of members of a compound Datatype. + */ + protected List compoundMemberOffsets; + + /** + * Constructs a named datatype with a given file, name and path. + * + * @param theFile + * the HDF file. + * @param typeName + * the name of the datatype, e.g "12-bit Integer". + * @param typePath + * the full group path of the datatype, e.g. "/datatypes/". 
+ */ + public Datatype(FileFormat theFile, String typeName, String typePath) { + this(theFile, typeName, typePath, null); + } + + /** + * @deprecated Not for public use in the future.
+ * Use {@link #Datatype(FileFormat, String, String)} instead. + * + * @param theFile + * the HDF file. + * @param typeName + * the name of the datatype, e.g. "12-bit Integer". + * @param typePath + * the full group path of the datatype, e.g. "/datatypes/". + * @param oid + * the oid of the datatype. + */ + @Deprecated + public Datatype(FileFormat theFile, String typeName, String typePath, long[] oid) { + super(theFile, typeName, typePath, oid); + } + + /** + * Constructs a Datatype with specified class, size, byte order and sign. + *

+ * The following is a list of a few examples of Datatype. + *

    + *
  1. to create an unsigned native integer
    + * Datatype type = new Datatype(Datatype.CLASS_INTEGER, Datatype.NATIVE, Datatype.NATIVE, Datatype.SIGN_NONE); + *
  2. to create a 16-bit signed integer with big-endian byte order
    + * Datatype type = new Datatype(Datatype.CLASS_INTEGER, 2, Datatype.ORDER_BE, Datatype.NATIVE); + *
  3. to create a native float
    + * Datatype type = new Datatype(Datatype.CLASS_FLOAT, Datatype.NATIVE, Datatype.NATIVE, Datatype.NATIVE); + *
  4. to create a 64-bit double
    + * Datatype type = new Datatype(Datatype.CLASS_FLOAT, 8, Datatype.NATIVE, Datatype.NATIVE); + *
+ * + * @param tclass + * the class of the datatype, e.g. CLASS_INTEGER, CLASS_FLOAT, etc. + * @param tsize + * the size of the datatype in bytes, e.g. for a 32-bit integer, the size is 4. + * Valid values are NATIVE or a positive value. + * @param torder + * the byte order of the datatype. Valid values are ORDER_LE, ORDER_BE, ORDER_VAX, + * ORDER_NONE and NATIVE. + * @param tsign + * the sign of the datatype. Valid values are SIGN_NONE, SIGN_2 and NATIVE. + * + * @throws Exception + * if there is an error + */ + public Datatype(int tclass, int tsize, int torder, int tsign) throws Exception { + this(tclass, tsize, torder, tsign, null); + } + + /** + * Constructs a Datatype with specified class, size, byte order and sign. + *

+ * The following is a list of a few examples of Datatype. + *

    + *
  1. to create an unsigned native integer
    + * Datatype type = new Datatype(Datatype.CLASS_INTEGER, Datatype.NATIVE, Datatype.NATIVE, Datatype.SIGN_NONE); + *
  2. to create a 16-bit signed integer with big-endian byte order
    + * Datatype type = new Datatype(Datatype.CLASS_INTEGER, 2, Datatype.ORDER_BE, Datatype.NATIVE); + *
  3. to create a native float
    + * Datatype type = new Datatype(Datatype.CLASS_FLOAT, Datatype.NATIVE, Datatype.NATIVE, Datatype.NATIVE); + *
  4. to create a 64-bit double
    + * Datatype type = new Datatype(Datatype.CLASS_FLOAT, 8, Datatype.NATIVE, Datatype.NATIVE); + *
+ * + * @param tclass + * the class of the datatype, e.g. CLASS_INTEGER, CLASS_FLOAT, + * etc. + * @param tsize + * the size of the datatype in bytes, e.g. for a 32-bit integer, + * the size is 4. + * Valid values are NATIVE or a positive value. + * @param torder + * the byte order of the datatype. Valid values are ORDER_LE, + * ORDER_BE, ORDER_VAX, ORDER_NONE and NATIVE. + * @param tsign + * the sign of the datatype. Valid values are SIGN_NONE, SIGN_2 and NATIVE. + * @param tbase + * the base datatype of the new datatype + * + * @throws Exception + * if there is an error + */ + public Datatype(int tclass, int tsize, int torder, int tsign, Datatype tbase) throws Exception { + this(tclass, tsize, torder, tsign, tbase, null); + } + + /** + * Constructs a Datatype with specified class, size, byte order and sign. + *

+ * The following is a list of a few examples of Datatype. + *

    + *
  1. to create an unsigned native integer
    + * Datatype type = new Datatype(Datatype.CLASS_INTEGER, Datatype.NATIVE, Datatype.NATIVE, Datatype.SIGN_NONE); + *
  2. to create a 16-bit signed integer with big-endian byte order
    + * Datatype type = new Datatype(Datatype.CLASS_INTEGER, 2, Datatype.ORDER_BE, Datatype.NATIVE); + *
  3. to create a native float
    + * Datatype type = new Datatype(Datatype.CLASS_FLOAT, Datatype.NATIVE, Datatype.NATIVE, Datatype.NATIVE); + *
  4. to create a 64-bit double
    + * Datatype type = new Datatype(Datatype.CLASS_FLOAT, 8, Datatype.NATIVE, Datatype.NATIVE); + *
+ * + * @param tclass + * the class of the datatype, e.g. CLASS_INTEGER, CLASS_FLOAT, etc. + * @param tsize + * the size of the datatype in bytes, e.g. for a 32-bit integer, the size is 4. + * Valid values are NATIVE or a positive value. + * @param torder + * the byte order of the datatype. Valid values are ORDER_LE, ORDER_BE, ORDER_VAX, + * ORDER_NONE and NATIVE. + * @param tsign + * the sign of the datatype. Valid values are SIGN_NONE, SIGN_2 and NATIVE. + * @param tbase + * the base datatype of the new datatype + * @param pbase + * the parent datatype of the new datatype + * + * @throws Exception + * if there is an error + */ + public Datatype(int tclass, int tsize, int torder, int tsign, Datatype tbase, Datatype pbase) throws Exception { + if ((tsize == 0) || (tsize < 0 && tsize != NATIVE)) + throw new Exception("invalid datatype size - " + tsize); + if ((torder != ORDER_LE) && (torder != ORDER_BE) && (torder != ORDER_VAX) + && (torder != ORDER_NONE) && (torder != NATIVE)) + throw new Exception("invalid datatype order - " + torder); + if ((tsign != SIGN_NONE) && (tsign != SIGN_2) && (tsign != NATIVE)) + throw new Exception("invalid datatype sign - " + tsign); + + datatypeClass = tclass; + datatypeSize = tsize; + datatypeOrder = torder; + datatypeSign = tsign; + enumMembers = null; + baseType = tbase; + arrayDims = null; + isVariableStr = (datatypeClass == Datatype.CLASS_STRING) && (tsize < 0); + isVLEN = (datatypeClass == Datatype.CLASS_VLEN) || isVariableStr; + + compoundMemberNames = new ArrayList<>(); + compoundMemberTypes = new ArrayList<>(); + compoundMemberOffsets = new ArrayList<>(); + + log.trace("datatypeClass={} datatypeSize={} datatypeOrder={} datatypeSign={} baseType={}", + datatypeClass, datatypeSize, datatypeOrder, datatypeSign, baseType); + } + + /** + * Constructs a Datatype with a given native datatype identifier. + *

+ * For example, if the datatype identifier is a 32-bit unsigned integer created from HDF5, + * + *

+     * long tid = H5.H5Tcopy(HDF5Constants.H5T_NATIVE_UINT32);
+     * Datatype dtype = new Datatype(tid);
+     * 
+ * + * will construct a datatype equivalent to new Datatype(CLASS_INTEGER, 4, NATIVE, SIGN_NONE); + * + * @see #fromNative(long tid) + * @param tid + * the native datatype identifier. + * + * @throws Exception + * if there is an error + */ + public Datatype(long tid) throws Exception { + this(tid, null); + } + + /** + * Constructs a Datatype with a given native datatype identifier. + *

+ * For example, if the datatype identifier is a 32-bit unsigned integer created from HDF5, + * + *

+     * long tid = H5.H5Tcopy(HDF5Constants.H5T_NATIVE_UINT32);
+     * Datatype dtype = new Datatype(tid);
+     * 
+ * + * will construct a datatype equivalent to new Datatype(CLASS_INTEGER, 4, NATIVE, SIGN_NONE); + * + * @see #fromNative(long tid) + * @param tid + * the native datatype identifier. + * @param pbase + * the parent datatype of the new datatype + * + * @throws Exception + * if there is an error + */ + public Datatype(long tid, Datatype pbase) throws Exception { + this(CLASS_NO_CLASS, NATIVE, NATIVE, NATIVE, null, pbase); + } + + /** + * Opens access to this named datatype. Sub-classes must replace this default implementation. For + * example, in H5Datatype, open() calls H5.H5Topen(loc_id, name) to get the datatype identifier. + * + * @return the datatype identifier if successful; otherwise returns a negative value. + */ + @Override + public long open() { + return -1; + } + + /** + * Closes a datatype identifier. + *

+ * Sub-classes must replace this default implementation. + * + * @param id + * the datatype identifier to close. + */ + @Override + public abstract void close(long id); + + /** + * Returns the class of the datatype. Valid values are: + *

    + *
  • CLASS_NO_CLASS + *
  • CLASS_INTEGER + *
  • CLASS_FLOAT + *
  • CLASS_CHAR + *
  • CLASS_STRING + *
  • CLASS_BITFIELD + *
  • CLASS_OPAQUE + *
  • CLASS_COMPOUND + *
  • CLASS_REFERENCE + *
  • CLASS_ENUM + *
  • CLASS_VLEN + *
  • CLASS_ARRAY + *
+ * + * @return the class of the datatype. + */ + public int getDatatypeClass() { + return datatypeClass; + } + + /** + * Returns the size of the datatype in bytes. For example, for a 32-bit + * integer, the size is 4 (bytes). + * + * @return the size of the datatype. + */ + public long getDatatypeSize() { + return datatypeSize; + } + + /** + * Returns the byte order of the datatype. Valid values are + *
    + *
  • ORDER_LE + *
  • ORDER_BE + *
  • ORDER_VAX + *
  • ORDER_NONE + *
+ * + * @return the byte order of the datatype. + */ + public int getDatatypeOrder() { + return datatypeOrder; + } + + /** + * Returns the sign (SIGN_NONE, SIGN_2) of an integer datatype. + * + * @return the sign of the datatype. + */ + public int getDatatypeSign() { + return datatypeSign; + } + + /** + * Returns the base datatype for this datatype. + *

+ * For example, for a dataset of type ARRAY of integer, the datatype of the dataset is ARRAY + * and its base datatype is integer. + * + * @return the datatype of the contained basetype. + */ + public Datatype getDatatypeBase() { + return baseType; + } + + /** + * Sets the (key, value) pairs of enum members for an enum datatype. + *

+ * For example, + *

+ *
setEnumMembers("-40=lowTemp, 90=highTemp")
+ *
sets the value of enum member lowTemp to -40 and highTemp to 90.
+ *
setEnumMembers("lowTemp, highTemp")
+ *
sets enum members to defaults, i.e. 0=lowTemp and 1=highTemp.
+ *
setEnumMembers("10=lowTemp, highTemp")
+ *
sets enum member lowTemp to 10 and highTemp to 11.
+ *
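A short usage sketch of the examples above, assuming an enclosing method and dtype, a hypothetical enum Datatype instance (e.g. an H5Datatype of CLASS_ENUM):

    // Map lowTemp to -40 and highTemp to 90, then read the pairs back.
    dtype.setEnumMembers("-40=lowTemp, 90=highTemp");
    String pairs = dtype.getEnumMembersAsString();
    // e.g. "-40=lowTemp, 90=highTemp" (HashMap-backed, so order may vary)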
+ * + * @param enumStr + * the (key, value) pairs of enum members + */ + public final void setEnumMembers(String enumStr) { + log.trace("setEnumMembers: is_enum enum_members={}", enumStr); + enumMembers = new HashMap<>(); + String[] entries = enumStr.split(","); + for (String entry : entries) { + String[] keyValue = entry.split("="); + enumMembers.put(keyValue[0].trim(), keyValue[1].trim()); + if (log.isTraceEnabled()) + log.trace("setEnumMembers: is_enum value={} name={}", keyValue[0].trim(), keyValue[1].trim()); + } + } + + /** + * Returns the Map<String,String> pairs of enum members for enum datatype. + * + * @return enumStr Map<String,String> pairs of enum members + */ + public final Map<String, String> getEnumMembers() { + if (enumMembers == null) { + enumMembers = new HashMap<>(); + enumMembers.put("1", "0"); + enumMembers.put("2", "1"); + } + + return enumMembers; + } + + /** + * Returns the HashMap pairs of enum members for enum datatype. + *

+ * For example, + *

+ *
getEnumMembersAsString()
+ *
returns "10=lowTemp, 40=highTemp"
+ *
+ * + * @return enumStr the (key, value) pairs of enum members + */ + @SuppressWarnings("rawtypes") + public final String getEnumMembersAsString() { + if (enumMembers == null) { + enumMembers = new HashMap<>(); + enumMembers.put("1", "0"); + enumMembers.put("2", "1"); + } + + StringBuilder enumStr = new StringBuilder(); + Iterator<Entry<String, String>> entries = enumMembers.entrySet().iterator(); + int i = enumMembers.size(); + while (entries.hasNext()) { + Entry thisEntry = entries.next(); + enumStr.append((String) thisEntry.getKey()) + .append("=") + .append((String) thisEntry.getValue()); + + i--; + if (i > 0) + enumStr.append(", "); + } + return enumStr.toString(); + } + + /** + * Returns the dimensions of an Array Datatype. + * + * @return dims the dimensions of the Array Datatype + */ + public final long[] getArrayDims() { + return arrayDims; + } + + public final List<String> getCompoundMemberNames() { + return compoundMemberNames; + } + + public final List<Datatype> getCompoundMemberTypes() { + return compoundMemberTypes; + } + + /** + * Converts the datatype object to a native datatype. + * + * Subclasses must implement it so that this datatype will be converted accordingly. Use close() to + * close the native identifier; otherwise, the datatype will be left open. + *

+ * For example, an HDF5 datatype created from
+ * + *

+     * H5Datatype dtype = new H5Datatype(CLASS_INTEGER, 4, NATIVE, SIGN_NONE);
+     * long tid = dtype.createNative();
+     * 
+ * + * The "tid" will be the HDF5 datatype id of a 64-bit unsigned integer, which is equivalent to + * + *
+     * long tid = H5.H5Tcopy(HDF5Constants.H5T_NATIVE_UINT32);
+     * 
+ * + * @return the identifier of the native datatype. + */ + public abstract long createNative(); + + /** + * Set datatype characteristics (class, size, byte order and sign) from a given datatype identifier. + *

+ * Sub-classes must implement it so that this datatype will be converted accordingly. + *

+ * For example, if the type identifier is a 32-bit unsigned integer created from HDF5, + *

+     * H5Datatype dtype = new H5Datatype();
+     * dtype.fromNative(HDF5Constants.H5T_NATIVE_UINT32);
+     * 
+ * + * where dtype is equivalent to
+ * new H5Datatype(CLASS_INTEGER, 4, NATIVE, SIGN_NONE); + * + * @param nativeID + * the datatype identifier. + */ + public abstract void fromNative(long nativeID); + + /** + * Returns a short text description of this datatype. + * + * @return a short text description of this datatype + */ + public String getDescription() { + log.trace("getDescription(): start"); + + if (datatypeDescription != null) { + log.trace("getDescription(): finish"); + return datatypeDescription; + } + + StringBuilder description = new StringBuilder(); + + switch (datatypeClass) { + case CLASS_CHAR: + description.append("8-bit ").append((isUnsigned() ? "unsigned " : "")).append("integer"); + break; + case CLASS_INTEGER: + if (datatypeSize == NATIVE) + description.append("native ").append((isUnsigned() ? "unsigned " : "")).append("integer"); + else + description.append(String.valueOf(datatypeSize * 8)).append("-bit ") + .append((isUnsigned() ? "unsigned " : "")).append("integer"); + break; + case CLASS_FLOAT: + if (datatypeSize == NATIVE) + description.append("native floating-point"); + else + description.append(String.valueOf(datatypeSize * 8)).append("-bit floating-point"); + break; + case CLASS_STRING: + description.append("String"); + break; + case CLASS_REFERENCE: + description.append("Object reference"); + break; + case CLASS_OPAQUE: + if (datatypeSize == NATIVE) + description.append("native opaque"); + else + description.append(String.valueOf(datatypeSize * 8)).append("-bit opaque"); + break; + case CLASS_BITFIELD: + if (datatypeSize == NATIVE) + description.append("native bitfield"); + else + description.append(String.valueOf(datatypeSize * 8)).append("-bit bitfield"); + break; + case CLASS_ENUM: + if (datatypeSize == NATIVE) + description.append("native enum"); + else + description.append(String.valueOf(datatypeSize * 8)).append("-bit enum"); + break; + case CLASS_ARRAY: + description.append("Array"); + + if (arrayDims != null) { + description.append(" ["); + for (int i = 0; i < arrayDims.length; i++) { + description.append(arrayDims[i]); + if (i < arrayDims.length - 1) + description.append(" x "); + } + description.append("]"); + } + + break; + case CLASS_COMPOUND: + description.append("Compound"); + break; + case CLASS_VLEN: + description.append("Variable-length"); + break; + default: + description.append("Unknown"); + break; + } + + if (baseType != null) { + description.append(" of " + baseType.getDescription()); + } + + log.trace("getDescription(): finish"); + return description.toString(); + } + + /** + * Checks if this datatype is unsigned. + * + * @return true if the datatype is unsigned; + * otherwise, returns false. + */ + public boolean isUnsigned() { + if (baseType != null) + return baseType.isUnsigned(); + else { + if (isCompound()) { + if ((compoundMemberTypes != null) && !compoundMemberTypes.isEmpty()) { + boolean allMembersUnsigned = true; + + Iterator cmpdTypeListIT = compoundMemberTypes.iterator(); + while (cmpdTypeListIT.hasNext()) { + Datatype next = cmpdTypeListIT.next(); + + allMembersUnsigned = allMembersUnsigned && next.isUnsigned(); + } + + return allMembersUnsigned; + } + else { + log.debug("isUnsigned(): compoundMemberTypes is null"); + return false; + } + } + else { + return (datatypeSign == Datatype.SIGN_NONE); + } + } + } + + public abstract boolean isText(); + + /** + * Checks if this datatype is an integer type. 
+ * + * @return true if the datatype is integer; false otherwise + */ + public boolean isInteger() { + return (datatypeClass == Datatype.CLASS_INTEGER); + } + + /** + * Checks if this datatype is a floating-point type. + * + * @return true if the datatype is floating-point; false otherwise + */ + public boolean isFloat() { + return (datatypeClass == Datatype.CLASS_FLOAT); + } + + /** + * Checks if this datatype is a variable-length string type. + * + * @return true if the datatype is variable-length string; false otherwise + */ + public boolean isVarStr() { + return isVariableStr; + } + + /** + * Checks if this datatype is a variable-length type. + * + * @return true if the datatype is variable-length; false otherwise + */ + public boolean isVLEN() { + return isVLEN; + } + + /** + * Checks if this datatype is a compound type. + * + * @return true if the datatype is compound; false otherwise + */ + public boolean isCompound() { + return (datatypeClass == Datatype.CLASS_COMPOUND); + } + + /** + * Checks if this datatype is an array type. + * + * @return true if the datatype is array; false otherwise + */ + public boolean isArray() { + return (datatypeClass == Datatype.CLASS_ARRAY); + } + + /** + * Checks if this datatype is a string type. + * + * @return true if the datatype is string; false otherwise + */ + public boolean isString() { + return (datatypeClass == Datatype.CLASS_STRING); + } + + /** + * Checks if this datatype is a character type. + * + * @return true if the datatype is character; false otherwise + */ + public boolean isChar() { + return (datatypeClass == Datatype.CLASS_CHAR); + } + + /** + * Checks if this datatype is a reference type. + * + * @return true if the datatype is reference; false otherwise + */ + public boolean isRef() { + return (datatypeClass == Datatype.CLASS_REFERENCE); + } + + /** + * Checks if this datatype is an enum type. + * + * @return true if the datatype is enum; false otherwise + */ + public boolean isEnum() { + return (datatypeClass == Datatype.CLASS_ENUM); + } + + /** + * Checks if this datatype is an opaque type. + * + * @return true if the datatype is opaque; false otherwise + */ + public boolean isOpaque() { + return (datatypeClass == Datatype.CLASS_OPAQUE); + } + + /** + * Checks if this datatype is a bitfield type. + * + * @return true if the datatype is bitfield; false otherwise + */ + public boolean isBitField() { + return (datatypeClass == Datatype.CLASS_BITFIELD); + } + + /* + * (non-Javadoc) + * + * @see hdf.object.DataFormat#getMetadata() + */ + @Override + @SuppressWarnings("rawtypes") + public List getMetadata() throws Exception { + return null; + } + + /* + * (non-Javadoc) + * + * @see hdf.object.DataFormat#writeMetadata(java.lang.Object) + */ + @Override + public void writeMetadata(Object info) throws Exception { + throw new UnsupportedOperationException("Unsupported operation. Subclasses must implement Datatype:writeMetadata."); + } + + /* + * (non-Javadoc) + * + * @see hdf.object.DataFormat#removeMetadata(java.lang.Object) + */ + @Override + public void removeMetadata(Object info) throws Exception { + throw new UnsupportedOperationException("Unsupported operation. Subclasses must implement Datatype:removeMetadata."); + } + + /* + * (non-Javadoc) + * + * @see hdf.object.DataFormat#updateMetadata(java.lang.Object) + */ + @Override + public void updateMetadata(Object info) throws Exception { + throw new UnsupportedOperationException("Unsupported operation. 
Subclasses must implement Datatype:updateMetadata."); + } + + @Override + public String toString() { + return getDescription(); + } +} diff --git a/src/main/java/hdf/object/FileFormat.java b/src/main/java/hdf/object/FileFormat.java new file mode 100644 index 0000000..4aff72c --- /dev/null +++ b/src/main/java/hdf/object/FileFormat.java @@ -0,0 +1,2083 @@ +/***************************************************************************** + * Copyright by The HDF Group. * + * Copyright by the Board of Trustees of the University of Illinois. * + * All rights reserved. * + * * + * This file is part of the HDF Java Products distribution. * + * The full copyright notice, including terms governing use, modification, * + * and redistribution, is contained in the files COPYING and Copyright.html. * + * COPYING can be found at the root of the source code distribution tree. * + * Or, see https://support.hdfgroup.org/products/licenses.html * + * If you do not have access to either file, you may request a copy from * + * help@hdfgroup.org. * + ****************************************************************************/ + +package hdf.object; + +import java.io.File; +import java.util.Enumeration; +import java.util.Hashtable; +import java.util.Iterator; +import java.util.Map; +import java.util.StringTokenizer; +import java.util.Vector; + + +/** + * FileFormat defines general interfaces for working with files whose data is + * organized according to a supported format. + *

+ * FileFormat is a pluggable component. New implementing classes of FileFormat + * can be added to the list of supported file formats. Current implementing + * classes include H5File and H4File. By default, H5File and H4File are added to + * the list of supported file formats maintained by the static FileFormat + * instance. + * + *

+ *                                    FileFormat
+ *                       _________________|_________________
+ *                       |                |                |
+ *                     H5File          H4File           Other...
+ * 
+ *

+ * A FileFormat instance may exist without being associated with a given file. A + * FileFormat instance may be associated with a file that is not open for + * access. Most typically, a FileFormat instance is used to open the associated + * file and perform operations such as retrieval and manipulation (if the file + * access is read-write) of the file structure and objects. + * + * @author Peter X. Cao + * @version 2.4 9/4/2007 + */ +public abstract class FileFormat extends File { + private static final long serialVersionUID = -4700692313888420796L; + + private static final org.slf4j.Logger log = org.slf4j.LoggerFactory.getLogger(FileFormat.class); + + /*************************************************************************** + * File access flags used in calls to createInstance( String, flag ); + **************************************************************************/ + + /** + * File first time access flag for open file. With this access flag, added + * to the regular value, indicates this file has no existing state. + * + */ + public static final int OPEN_NEW = 1; + + /** + * File access flag for read-only permission. With this access flag, + * modifications to the file will not be allowed. + * + * @see #createInstance(String, int ) + */ + public static final int READ = 2; + + /** + * File access flag for read/write permission. With this access flag, + * modifications to the file will be allowed. Behavior if the file does not + * exist or cannot be opened for read/write access depends on the + * implementing class. + * + * @see #createInstance(String, int) + */ + public static final int WRITE = 4; + + /** + * File access flag for creating/truncating with read-write permission. If + * the file already exists, it will be truncated when opened. With this + * access flag, modifications to the file will be allowed. Behavior if file + * can't be created, or if it exists but can't be opened for read/write + * access, depends on the implementing class. + * + * @see #createInstance(String, int ) + */ + public static final int CREATE = 8; + + /*************************************************************************** + * File creation flags used in calls to createFile( String, flag ); + **************************************************************************/ + + /** + * Flag for creating/truncating a file. If the file already exists, it will + * be truncated when opened. If the file does not exist, it will be created. + * Modifications to the file will be allowed. + * + * @see #createFile(String, int ) + */ + public static final int FILE_CREATE_DELETE = 10; + + /** + * Flag for creating/opening a file. If the file already exists, it will be + * opened without changing the existing contents. If the file does not + * exist, it will be created. Modifications to the file will be allowed. + * + * @see #createFile(String, int ) + */ + public static final int FILE_CREATE_OPEN = 20; + + /** + * Flag to indicate if the earliest version of library is used when creating + * a new file. + * + * @see #createFile(String, int ) + */ + public static final int FILE_CREATE_EARLY_LIB = 40; + + + /*************************************************************************** + * Keys and fields related to supported file formats. + **************************************************************************/ + + /** Key for HDF4 file format. */ + public static final String FILE_TYPE_HDF4 = "HDF4"; + + /** Key for HDF5 file format. 
*/ + public static final String FILE_TYPE_HDF5 = "HDF5"; + + /** Key for NetCDF file format. */ + public static final String FILE_TYPE_NC3 = "NetCDF3"; + + /** + * A separator that separates file name and object name. + * + * @see FileFormat#getHObject(String) + */ + public static final String FILE_OBJ_SEP = "://"; + + /** + * FileList keeps a list of supported FileFormats. This list can be updated + * and queried at runtime. + * + * @see #addFileFormat(String,FileFormat) + * @see #getFileFormat(String) + * @see #getFileFormatKeys() + * @see #getFileFormats() + * @see #removeFileFormat(String) + */ + private static final Map<String, FileFormat> FileList = new Hashtable<>(10); + + /** + * A list of file extensions for the supported file formats. This list of + * file extensions is not integrated with the supported file formats kept in + * FileList, but is provided as a convenience for applications that may + * choose to process only those files with recognized extensions. + */ + private static String extensions = "hdf, h4, hdf5, h5, nc, fits"; + + /*************************************************************************** + * Sizing information and class metadata + **************************************************************************/ + + /** + * Current Java applications, such as HDFView, cannot handle files with + * large numbers of objects due to JVM memory limitations. For example, + * 1,000,000 objects is too many. max_members is defined so that + * applications such as HDFView will load up to max_members objects, + * starting with the start_members-th object. The implementing class + * has freedom in its interpretation of how to "count" objects in the file. + */ + private int max_members = 10000; // 10,000 by default + private int start_members = 0; // 0 by default + + /** + * File identifier. -1 indicates the file is not open. + */ + protected long fid = -1; + + /** + * The absolute pathname (path+name) of the file. + */ + protected String fullFileName = null; + + /** + * Flag indicating if the file access is read-only. + */ + protected boolean isReadOnly = false; + + /*************************************************************************** + * Class initialization method + **************************************************************************/ + + /** + * By default, HDF4 and HDF5 file formats are added to the supported formats + * list. 
+ */ + static { + // add HDF4 to default modules + if (FileFormat.getFileFormat(FILE_TYPE_HDF4) == null) { + try { + @SuppressWarnings("rawtypes") + Class fileclass = Class.forName("hdf.object.h4.H4File"); + FileFormat fileformat = (FileFormat) fileclass.newInstance(); + if (fileformat != null) { + FileFormat.addFileFormat(FILE_TYPE_HDF4, fileformat); + log.debug("FILE_TYPE_HDF4 file format added"); + } + } + catch (Exception err) { + log.debug("FILE_TYPE_HDF4 instance failure: ", err); + } + } + + // add HDF5 to default modules + if (FileFormat.getFileFormat(FILE_TYPE_HDF5) == null) { + try { + @SuppressWarnings("rawtypes") + Class fileclass = Class.forName("hdf.object.h5.H5File"); + FileFormat fileformat = (FileFormat) fileclass.newInstance(); + if (fileformat != null) { + FileFormat.addFileFormat(FILE_TYPE_HDF5, fileformat); + log.debug("FILE_TYPE_HDF5 file format added"); + } + } + catch (Exception err) { + log.debug("FILE_TYPE_HDF5 instance failure: ", err); + } + } + + // add NetCDF to default modules + if (FileFormat.getFileFormat(FILE_TYPE_NC3) == null) { + try { + @SuppressWarnings("rawtypes") + Class fileclass = Class.forName("hdf.object.nc2.NC2File"); + FileFormat fileformat = (FileFormat) fileclass.newInstance(); + if (fileformat != null) { + FileFormat.addFileFormat(FILE_TYPE_NC3, fileformat); + log.debug("NetCDF3 file format added"); + } + } + catch (Exception err) { + log.debug("NetCDF3 instance failure: ", err); + } + } + + // add FITS to default modules + if (FileFormat.getFileFormat("FITS") == null) { + try { + @SuppressWarnings("rawtypes") + Class fileclass = Class.forName("hdf.object.fits.FitsFile"); + FileFormat fileformat = (FileFormat) fileclass.newInstance(); + if (fileformat != null) { + FileFormat.addFileFormat("FITS", fileformat); + log.debug("Fits file format added"); + } + } + catch (Exception err) { + log.debug("FITS instance failure: ", err); + } + } + + } + + /*************************************************************************** + * Constructor + **************************************************************************/ + + /** + * Creates a new FileFormat instance with the given filename. + *

+ * The filename in this method call is equivalent to the pathname in the + * java.io.File class. The filename is converted into an abstract pathname + * by the File class. + *

+ * Typically this constructor is not called directly, but is called by a + * constructor of an implementing class. Applications most frequently use + * the createFile(), createInstance(), or getInstance() + * methods to generate a FileFormat instance with an associated filename. + *

+ * The file is not opened by this call. The read-only flag is set to false + * by this call. + * + * @param filename + * The filename; a pathname string. + * @throws NullPointerException + * If the filename argument is null. + * @see File#File(String) + * @see #createFile(String, int) + * @see #createInstance(String, int) + * @see #getInstance(String) + */ + public FileFormat(String filename) { + super(filename); + + fullFileName = filename; + + if ((filename != null) && (filename.length() > 0)) { + try { + fullFileName = this.getAbsolutePath(); + } + catch (Exception ex) { + log.debug("File {} getAbsolutePath failure: ", filename, ex); + } + } + isReadOnly = false; + log.trace("fullFileName={} isReadOnly={}", fullFileName, isReadOnly); + } + + /*************************************************************************** + * Class methods + **************************************************************************/ + + /** + * Adds a FileFormat with specified key to the list of supported formats. + *

+ * This method allows a new FileFormat, tagged with an identifying key, to + * be added dynamically to the list of supported File Formats. Using it, + * applications can add new File Formats at runtime. + *

+ * For example, to add a new File Format with the key "xyz" that is + * implemented by the class xyzFile in the package companyC.files, an + * application would make the following calls: + * + *

+     *    Class fileClass = Class.forName( "companyC.files.xyzFile" );
+     *    FileFormat ff = (FileFormat) fileClass.newInstance();
+     *    if ( ff != null ) {
+     *       ff.addFileFormat ("xyz", ff )
+     *    }
+     * 
+ *

+ * If either key or fileformat is + * null, or if key is already in use, the method + * returns without updating the list of supported File Formats. + * + * @param key + * A string that identifies the FileFormat. + * @param fileformat + * An instance of the FileFormat to be added. + * @see #getFileFormat(String) + * @see #getFileFormatKeys() + * @see #getFileFormats() + * @see #removeFileFormat(String) + */ + public static final void addFileFormat(String key, FileFormat fileformat) { + if ((fileformat == null) || (key == null)) { + return; + } + + key = key.trim(); + + if (!FileList.containsKey(key)) { + FileList.put(key, fileformat); + } + } + + /** + * Returns the FileFormat with specified key from the list of supported + * formats. + *

+ * This method returns a FileFormat instance, as identified by an + * identifying key, from the list of supported File Formats. + *

+ * If the specified key is in the list of supported formats, the instance of + * the associated FileFormat object is returned. If the specified key is not + * in the list of supported formats, null is returned. + * + * @param key + * A string that identifies the FileFormat. + * @return The FileFormat that matches the given key, or null + * if the key is not found in the list of supported File Formats. + * @see #addFileFormat(String,FileFormat) + * @see #getFileFormatKeys() + * @see #getFileFormats() + * @see #removeFileFormat(String) + */ + public static final FileFormat getFileFormat(String key) { + return FileList.get(key); + } + + /** + * Returns an Enumeration of keys for all supported formats. + *
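For example, the registered keys can be listed as follows (a sketch; it prints whatever formats were registered, such as "HDF5" and "HDF4"):

    import java.util.Enumeration;
    import hdf.object.FileFormat;

    public class ListFormatKeys {
        @SuppressWarnings("rawtypes")
        public static void main(String[] args) {
            Enumeration keys = FileFormat.getFileFormatKeys();
            while (keys.hasMoreElements()) {
                System.out.println(keys.nextElement());
            }
        }
    }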

+ * This method returns an Enumeration containing the unique keys (Strings) + * for all the File Formats in the list of supported File Formats. + * + * @return An Enumeration of keys that are in the list of supported formats. + * @see #addFileFormat(String,FileFormat) + * @see #getFileFormat(String) + * @see #getFileFormats() + * @see #removeFileFormat(String) + */ + @SuppressWarnings("rawtypes") + public static final Enumeration getFileFormatKeys() { + return ((Hashtable) FileList).keys(); + } + + /** + * Returns an array of supported FileFormat instances. + *

+ * This method returns an array of FileFormat instances that appear in the + * list of supported File Formats. + *

+ * If the list of supported formats is empty, null is returned. + * + * @return An array of all FileFormat instances in the list of supported + * File Formats, or null if the list is empty. + * @see #addFileFormat(String,FileFormat) + * @see #getFileFormat(String) + * @see #getFileFormatKeys() + * @see #removeFileFormat(String) + */ + @SuppressWarnings("rawtypes") + public static final FileFormat[] getFileFormats() { + int n = FileList.size(); + if (n <= 0) { + return null; + } + + int i = 0; + FileFormat[] fileformats = new FileFormat[n]; + Enumeration local_enum = ((Hashtable) FileList).elements(); + while (local_enum.hasMoreElements()) { + fileformats[i++] = (FileFormat) local_enum.nextElement(); + } + + return fileformats; + } + + /** + * Removes a FileFormat from the list of supported formats. + *

+ * This method removes a FileFormat, as identified by the specified key, + * from the list of supported File Formats. + *

+ * If the specified key is in the list of supported formats, the instance of + * the FileFormat object that is being removed from the list is returned. If + * the key is not in the list of supported formats, null is + * returned. + * + * @param key + * A string that identifies the FileFormat to be removed. + * @return The FileFormat that is removed, or null if the key + * is not found in the list of supported File Formats. + * @see #addFileFormat(String,FileFormat) + * @see #getFileFormat(String) + * @see #getFileFormatKeys() + * @see #getFileFormats() + */ + public static final FileFormat removeFileFormat(String key) { + return FileList.remove(key); + } + + /** + * Adds file extension(s) to the list of file extensions for supported file + * formats. + *

+ * Multiple extensions can be included in a single parameter if they are + * separated by commas. + *
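For instance (a sketch; "he5" and "xmf" are hypothetical extensions):

    import hdf.object.FileFormat;

    public class AddExtensions {
        public static void main(String[] args) {
            // Two extensions in one comma-separated call; entries that are
            // already in the list are skipped.
            FileFormat.addFileExtension("he5, xmf");
            System.out.println(FileFormat.getFileExtensions());
        }
    }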

+ * The list of file extensions updated by this call is not linked with + * supported formats that implement FileFormat objects. The file extension + * list is maintained for the benefit of applications that may choose to + * recognize only those files with extensions that appear in the list of + * file extensions for supported file formats. + *

+ * By default, the file extensions list includes: "hdf, h4, hdf5, h5, nc, fits" + * + * @param extension + * The file extension(s) to add. + * @see #addFileFormat(String,FileFormat) + * @see #getFileExtensions() + */ + public static final void addFileExtension(String extension) { + if ((extensions == null) || (extensions.length() <= 0)) { + extensions = extension; + } + + StringTokenizer currentExt = new StringTokenizer(extensions, ","); + Vector<String> tokens = new Vector<>(currentExt.countTokens() + 5); + + while (currentExt.hasMoreTokens()) { + tokens.add(currentExt.nextToken().trim().toLowerCase()); + } + + currentExt = new StringTokenizer(extension, ","); + String ext = null; + while (currentExt.hasMoreTokens()) { + ext = currentExt.nextToken().trim().toLowerCase(); + if (tokens.contains(ext)) { + continue; + } + + extensions = extensions + ", " + ext; + } + + tokens.setSize(0); + } + + /** + * Returns a list of file extensions for all supported file formats. + *

+ * The extensions in the returned String are separated by commas: + * "hdf, h4, hdf5, h5, nc, fits" + *

+ * It is the responsibility of the application to update the file extension + * list using {@link #addFileExtension(String)} when new FileFormat + * implementations are added. + * + * @return A list of file extensions for all supported file formats. + * @see #addFileExtension(String) + */ + public static final String getFileExtensions() { + return extensions; + } + + /** + * Creates a FileFormat instance for the specified file. + *

+ * This method checks the list of supported file formats to find one that + * matches the format of the specified file. If a match is found, the method + * returns an instance of the associated FileFormat object. If no match is + * found, null is returned. + *

+ * For example, if "test_hdf5.h5" is an HDF5 file, + * FileFormat.getInstance("test_hdf5.h5") will return an instance of H5File. + *

+ * The file is not opened as part of this call. Read/write file access is + * associated with the FileFormat instance if the matching file format + * supports read/write access. Some file formats only support read access. + * + * @param filename + * A valid file name, with a relative or absolute path. + * @return An instance of the matched FileFormat; null if no + * match. + * @throws IllegalArgumentException + * If the filename argument is null or + * does not specify an existing file. + * @throws Exception + * If there are problems creating the new instance. + * @see #createFile(String, int) + * @see #createInstance(String, int) + * @see #getFileFormats() + */ + @SuppressWarnings("rawtypes") + public static final FileFormat getInstance(String filename) throws Exception { + if ((filename == null) || (filename.length() <= 0)) { + throw new IllegalArgumentException("Invalid file name: " + filename); + } + + if (!(new File(filename)).exists()) { + throw new IllegalArgumentException("File " + filename + " does not exist."); + } + + FileFormat fileFormat = null; + FileFormat knownFormat = null; + Enumeration elms = ((Hashtable) FileList).elements(); + + while (elms.hasMoreElements()) { + knownFormat = (FileFormat) elms.nextElement(); + if (knownFormat.isThisType(filename)) { + try { + fileFormat = knownFormat.createInstance(filename, WRITE); + } + catch (Exception ex) { + log.debug("File {} createInstance failure: ", filename, ex); + } + break; + } + } + + return fileFormat; + } + + /*************************************************************************** + * Implementation Class methods. These methods are related to the + * implementing FileFormat class, but not to a particular instance of that + * class. Since we can't override class methods (they can only be shadowed + * in Java), these are instance methods. + * + * The non-abstract methods just throw an exception indicating that the + * implementing class doesn't support the functionality. + **************************************************************************/ + + /** + * Returns the version of the library for the implementing FileFormat class. + *

+ * The implementing FileFormat classes have freedom in how they obtain or + * generate the version number that is returned by this method. The H5File + * and H4File implementations query the underlying HDF libraries and return + * the reported version numbers. Other implementing classes may generate the + * version string directly within the called method. + * + * @return The library version. + */ + public abstract String getLibversion(); + + /** + * Checks if the class implements the specified FileFormat. + *

+ * The Java "instanceof" operation is unable to check if an object is an + * instance of a FileFormat that is loaded at runtime. This method provides + * the "instanceof" functionality, and works for implementing classes that + * are loaded at runtime. + *

+ * This method lets applications that only access the abstract object layer + * determine the format of a given instance of the abstract class. + *

+ * For example, HDFView uses the following code to determine if a file is an + * HDF5 file: + * + *

+     * FileFormat h5F = FileFormat.getFileFormat(FileFormat.FILE_TYPE_HDF5);
+     * HObject hObject = viewer.getTreeView().getCurrentObject();
+     * FileFormat thisF = hObject.getFileFormat();
+     * boolean isH5 = h5F.isThisType(thisF);
+     * 
+ * + * @param fileFormat + * The FileFormat to be checked. + * @return True if this instance implements the specified FileFormat; + * otherwise returns false. + * @see #isThisType(String) + */ + public abstract boolean isThisType(FileFormat fileFormat); + + /** + * Checks if the implementing FileFormat class matches the format of the + * specified file. + *

+ * For example, if "test.h5" is an HDF5 file, the first call to isThisType() + * in the code fragment shown will return false, and the second + * call will return true. + * + *

+     * FileFormat ncF = FileFormat.getFileFormat(FileFormat.FILE_TYPE_NC3);
+     * FileFormat h4F = FileFormat.getFileFormat(FileFormat.FILE_TYPE_HDF4);
+     * FileFormat h5F = FileFormat.getFileFormat(FileFormat.FILE_TYPE_HDF5);
+     * boolean isH4 = h4F.isThisType("test.h5"); // false
+     * boolean isH5 = h5F.isThisType("test.h5"); // true
+     * 
+ * + * @param filename + * The name of the file to be checked. + * @return True if the format of the file matches the format of this + * instance; otherwise returns false. + * @see #isThisType(FileFormat) + */ + public abstract boolean isThisType(String filename); + + /** + * Creates a file with the specified name and returns a new FileFormat + * implementation instance associated with the file. + *

+ * This method creates a file whose format is the same as that of the + * implementing class. An instance of the FileFormat implementing class is + * created and associated with the file. That instance is returned by the + * method. + *

+ * The filename in this method call is equivalent to the pathname in the + * java.io.File class. The filename is converted into an abstract pathname + * by the File class. + *

+ * A flag controls the behavior if the named file already exists. The flag + * values and corresponding behaviors are: + *

    + *
  • FILE_CREATE_DELETE: Create a new file or truncate an existing one. + *
  • FILE_CREATE_OPEN: Create a new file or open an existing one. + *
+ *

+ * If the flag is FILE_CREATE_DELETE, the method will create a new file or + * truncate an existing file. If the flag is FILE_CREATE_OPEN and the file + * does not exist, the method will create a new file. + *
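A sketch of creating a file under these flags, assuming the H5File implementation is registered; "out.h5" is a hypothetical path:

    import hdf.object.FileFormat;

    public class CreateH5 {
        public static void main(String[] args) throws Exception {
            FileFormat h5 = FileFormat.getFileFormat(FileFormat.FILE_TYPE_HDF5);
            // Create a new file, truncating any existing one.
            FileFormat out = h5.createFile("out.h5", FileFormat.FILE_CREATE_DELETE);
            System.out.println("created " + out.getFilePath());
        }
    }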

+ * This method does not open the file for access, nor does it confirm that + * the file can later be opened read/write. The file open is carried out by + * the open() call. + * + * @param filename + * The filename; a pathname string. + * @param createFlag + * The creation flag, which determines behavior when the file + * already exists. Acceptable values are + * FILE_CREATE_DELETE and + * FILE_CREATE_OPEN. + * @throws NullPointerException + * If the filename argument is null. + * @throws UnsupportedOperationException + * If the implementing class does not support the file creation + * operation. + * @throws Exception + * If the file cannot be created or if the creation flag has an + * unexpected value. The exceptions thrown vary depending on the + * implementing class. + * @see #createInstance(String, int) + * @see #getInstance(String) + * @see #open() + * + * @return the FileFormat instance. + */ + public FileFormat createFile(String filename, int createFlag) throws Exception { + // If the implementing subclass doesn't have this method then that + // format doesn't support File Creation and we throw an exception. + throw new UnsupportedOperationException("FileFormat FileFormat.createFile(...) is not implemented."); + } + + /** + * Creates a FileFormat implementation instance with specified filename and + * access. + *

+ * This method creates an instance of the FileFormat implementing class and + * sets the filename and file access parameters. + *

+ * The filename in this method call is equivalent to the pathname in the + * java.io.File class. The filename is converted into an abstract pathname + * by the File class. + *

+ * The access parameter values and corresponding behaviors at file open: + *

    + *
  • READ: Read-only access. Fail if file doesn't exist. + *
  • WRITE: Read/Write access. Behavior if file doesn't exist or can't be + * opened for read/write access depends on the implementing class. + *
  • CREATE: Read/Write access. Create a new file or truncate an existing + * one. Behavior if file can't be created, or if it exists but can't be + * opened read/write depends on the implementing class. + *
+ *

+ * Some FileFormat implementing classes may only support READ access and + * will use READ regardless of the value specified in the call. Refer to the + * implementing class documentation for details. + *

+ * This method does not open the file for access, nor does it confirm that + * the file can later be opened read/write or created. The file open is + * carried out by the open() call. + *

+ * Example (without exception handling): + * + *

+     * // Request the implementing class of FileFormat: H5File
+     * FileFormat h5file = FileFormat.getFileFormat(FileFormat.FILE_TYPE_HDF5);
+     *
+     * // Create an instance of H5File object with read/write access
+     * H5File test1 = (H5File) h5file.createInstance("test_hdf5.h5",
+     *                                               FileFormat.WRITE);
+     *
+     * // Open the file and load the file structure; file id is returned.
+     * long fid = test1.open();
+     * 
+ * + * @param filename + * The filename; a pathname string. + * @param access + * The file access flag, which determines behavior when file is + * opened. Acceptable values are READ, WRITE, and + * CREATE. + * @throws NullPointerException + * If the filename argument is null. + * @throws Exception + * If the instance cannot be created or if the access flag has + * an unexpected value. The exceptions thrown vary depending on + * the implementing class. + * @see #createFile(String, int) + * @see #getInstance(String) + * @see #open() + * + * @return the FileFormat instance. + */ + public abstract FileFormat createInstance(String filename, int access) throws Exception; + + // REVIEW DOCS for createInstance() + // What if READ ONLY in implementation? What if file already open? + // Can we doc exceptions better or in implementation methods? + + /*************************************************************************** + * Final instance methods + * + * Related to a given instance of the class, but at the FileFormat level, + * not at the implementing class level. + **************************************************************************/ + + /** + * Returns the absolute path for the file. + *

+ * For example, "/samples/hdf5_test.h5". If there is no file associated with + * this FileFormat instance, null is returned. + * + * @return The full path (file path + file name) of the associated file, or + * null if there is no associated file. + */ + public final String getFilePath() { + return fullFileName; + } + + /** + * Returns file identifier of open file associated with this instance. + * + * @return The file identifer, or -1 if there is no file open. + */ + public final long getFID() { + return fid; + } + + /** + * Returns true if the file access is read-only. + *

+ * This method returns true if the file access is read-only. If the file + * access is read-write, or if there is no file associated with the + * FileFormat instance, false will be returned. + *

+ * Note that this method may return true even if the file is not open for + * access when the method is called. The file access is set by the + * createFile(), createInstance(), or getInstance() + * call, and the file is opened for access by the open() call. + * + * @return True if the file access is read-only, otherwise returns false. + * @see #createFile(String, int) + * @see #createInstance(String, int) + * @see #getInstance(String) + * @see #open() + */ + public final boolean isReadOnly() { + return isReadOnly; + } + + /** + * Sets the maximum number of objects to be loaded into memory. + *

+ * Current Java applications, such as HDFView, cannot handle files with + * large numbers of objects due to JVM memory limitations. The maximum + * number limits the number of objects that will be loaded for a given + * FileFormat instance. + *
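+ * For example (a minimal sketch; the filename and limit are illustrative), the + * limit is set before open() so that it takes effect when the file structure + * is loaded: + * + *
+     * FileFormat file = FileFormat.getInstance("large_file.h5");
+     * file.setMaxMembers(10000); // H5File applies the limit to each group
+     * file.open();
+     * 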

+ * The implementing FileFormat class has freedom in how it interprets the + * maximum number. H5File, for example, will load the maximum number of + * objects for each group in the file. + * + * @param n + * The maximum number of objects to be loaded into memory. + * @see #getMaxMembers() + * @see #setStartMembers(int) + */ + public final void setMaxMembers(int n) { + max_members = n; + } + + /** + * Returns the maximum number of objects that can be loaded into memory. + * + * @return The maximum number of objects that can be loaded into memory. + * @see #setMaxMembers(int) + */ + public final int getMaxMembers() { + if (max_members<0) + return Integer.MAX_VALUE; // load the whole file + + return max_members; + } + + /** + * Sets the starting index of objects to be loaded into memory. + *

+ * The implementing FileFormat class has freedom in how it indexes objects + * in the file. + * + * @param idx + * The starting index of the object to be loaded into memory + * @see #getStartMembers() + * @see #setMaxMembers(int) + */ + public final void setStartMembers(int idx) { + start_members = idx; + } + + /** + * Returns the index of the starting object to be loaded into memory. + * + * @return The index of the starting object to be loaded into memory. + * @see #setStartMembers(int) + */ + public final int getStartMembers() { + return start_members; + } + + /** + * Returns the number of objects in memory. + *

+ * This method returns the total number of objects loaded into memory for + * this FileFormat instance. The method counts the objects that are loaded, + * which can take some time for a large number of objects. + *

+ * It is worth noting that the total number of objects in memory may be + * different than the total number of objects in the file. + *

+ * Since implementing classes have freedom in how they interpret and use the + * maximum number of members value, there may be differing numbers of + * objects in memory in different implementation instances, even with the + * same "use case". + *

+ * For example, say the use case is a file that contains 20,000 objects, the + * maximum number of members for an instance is 10,000, and the start member + * index is 1. There are 2 groups in the file. The root group contains + * 10,500 objects and the group "/g1" contains 9,500 objects. + *

+ * In an implementation that limits the total number of objects loaded to + * the maximum number of members, this method will return 10,000. + *

+ * In contrast, the H5File implementation loads up to the maximum number of + * members objects for each group in the file. So, with our use case 10,000 + * objects will be loaded in the root group and 9,500 objects will be loaded + * into group "/g1". This method will return the value 19,500, which exceeds + * the maximum number of members value. + * + * @return The number of objects in memory. + * @see #getMaxMembers() + * @see #setMaxMembers(int) + * @see #getStartMembers() + * @see #setStartMembers(int) + */ + public final int getNumberOfMembers() { + HObject rootObject = getRootObject(); + + // Account for root object + if (rootObject != null) return ((Group) rootObject).depthFirstMemberList().size() + 1; + + return 0; + } + + /*************************************************************************** + * Abstract Instance methods + * + * These methods are related to the Implementing FileFormat class and to + * particular instances of objects with those classes. + **************************************************************************/ + + /** + * Opens file and returns a file identifier. + *

+ * This method uses the filename and access + * parameters specified in the createFile(), createInstance(), + * or getInstance() call to open the file. It returns the file + * identifier if successful, or a negative value in case of failure. + *

+ * The method also loads the file structure and basic information (name, + * type) for data objects in the file into the FileFormat instance. It does + * not load the contents of any data object. + *

+ * The structure of the file is stored in a tree starting from the root + * object. + * + * @return File identifier if successful; otherwise -1. + * @throws Exception + * If the file cannot be opened. The exceptions thrown vary + * depending on the implementing class. + * @see #createFile(String, int) + * @see #createInstance(String, int) + * @see #getInstance(String) + * @see #getRootObject() + */ + public abstract long open() throws Exception; + + /** + * Closes file associated with this instance. + *

+ * This method closes the file associated with this FileFormat instance, as + * well as all objects associated with the file. + * + * @throws Exception + * If the file or associated objects cannot be closed. The + * exceptions thrown vary depending on the implementing class. + * @see #open() + */ + public abstract void close() throws Exception; + + // REVIEW DOCS for close() + // What if we try to close a file whose fid is -1? Does this set fid to -1? + // What if it's not open? What if no file? are structures & root object + // still loaded? + // Can we doc exceptions better or in implementation methods? + + /** + * Returns the root object for the file associated with this instance. + *

+ * The root object is an HObject that represents the root group of a + * file. If the file has not yet been opened, or if there is no file + * associated with this instance, null will be returned. + *
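+ * For example (a minimal sketch without error handling; the filename is + * illustrative): + * + *
+     * FileFormat file = FileFormat.getInstance("test_hdf5.h5");
+     * file.open(); // builds the structure tree
+     * Group root = (Group) file.getRootObject();
+     * for (HObject member : root.getMemberList()) {
+     *     System.out.println(member.getFullName());
+     * }
+     * 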

+ * Starting from the root, applications can descend through the tree + * structure and navigate among the file's objects. In the tree structure, + * internal items represent non-empty groups. Leaf items represent datasets, + * named datatypes, or empty groups. + * + * @return The root object of the file, or null if there is no + * associated file or if the associated file has not yet been + * opened. + * @see #open() + */ + public abstract HObject getRootObject(); + + /** + * Gets the HObject with the specified path from the file. + *

+ * This method returns the specified object from the file associated with + * this FileFormat instance. + *

+ * If the specified object is a group, groups and datasets that are members + * of the group will be accessible via the returned HObject instance. The + * exact contents of the returned HObject instance depend on whether or not + * {@link #open()} was called previously for this file. + *

    + *
  • If the file was opened prior to this method call, the complete tree + * of objects under the group will be accessible via the returned HObject + * instance. + *
  • If the file was not opened prior to this method call, only the + * members immediately under the group will be accessible via the returned + * HObject instance. + *
+ *

+ * The decision to have different behaviors was made to give users some + * control over the "cost" of the method. In many cases, a user wants only + * one level of a tree, and the performance penalty for loading the entire + * hierarchy of objects in a large and complex file can be significant. In + * the case where open() has already been called, the HObject + * instances have already been created in memory and can be returned + * quickly. If open() has not been called, this method creates the + * HObject instances before returning the requested HObject. + *
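+ * A minimal usage sketch (the filename and path are illustrative): + * + *
+     * FileFormat file = FileFormat.getInstance("test_hdf5.h5");
+     * // open() has not been called, so only the immediate members of /g0 are loaded
+     * Group g0 = (Group) file.get("/g0");
+     * 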

+ * For example, say we have the following structure in our file: + * + *

+     *        /g0                      Group
+     *        /g0/dataset_comp         Dataset {50, 10}
+     *        /g0/dataset_int          Dataset {50, 10}
+     *        /g0/g00                  Group
+     *        /g0/g00/dataset_float    Dataset {50, 10}
+     *        /g0/g01                  Group
+     *        /g0/g01/dataset_string   Dataset {50, 10}
+     * 
+ * + *
    + *
  • If open() is called before get(), the full structure of the + * file is loaded into memory. The call get("/g0") returns the + * instance for /g0 with the information necessary to access + * /g0/dataset_comp, /g0/dataset_int, /g0/g00, /g0/g00/dataset_float, + * /g0/g01, and /g0/g01/dataset_string. + *
  • If open() is not called before get(), only the objects + * immediately under the specified group are accessible via the returned + * HObject instance. In this example, the call get("/g0") + * returns the instance for /g0 with the information necessary to access + * /g0/dataset_comp, /g0/dataset_int, /g0/g00, and /g0/g01. + *
+ * + * @param path + * Full path of the data object to be returned. + * @return The object if it exists in the file; otherwise null. + * @throws Exception + * If there are unexpected problems in trying to retrieve the + * object. The exceptions thrown vary depending on the + * implementing class. + */ + public abstract HObject get(String path) throws Exception; + + // REVIEW DOCS for get(); What if no file associated w/ instance? + // Look at exceptions. Confirm example. Make sure perf tradeoffs + // documented properly. + + /** + * Creates a named datatype in a file. + *

+ * The following code creates a named datatype in a file. + * + *

+     * H5File file = (H5File) h5file.createInstance("test_hdf5.h5", FileFormat.WRITE);
+     * H5Datatype dtype = file.createDatatype(
+     *                             Datatype.CLASS_INTEGER,
+     *                             4,
+     *                             Datatype.NATIVE,
+     *                             Datatype.NATIVE,
+     *                             "Native Integer");
+     * 
+ * + * @param tclass + * class of datatype, e.g. Datatype.CLASS_INTEGER + * @param tsize + * size of the datatype in bytes, e.g. 4 for 32-bit integer. + * @param torder + * order of the byte endianing, e.g. Datatype.ORDER_LE. + * @param tsign + * signed or unsigned of an integer, e.g. Datatype.SIGN_NONE. + * @param name + * name of the datatype to create, e.g. "Native Integer". + * @return The new datatype if successful; otherwise returns null. + * @throws Exception + * The exceptions thrown vary depending on the implementing + * class. + */ + public abstract Datatype createDatatype(int tclass, int tsize, int torder, int tsign, String name) throws Exception; + + /** + * Creates a named datatype in a file. + *

+ * The following code creates a named datatype in a file. + * + *

+     * H5File file = (H5File) h5file.createInstance("test_hdf5.h5", FileFormat.WRITE);
+     * H5Datatype dtype = file.createDatatype(
+     *                             Datatype.CLASS_INTEGER,
+     *                             4,
+     *                             Datatype.NATIVE,
+     *                             Datatype.NATIVE,
+     *                             basetype,
+     *                             "Native Integer");
+     * 
+ * + * @param tclass + * class of datatype, e.g. Datatype.CLASS_INTEGER + * @param tsize + * size of the datatype in bytes, e.g. 4 for 32-bit integer. + * @param torder + * order of the byte endianing, e.g. Datatype.ORDER_LE. + * @param tsign + * signed or unsigned of an integer, e.g. Datatype.SIGN_NONE. + * @param tbase + * the base datatype of the new datatype + * @param name + * name of the datatype to create, e.g. "Native Integer". + * @return The new datatype if successful; otherwise returns null. + * @throws Exception + * The exceptions thrown vary depending on the implementing + * class. + */ + public Datatype createDatatype(int tclass, int tsize, int torder, int tsign, Datatype tbase, String name) throws Exception + { + // Derived classes must override this function to use base type option + return createDatatype(tclass, tsize, torder, tsign, name); + } + + // REVIEW DOCS for createDatatype(). Check and document exceptions. + + /*************************************************************************** + * Methods related to Datatypes and HObjects in the implementing FileFormat. + * + * Strictly speaking, these methods aren't related to FileFormat and the + * actions could be carried out through the HObject and Datatype classes. + * But, in some cases they allow a null input and expect the generated + * object to be of a type that has particular FileFormat. Therefore, we put + * them in the implementing FileFormat class so that we create the proper + * type of HObject... H5Group or H4Group for example. + * + * Here again, if there could be Implementation Class methods we'd use + * those. But, since we can't override class methods (they can only be + * shadowed in Java), these are instance methods. + * + * The non-abstract methods just throw an exception indicating that the + * implementing class doesn't support the functionality. + **************************************************************************/ + + /** + * Creates a new datatype in memory. + *

+ * The following code creates an instance of H5Datatype in memory. + * + *

+     * H5File file = (H5File) h5file.createInstance("test_hdf5.h5", FileFormat.WRITE);
+     * H5Datatype dtype = file.createDatatype(
+     *                             Datatype.CLASS_INTEGER,
+     *                             4,
+     *                             Datatype.NATIVE,
+     *                             Datatype.NATIVE);
+     * 
+ * + * @param tclass + * class of datatype, e.g. Datatype.CLASS_INTEGER + * @param tsize + * size of the datatype in bytes, e.g. 4 for 32-bit integer. + * @param torder + * order of the byte endian, e.g. Datatype.ORDER_LE. + * @param tsign + * signed or unsigned of an integer, e.g. Datatype.SIGN_NONE. + * @return The new datatype object if successful; otherwise returns null. + * @throws Exception + * The exceptions thrown vary depending on the implementing + * class. + */ + public abstract Datatype createDatatype(int tclass, int tsize, int torder, int tsign) throws Exception; + + /** + * Creates a new datatype in memory. + *

+ * The following code creates an instance of H5Datatype in memory. + * + *

+     * H5File file = (H5File) h5file.createInstance("test_hdf5.h5", FileFormat.WRITE);
+     * H5Datatype dtype = file.createDatatype(
+     *                             Datatype.CLASS_INTEGER,
+     *                             4,
+     *                             Datatype.NATIVE,
+     *                             Datatype.NATIVE,
+     *                             basetype);
+     * 
+ * + * @param tclass + * class of datatype, e.g. Datatype.CLASS_INTEGER + * @param tsize + * size of the datatype in bytes, e.g. 4 for 32-bit integer. + * @param torder + * order of the byte endian, e.g. Datatype.ORDER_LE. + * @param tsign + * signed or unsigned of an integer, e.g. Datatype.SIGN_NONE. + * @param tbase + * the base datatype of the new datatype + * @return The new datatype object if successful; otherwise returns null. + * @throws Exception + * The exceptions thrown vary depending on the implementing + * class. + */ + public Datatype createDatatype(int tclass, int tsize, int torder, int tsign, Datatype tbase) throws Exception + { + // Derived classes must override this function to use base type option + return createDatatype(tclass, tsize, torder, tsign); + } + + // REVIEW DOCS for createDatatype(). Check and document exceptions. + + /** + * Creates a new dataset in a file with/without chunking/compression. + *

+ * The following example creates a 2D integer dataset of size 100X50 in the root group of an HDF5 + * file. + * + *

+     * String name = "2D integer";
+     * Group pgroup = (Group) getRootObject();
+     * Datatype dtype = new H5Datatype(Datatype.CLASS_INTEGER, // class
+     *         4, // size in bytes
+     *         Datatype.ORDER_LE, // byte order
+     *         Datatype.SIGN_NONE); // unsigned
+     * long[] dims = { 100, 50 };
+     * long[] maxdims = dims;
+     * long[] chunks = null; // no chunking
+     * int gzip = 0; // no compression
+     * Object data = null; // no initial data values
+     * Dataset d = file.createScalarDS(name, pgroup, dtype, dims, maxdims, chunks, gzip, data);
+     * 
+ * + * @param name + * name of the new dataset, e.g. "2D integer" + * @param pgroup + * parent group where the new dataset is created. + * @param type + * datatype of the new dataset. + * @param dims + * dimension sizes of the new dataset, e.g. long[] dims = {100, 50}. + * @param maxdims + * maximum dimension sizes of the new dataset, null if maxdims is the same as dims. + * @param chunks + * chunk sizes of the new dataset, null if no chunking. + * @param gzip + * GZIP compression level (1 to 9), 0 or negative values if no compression. + * @param fillValue + * default value. + * @param data + * data written to the new dataset, null if no data is written to the new dataset. + * + * @return The new dataset if successful; otherwise returns null + * @throws Exception + * The exceptions thrown vary depending on the implementing class. + */ + public abstract Dataset createScalarDS(String name, Group pgroup, Datatype type, long[] dims, long[] maxdims, + long[] chunks, int gzip, Object fillValue, Object data) throws Exception; + + public Dataset createScalarDS(String name, Group pgroup, Datatype type, long[] dims, long[] maxdims, long[] chunks, + int gzip, Object data) throws Exception { + return createScalarDS(name, pgroup, type, dims, maxdims, chunks, gzip, null, data); + } + + // REVIEW DOCS for createScalarDS(). Check and document exceptions. + + /** + * Creates a new compound dataset in a file with/without chunking and + * compression. + *

+ * The following example creates a compressed 2D compound dataset of size + * 100X50 in the root group. The compound dataset has two members, x and y. + * Member x is an integer; member y is a 1-D float array of size 10. + * + *

+     * String name = "2D compound";
+     * Group pgroup = (Group) getRootObject();
+     * long[] dims = {100, 50};
+     * long[] chunks = {1, 50};
+     * int gzip = 9;
+     * String[] memberNames = {"x", "y"};
+     *
+     * Datatype[] memberDatatypes = {
+     *     new H5Datatype(Datatype.CLASS_INTEGER, Datatype.NATIVE,
+     *                    Datatype.NATIVE, Datatype.NATIVE),
+     *     new H5Datatype(Datatype.CLASS_FLOAT, Datatype.NATIVE,
+     *                    Datatype.NATIVE, Datatype.NATIVE)};
+     *
+     * int[] memberSizes = {1, 10};
+     * Object data = null; // no initial data values
+     * Dataset d = file.createCompoundDS(name, pgroup, dims, null,
+     *           chunks, gzip, memberNames, memberDatatypes, memberSizes, data);
+     * 
+ * + * @param name + * name of the new dataset + * @param pgroup + * parent group where the new dataset is created. + * @param dims + * dimension sizes of the new dataset. + * @param maxdims + * maximum dimension sizes of the new dataset, null if maxdims is + * the same as dims. + * @param chunks + * chunk sizes of the new dataset, null if no chunking. + * @param gzip + * GZIP compression level (1 to 9), 0 or negative values if no + * compression. + * @param memberNames + * names of the members. + * @param memberDatatypes + * datatypes of the members. + * @param memberSizes + * array sizes of the members. + * @param data + * data written to the new dataset, null if no data is written to + * the new dataset. + * + * @return new dataset object if successful; otherwise returns null + * @throws UnsupportedOperationException + * If the implementing class does not support compound datasets. + * @throws Exception + * The exceptions thrown vary depending on the implementing + * class. + */ + public Dataset createCompoundDS(String name, Group pgroup, long[] dims, long[] maxdims, long[] chunks, int gzip, + String[] memberNames, Datatype[] memberDatatypes, int[] memberSizes, Object data) throws Exception + // REVIEW DOCS for createCompoundDS(). Check and document exceptions. + { + // If the implementing subclass doesn't have this method then that + // format doesn't support Compound DataSets and we throw an + // exception. + throw new UnsupportedOperationException("Dataset FileFormat.createCompoundDS(...) is not implemented."); + } + + /** + * Creates a new image in a file. + *

+ * The following example creates a 2D image of size 100X50 in the root group. + * + *

+     * String name = "2D image";
+     * Group pgroup = (Group) getRootObject();
+     * Datatype dtype = new H5Datatype(Datatype.CLASS_INTEGER, 1, Datatype.NATIVE, Datatype.SIGN_NONE);
+     * long[] dims = {100, 50};
+     * long[] maxdims = dims;
+     * long[] chunks = null; // no chunking
+     * int gzip = 0; // no compression
+     * int ncomp = 3; // RGB true color image
+     * int interlace = ScalarDS.INTERLACE_PIXEL;
+     * Object data = null; // no initial data values
+     * Dataset d = file.createImage(name, pgroup, dtype, dims, maxdims, chunks, gzip, ncomp, interlace, data);
+     * 
+ * + * @param name + * name of the new image, "2D image". + * @param pgroup + * parent group where the new image is created. + * @param type + * datatype of the new image. + * @param dims + * dimension sizes of the new dataset, e.g. long[] dims = {100, + * 50}. + * @param maxdims + * maximum dimension sizes of the new dataset, null if maxdims is + * the same as dims. + * @param chunks + * chunk sizes of the new dataset, null if no chunking. + * @param gzip + * GZIP compression level (1 to 9), 0 or negative values if no + * compression. + * @param ncomp + * number of components of the new image, e.g. int ncomp = 3; // + * RGB true color image. + * @param interlace + * interlace mode of the image. Valid values are + * ScalarDS.INTERLACE_PIXEL, ScalarDS.INTERLACE_PLANE and + * ScalarDS.INTERLACE_LINE. + * @param data + * data value of the image, null if no data. + * + * @return The new image object if successful; otherwise returns null + * + * @throws Exception + * The exceptions thrown vary depending on the implementing + * class. + */ + public abstract Dataset createImage( + String name, Group pgroup, Datatype type, long[] dims, long[] maxdims, long[] chunks, int gzip, int ncomp, + int interlace, Object data) throws Exception; + + // REVIEW DOCS for createImage(). Check and document exceptions. + + /** + * Creates a new group with the specified name in an existing group. + *
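+ * For example (a minimal sketch; file is assumed to be an open, writable + * FileFormat instance): + * + *
+     * Group root = (Group) file.get("/");
+     * Group grp1 = file.createGroup("grp1", root); // creates /grp1
+     * 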

+ * If the parent group is null, the new group will be created in the root + * group. + * + * @param name + * The name of the new group. + * @param parentGroup + * The parent group, or null. + * + * @return The new group if successful; otherwise returns null. + * + * @throws Exception + * The exceptions thrown vary depending on the implementing + * class. + */ + public abstract Group createGroup(String name, Group parentGroup) throws Exception; + + // REVIEW DOCS for createLink(). + // Verify Implementing classes document these and also + // 'do the right thing' if fid is -1, currentObj is non-null, if + // object is null, or the root group then what? document & verify! + + /** + * Creates a soft, hard or external link to an existing object in the open + * file. + *
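+ * For example (a minimal sketch; file is assumed to be an open H5File and dset + * an existing dataset in that file): + * + *
+     * Group root = (Group) file.get("/");
+     * HObject link = file.createLink(root, "dset_link", dset, Group.LINK_TYPE_HARD);
+     * 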

+ * If parentGroup is null, the new link is created in the root group. + * + * @param parentGroup + * The group where the link is created. + * @param name + * The name of the link. + * @param currentObj + * The existing object the new link will reference. + * @param type + * The type of link to be created. It can be a hard link, a soft + * link or an external link. + * + * @return The object pointed to by the new link if successful; otherwise + * returns null. + * + * @throws Exception + * The exceptions thrown vary depending on the implementing + * class. + */ + public HObject createLink(Group parentGroup, String name, HObject currentObj, int type) throws Exception { + return createLink(parentGroup, name, currentObj); + } + + /** + * Creates a soft or external link to an object in a file that does not exist + * at the time the link is created. + * + * @param parentGroup + * The group where the link is created. + * @param name + * The name of the link. + * @param currentObj + * The name of the object the new link will reference. The object + * doesn't have to exist. + * @param type + * The type of link to be created. + * + * @return The H5Link object pointed to by the new link if successful; + * otherwise returns null. + * + * @throws Exception + * The exceptions thrown vary depending on the implementing + * class. + */ + public HObject createLink(Group parentGroup, String name, String currentObj, int type) throws Exception { + return createLink(parentGroup, name, currentObj); + } + + /** + * Copies the source object to a new destination. + *

+ * This method copies the source object to a destination group, and assigns + * the specified name to the new object. + *

+ * The copy may take place within a single file or across files. If the source + * object and destination group are in different files, the files must have + * the same file format (both HDF5 for example). + *

+ * The source object can be a group, a dataset, or a named datatype. This + * method copies the object along with all of its attributes and other + * properties. If the source object is a group, this method also copies all + * objects and sub-groups below the group. + *

+ * The following example shows how to use the copy method to create two + * copies of an existing HDF5 file structure in a new HDF5 file. One copy + * will be under /copy1 and the other under /copy2 in the new file. + * + *

+     * // Open the existing file with the source object.
+     * H5File existingFile = new H5File("existingFile.h5", FileFormat.READ);
+     * existingFile.open();
+     * // Our source object will be the root group.
+     * HObject srcObj = existingFile.get("/");
+     * // Create a new file.
+     * H5File newFile = new H5File("newFile.h5", FileFormat.CREATE);
+     * newFile.open();
+     * // Both copies in the new file will have the root group as their
+     * // destination group.
+     * Group dstGroup = (Group) newFile.get("/");
+     * // First copy goes to "/copy1" and second goes to "/copy2".
+     * // Notice that we can use either H5File instance to perform the copy.
+     * HObject copy1 = existingFile.copy(srcObj, dstGroup, "copy1");
+     * HObject copy2 = newFile.copy(srcObj, dstGroup, "copy2");
+     * // Close both files.
+     * existingFile.close();
+     * newFile.close();
+     * 
+ * + * @param srcObj + * The object to copy. + * @param dstGroup + * The destination group for the new object. + * @param dstName + * The name of the new object. If dstName is null, the name of + * srcObj will be used. + * + * @return The new object, or null if the copy fails. + * + * @throws Exception + * The exceptions thrown are specific to the implementing class. + */ + public abstract HObject copy(HObject srcObj, Group dstGroup, String dstName) throws Exception; + + /** + * Deletes an object from a file. + * + * @param obj + * The object to delete. + * @throws Exception + * The exceptions thrown vary depending on the implementing + * class. + */ + public abstract void delete(HObject obj) throws Exception; + + // REVIEW DOCS for delete(). Check and document exceptions. + + /** + * Attaches a given attribute to an object. + *

+ * If an HDF(4&5) attribute exists in file, the method updates its value. If + * the attribute does not exist in file, it creates the attribute in file + * and attaches it to the object. It will fail to write a new attribute to + * the object where an attribute with the same name already exists. To + * update the value of an existing attribute in file, one needs to get the + * instance of the attribute by getMetadata(), change its values, and use + * writeAttribute() to write the value. + * + * @param obj + * The object to which the attribute is attached to. + * @param attr + * The atribute to attach. + * @param attrExisted + * The indicator if the given attribute exists. + * + * @throws Exception + * The exceptions thrown vary depending on the implementing class. + */ + public abstract void writeAttribute(HObject obj, Attribute attr, boolean attrExisted) throws Exception; + + // REVIEW DOCS for writeAttribute(). Check and document exceptions. + + /*************************************************************************** + * Deprecated methods. + **************************************************************************/ + + /** + * @deprecated As of 2.4, replaced by {@link #createFile(String, int)} + *

+ * The replacement method has an additional parameter that + * controls the behavior if the file already exists. Use + * FileFormat.FILE_CREATE_DELETE as the second + * argument in the replacement method to mimic the behavior + * originally provided by this method. + * + * @param fileName + * The filename; a pathname string. + * + * @return the created file object + * + * @throws Exception if the file cannot be created + */ + @Deprecated + public final FileFormat create(String fileName) throws Exception { + return createFile(fileName, FileFormat.FILE_CREATE_DELETE); + } + + /** + * @deprecated As of 2.4, replaced by {@link #createInstance(String, int)} + * + * The replacement method has identical functionality and a more + * descriptive name. Since open is used elsewhere to + * perform a different function, this method has been deprecated. + * + * @param pathname + * The pathname string. + * @param access + * The file access properties + * + * @return the opened file object + * + * @throws Exception if the file cannot be opened + */ + @Deprecated + public final FileFormat open(String pathname, int access) throws Exception { + return createInstance(pathname, access); + } + + /** + * @deprecated As of 2.4, replaced by + * {@link #createCompoundDS(String, Group, long[], long[], long[], int, String[], Datatype[], int[], Object)} + *

+ * The replacement method has additional parameters: + * maxdims, chunks, and gzip. To mimic + * the behavior originally provided by this method, call the + * replacement method with the following parameter list: + * ( name, pgroup, dims, null, null, -1, + * memberNames, memberDatatypes, memberSizes, data ); + * + * @param name + * The dataset name. + * @param pgroup + * The dataset parent. + * @param dims + * The dataset dimensions. + * @param memberNames + * The dataset compound member names. + * @param memberDatatypes + * The dataset compound member datatypes. + * @param memberSizes + * The dataset compound member sizes. + * @param data + * The dataset data. + * + * @return + * The dataset created. + * + * @throws Exception if the dataset cannot be created + */ + @Deprecated + public final Dataset createCompoundDS(String name, Group pgroup, long[] dims, String[] memberNames, + Datatype[] memberDatatypes, int[] memberSizes, Object data) throws Exception { + return createCompoundDS(name, pgroup, dims, null, null, -1, memberNames, memberDatatypes, memberSizes, data); + } + + /** + * @deprecated As of 2.4, replaced by {@link #copy(HObject, Group, String)} + *

+ * To mimic the behavior originally provided by this method, + * call the replacement method with null as the 3rd + * parameter. + * + * @param srcObj + * The object to be copied + * @param dstGroup + * The group to contain the copied object + * + * @return the copied object + * + * @throws Exception if object can not be copied + */ + @Deprecated + public final HObject copy(HObject srcObj, Group dstGroup) throws Exception { + return copy(srcObj, dstGroup, null); + } + + /** + * @deprecated As of 2.4, replaced by {@link #get(String)} + *

+ * This static method, which has been deprecated, causes two + * problems: + *

    + *
  • It can be very expensive if it is called many times or in + * a loop because each call to the method creates an instance of + * a file. + *
  • Since the method does not return the instance of the + * file, the file cannot be closed directly and may be left open + * (memory leak). The only way to close the file is through the + * object returned by this method. + *
+ * + * @param fullPath + * The file path string. + * + * @return the object that has the given full path + * + * @throws Exception if the object can not be found + */ + @Deprecated + public static final HObject getHObject(String fullPath) throws Exception { + if ((fullPath == null) || (fullPath.length() <= 0)) { + return null; + } + + String filename = null, path = null; + int idx = fullPath.indexOf(FILE_OBJ_SEP); + + if (idx > 0) { + filename = fullPath.substring(0, idx); + path = fullPath.substring(idx + FILE_OBJ_SEP.length()); + if ((path == null) || (path.length() == 0)) { + path = "/"; + } + } + else { + filename = fullPath; + path = "/"; + } + + return FileFormat.getHObject(filename, path); + }; + + /** + * @deprecated As of 2.4, replaced by {@link #get(String)} + *

+ * This static method, which has been deprecated, causes two + * problems: + *

    + *
  • It can be very expensive if it is called many times or in + * a loop because each call to the method creates an instance of + * a file. + *
  • Since the method does not return the instance of the + * file, the file cannot be closed directly and may be left open + * (memory leak). The only way to close the file is through the + * object returned by this method, for example: + *
+     * Dataset dset = (Dataset) H5File.getHObject("hdf5_test.h5", "/images/iceburg");
    +     * ...
    +     * // close the file through dset
    +     * dset.getFileFormat().close();
    +     * 
    + * + *
+ *
+ * + * @param filename + * The filename string. + * @param path + * The path of the object in the file + * + * @return the object that has the given filename and path; otherwise returns null + * + * @throws Exception if the object cannot be found + */ + @Deprecated + public static final HObject getHObject(String filename, String path) throws Exception { + if ((filename == null) || (filename.length() <= 0)) { + throw new IllegalArgumentException("Invalid file name. " + filename); + } + + if (!(new File(filename)).exists()) { + throw new IllegalArgumentException("File does not exist"); + } + + HObject obj = null; + FileFormat file = FileFormat.getInstance(filename); + + if (file != null) { + obj = file.get(path); + if (obj == null) { + file.close(); + } + } + + return obj; + } + + /** + * Finds an object by its object ID + * + * @param file + * the file containing the object + * @param oid + * the oid to search for + * + * @return the object that has the given OID; otherwise returns null + */ + public static final HObject findObject(FileFormat file, long[] oid) { + log.trace("findObject(): start"); + + if ((file == null) || (oid == null)) { + log.debug("findObject(): file is null or oid is null"); + log.trace("findObject(): finish"); + return null; + } + + HObject theObj = null; + + HObject theRoot = file.getRootObject(); + if (theRoot == null) { + log.debug("findObject(): rootObject is null"); + log.trace("findObject(): finish"); + return null; + } + + Iterator<HObject> member_it = ((Group) theRoot).breadthFirstMemberList().iterator(); + while (member_it.hasNext()) { + theObj = member_it.next(); + if (theObj.equalsOID(oid)) break; + } + + return theObj; + } + + /** + * Finds an object by the full path of the object (path+name) + * + * @param file + * the file containing the object + * @param path + * the full path of the object to search for + * + * @return the object that has the given path; otherwise returns null + */ + public static final HObject findObject(FileFormat file, String path) { + log.trace("findObject({}): start", path); + + if ((file == null) || (path == null)) { + log.debug("findObject(): file is null or path is null"); + log.trace("findObject(): finish"); + return null; + } + + if (!path.endsWith("/")) { + path = path + "/"; + } + + HObject theRoot = file.getRootObject(); + + if (theRoot == null) { + log.debug("findObject(): rootObject is null"); + log.trace("findObject(): finish"); + return null; + } + else if (path.equals("/")) { + log.debug("findObject() path is rootObject"); + log.trace("findObject(): finish"); + return theRoot; + } + + Iterator<HObject> member_it = ((Group) theRoot).breadthFirstMemberList().iterator(); + HObject theObj = null; + while (member_it.hasNext()) { + theObj = member_it.next(); + String fullPath = theObj.getFullName() + "/"; + + if (path.equals(fullPath) && theObj.getPath() != null) { + break; + } + else { + theObj = null; + } + } + + log.trace("findObject(): finish"); + return theObj; + } + + // //////////////////////////////////////////////////////////////////////////////////// + // Added to support HDF5 1.8 features // + // //////////////////////////////////////////////////////////////////////////////////// + + /** + * Opens file and returns a file identifier. + * + * @param indexList + * The list of index properties: the index type, which can be + * alphabetical or creation, and the index order, which can be + * increasing or decreasing. + * + * @return File identifier if successful; otherwise -1.
+ * + * @throws Exception + * The exceptions thrown vary depending on the implementing class. + */ + public long open(int... indexList) throws Exception { + throw new UnsupportedOperationException("Unsupported operation. Subclasses must implement FileFormat:open."); + } + + /** + * Creates a new group with specified name in existing group. + *

+ * If the parent group is null, the new group will be created in the root + * group. + * + * @param name + * The name of a new group. + * @param pgroup + * The parent group object. + * @param gplist + * The group creation properties, in which the order of the + * properties conforms to the HDF5 library API, H5Gcreate(), i.e. + * lcpl, gcpl and gapl (see the sketch after the list below), where + *

    + *
  • lcpl : Property list for link creation
  • gcpl : Property + * list for group creation
  • gapl : Property list for group + * access + *
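+ *
+ * A sketch of combined use (assumptions: an H5File-style subclass implements + * both createGcpl() and this method, and HDF5Constants.H5P_DEFAULT stands in + * for the default lcpl and gapl): + * + *
+     * long gcpl = file.createGcpl(Group.CRT_ORDER_TRACKED, 8, 6); // 8 and 6 are assumed compact/dense thresholds
+     * Group g = file.createGroup("grp1", null, HDF5Constants.H5P_DEFAULT, gcpl, HDF5Constants.H5P_DEFAULT);
+     * 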
+ * + * @return The new group if successful; otherwise returns null. + * + * @throws Exception + * The exceptions thrown vary depending on the implementing class. + */ + public Group createGroup(String name, Group pgroup, long... gplist) throws Exception { + throw new UnsupportedOperationException("Unsupported operation. Subclasses must implement FileFormat:createGroup."); + } + + /*** + * Creates the group creation property list identifier, gcpl. This + * identifier is used when creating Groups. + * + * @param creationorder + * The order in which the objects in a group should be created. + * It can be Tracked or Indexed. + * @param maxcompact + * The maximum number of links to store in the group in a compact + * format. + * @param mindense + * The minimum number of links to store in the indexed + * format. Groups which are in indexed format and in which the + * number of links falls below this threshold are automatically + * converted to compact format. + * + * @return The gcpl identifier. + * + * @throws Exception + * The exceptions thrown vary depending on the implementing class. + */ + public long createGcpl(int creationorder, int maxcompact, int mindense) throws Exception { + throw new UnsupportedOperationException("Unsupported operation. Subclasses must implement FileFormat:createGcpl."); + } + + /** + * Creates a link to an existing object in the open file. + *

+ * If linkGroup is null, the new link is created in the root group. + * + * @param linkGroup + * The group where the link is created. + * @param name + * The name of the link. + * @param currentObj + * The existing object the new link will reference. + * + * @return The object pointed to by the new link if successful; otherwise + * returns null. + * + * @throws Exception + * The exceptions thrown vary depending on the implementing class. + */ + public HObject createLink(Group linkGroup, String name, Object currentObj) throws Exception { + throw new UnsupportedOperationException("Unsupported operation. Subclasses must implement FileFormat:createLink."); + } + + /** + * Export dataset. + * + * @param file_export_name + * The file name to export data into. + * @param file_name + * The name of the HDF5 file containing the dataset. + * @param object_path + * The full path of the dataset to be exported. + * @param binary_order + * The data byte order + * + * @throws Exception + * The exceptions thrown vary depending on the implementing class. + */ + public void exportDataset(String file_export_name, String file_name, String object_path, int binary_order) throws Exception { + throw new UnsupportedOperationException("Unsupported operation. Subclasses must implement FileFormat:exportDataset."); + } + + /** + * Renames an attribute. + * + * @param obj + * The object whose attribute is to be renamed. + * @param oldAttrName + * The current name of the attribute. + * @param newAttrName + * The new name of the attribute. + * + * @throws Exception + * The exceptions thrown vary depending on the implementing class. + */ + public void renameAttribute(HObject obj, String oldAttrName, String newAttrName) throws Exception { + throw new UnsupportedOperationException("Unsupported operation. Subclasses must implement FileFormat:renameAttribute."); + } + + /** + * Sets the bounds of new library versions. + * + * @param lowStr + * The earliest version of the library. + * @param highStr + * The latest version of the library. + * + * @throws Exception + * The exceptions thrown vary depending on the implementing class. + */ + public void setNewLibBounds(String lowStr, String highStr) throws Exception { + throw new UnsupportedOperationException("Unsupported operation. Subclasses must implement FileFormat:setNewLibBounds."); + } + + /** + * Sets the bounds of library versions. + * + * @param lowStr + * The earliest version of the library. + * @param highStr + * The latest version of the library. + * + * @throws Exception + * The exceptions thrown vary depending on the implementing class. + */ + public void setLibBounds(String lowStr, String highStr) throws Exception { + throw new UnsupportedOperationException("Unsupported operation. Subclasses must implement FileFormat:setLibBounds."); + } + + /** + * Gets the bounds of library versions + * + * @return The earliest and latest library versions in an int array. + * + * @throws Exception + * The exceptions thrown vary depending on the implementing class. + */ + public int[] getLibBounds() throws Exception { + throw new UnsupportedOperationException("Unsupported operation. Subclasses must implement FileFormat:getLibBounds."); + } + + public String getLibBoundsDescription() { + throw new UnsupportedOperationException("Unsupported operation. Subclasses must implement FileFormat:getLibBoundsDescription."); + } + + public static int getIndexTypeValue(String strtype) { + throw new UnsupportedOperationException("Unsupported operation. 
Subclasses must implement FileFormat:getIndexTypeValue."); + } + + public int getIndexType(String strtype) { + throw new UnsupportedOperationException("Unsupported operation. Subclasses must implement FileFormat:getIndexType."); + } + + public void setIndexType(int indexType) { + throw new UnsupportedOperationException("Unsupported operation. Subclasses must implement FileFormat:setIndexType."); + } + + public static int getIndexOrderValue(String strorder) { + throw new UnsupportedOperationException("Unsupported operation. Subclasses must implement FileFormat:getIndexOrderValue."); + } + + public int getIndexOrder(String strorder) { + throw new UnsupportedOperationException("Unsupported operation. Subclasses must implement FileFormat:getIndexOrder."); + } + + public void setIndexOrder(int indexOrder) { + throw new UnsupportedOperationException("Unsupported operation. Subclasses must implement FileFormat:setIndexOrder."); + } +} diff --git a/src/main/java/hdf/object/Group.java b/src/main/java/hdf/object/Group.java new file mode 100644 index 0000000..016da33 --- /dev/null +++ b/src/main/java/hdf/object/Group.java @@ -0,0 +1,326 @@ +/***************************************************************************** + * Copyright by The HDF Group. * + * Copyright by the Board of Trustees of the University of Illinois. * + * All rights reserved. * + * * + * This file is part of the HDF Java Products distribution. * + * The full copyright notice, including terms governing use, modification, * + * and redistribution, is contained in the files COPYING and Copyright.html. * + * COPYING can be found at the root of the source code distribution tree. * + * Or, see https://support.hdfgroup.org/products/licenses.html * + * If you do not have access to either file, you may request a copy from * + * help@hdfgroup.org. * + ****************************************************************************/ + +package hdf.object; + +import java.util.Iterator; +import java.util.LinkedList; +import java.util.List; +import java.util.Queue; +import java.util.Stack; +import java.util.Vector; + +/** + * Group is an abstract class. Current implementing classes are the H4Group and + * H5Group. This class includes general information of a group object such as + * members of a group and common operations on groups. + *

+ * Members of a group may include other groups, datasets or links. + * + * @version 1.1 9/4/2007 + * @author Peter X. Cao + */ +public abstract class Group extends HObject implements MetaDataContainer { + + private static final long serialVersionUID = 3913174542591568052L; + + /** + * The list of members (Groups and Datasets) of this group in memory. + */ + private List memberList; + + /** + * The parent group where this group is located. The parent of the root + * group is null. + */ + protected Group parent; + + /** + * Total number of members of this group in file. + */ + protected int nMembersInFile; + + public static final int LINK_TYPE_HARD = 0; + + public static final int LINK_TYPE_SOFT = 1; + + public static final int LINK_TYPE_EXTERNAL = 64; + + public static final int CRT_ORDER_TRACKED = 1; + + public static final int CRT_ORDER_INDEXED = 2; + + + /** + * Constructs an instance of the group with specific name, path and parent + * group. An HDF data object must have a name. The path is the group path + * starting from the root. The parent group is the group where this group is + * located. + *

+ * For example, in H5Group(h5file, "grp", "/groups/", pgroup), "grp" is the + * name of the group, "/groups/" is the group path of the group, and pgroup + * is the group where "grp" is located. + * + * @param theFile + * the file containing the group. + * @param grpName + * the name of this group, e.g. "grp01". + * @param grpPath + * the full path of this group, e.g. "/groups/". + * @param grpParent + * the parent of this group. + */ + public Group(FileFormat theFile, String grpName, String grpPath, Group grpParent) { + this(theFile, grpName, grpPath, grpParent, null); + } + + /** + * @deprecated Not for public use in the future.
+ * Using {@link #Group(FileFormat, String, String, Group)} + * + * @param theFile + * the file containing the group. + * @param grpName + * the name of this group, e.g. "grp01". + * @param grpPath + * the full path of this group, e.g. "/groups/". + * @param grpParent + * the parent of this group. + * @param oid + * the oid of this group. + */ + @Deprecated + public Group(FileFormat theFile, String grpName, String grpPath, Group grpParent, long[] oid) { + super(theFile, grpName, grpPath, oid); + + this.parent = grpParent; + } + + /** + * Clears up member list and other resources in memory for the group. Since + * the destructor will clear memory space, the function is usually not + * needed. + */ + public void clear() { + if (memberList != null) { + ((Vector) memberList).setSize(0); + } + } + + /** + * Adds an object to the member list of this group in memory. + * + * @param object + * the HObject to be added to the member list. + */ + public void addToMemberList(HObject object) { + if (memberList == null) { + int size = Math.min(getNumberOfMembersInFile(), this + .getFileFormat().getMaxMembers()); + memberList = new Vector<>(size + 5); + } + + if ((object != null) && !memberList.contains(object)) { + memberList.add(object); + } + } + + /** + * Removes an object from the member list of this group in memory. + * + * @param object + * the HObject (Group or Dataset) to be removed from the member + * list. + */ + public void removeFromMemberList(HObject object) { + if (memberList != null) { + memberList.remove(object); + } + } + + /** + * Returns the list of members of this group. The list is an java.util.List + * containing HObjects. + * + * @return the list of members of this group. + */ + public List getMemberList() { + FileFormat theFile = this.getFileFormat(); + + if ((memberList == null) && (theFile != null)) { + int size = Math.min(getNumberOfMembersInFile(), this.getFileFormat().getMaxMembers()); + memberList = new Vector<>(size + 5); // avoid infinite loop search for groups without members + + // find the memberList from the file by checking the group path and + // name. group may be created out of the structure tree + // (H4/5File.loadTree()). + if (theFile.getFID() < 0) { + try { + theFile.open(); + } // load the file structure; + catch (Exception ex) { + ; + } + } + + HObject root = theFile.getRootObject(); + if (root == null) return memberList; + + Iterator it = ((Group) root).depthFirstMemberList().iterator(); + Group g = null; + Object uObj = null; + while (it.hasNext()) { + uObj = it.next(); + + if (uObj instanceof Group) { + g = (Group) uObj; + if (g.getPath() != null) // add this check to get rid of null exception + { + if ((this.isRoot() && g.isRoot()) + || (this.getPath().equals(g.getPath()) && + g.getName().endsWith(this.getName()))) { + memberList = g.getMemberList(); + break; + } + } + } + } + } + + return memberList; + } + + /** + * @return the members of this Group in breadth-first order. + */ + public List breadthFirstMemberList() { + Vector members = new Vector<>(); + Queue queue = new LinkedList<>(); + HObject currentObj = this; + + queue.addAll(((Group) currentObj).getMemberList()); + + while(!queue.isEmpty()) { + currentObj = queue.remove(); + members.add(currentObj); + + if(currentObj instanceof Group && ((Group) currentObj).getNumberOfMembersInFile() > 0) { + queue.addAll(((Group) currentObj).getMemberList()); + } + } + + return members; + } + + /** + * @return the members of this Group in depth-first order. 
+ */ + public List<HObject> depthFirstMemberList() { + Vector<HObject> members = new Vector<>(); + Stack<HObject> stack = new Stack<>(); + HObject currentObj = this; + + // Push elements onto the stack in reverse order + List<HObject> list = ((Group) currentObj).getMemberList(); + for(int i = list.size() - 1; i >= 0; i--) { + stack.push(list.get(i)); + } + + while(!stack.empty()) { + currentObj = stack.pop(); + members.add(currentObj); + + if(currentObj instanceof Group && ((Group) currentObj).getNumberOfMembersInFile() > 0) { + list = ((Group) currentObj).getMemberList(); + for(int i = list.size() - 1; i >= 0; i--) { + stack.push(list.get(i)); + } + } + } + + return members; + } + + /** + * Sets the name of the group. + *

+ * setName (String newName) changes the name of the group in memory and + * file. + *

+ * setName() updates the path in memory for all the objects that are under + * the group with the new name. + * + * @param newName + * The new name of the group. + * + * @throws Exception if the name can not be set + */ + @Override + public void setName(String newName) throws Exception { + super.setName(newName); + + if (memberList != null) { + int n = memberList.size(); + HObject theObj = null; + for (int i = 0; i < n; i++) { + theObj = memberList.get(i); + theObj.setPath(this.getPath() + newName + HObject.SEPARATOR); + } + } + } + + /** @return the parent group. */ + public final Group getParent() { + return parent; + } + + /** + * Checks if it is a root group. + * + * @return true if the group is a root group; otherwise, returns false. + */ + public final boolean isRoot() { + return (parent == null); + } + + /** + * Returns the total number of members of this group in file. + * + * Current Java applications such as HDFView cannot handle files with large + * numbers of objects (1,000,000 or more objects) due to JVM memory + * limitation. The max_members is used so that applications such as HDFView + * will load up to max_members number of objects. If the number of + * objects in file is larger than max_members, only + * max_members are loaded in memory. + *

+ * getNumberOfMembersInFile() returns the number of objects in this group. + * The number of objects in memory is obtained by getMemberList().size(). + * + * @return Total number of members of this group in the file. + */ + public int getNumberOfMembersInFile() { + return nMembersInFile; + } + + /** + * Get the HObject at the specified index in this Group's member list. + * @param idx The index of the HObject to get. + * @return The HObject at the specified index. + */ + public HObject getMember(int idx) { + if(memberList.size() <= 0 || idx >= memberList.size()) return null; + + return memberList.get(idx); + } +} diff --git a/src/main/java/hdf/object/HObject.java b/src/main/java/hdf/object/HObject.java new file mode 100644 index 0000000..e9a74da --- /dev/null +++ b/src/main/java/hdf/object/HObject.java @@ -0,0 +1,562 @@ +/***************************************************************************** + * Copyright by The HDF Group. * + * Copyright by the Board of Trustees of the University of Illinois. * + * All rights reserved. * + * * + * This file is part of the HDF Java Products distribution. * + * The full copyright notice, including terms governing use, modification, * + * and redistribution, is contained in the files COPYING and Copyright.html. * + * COPYING can be found at the root of the source code distribution tree. * + * Or, see https://support.hdfgroup.org/products/licenses.html * + * If you do not have access to either file, you may request a copy from * + * help@hdfgroup.org. * + ****************************************************************************/ + +package hdf.object; + +import java.io.Serializable; + +/** + * The HObject class is the root class of all the HDF data objects. Every data + * class has HObject as a superclass. All objects implement the methods of this + * class. The following is the inherited structure of HDF Objects. + * + *

+ *                                 HObject
+ *          __________________________|________________________________
+ *          |                         |                               |
+ *        Group                    Dataset                        Datatype
+ *          |                _________|___________                    |
+ *          |                |                   |                    |
+ *          |             ScalarDS          CompoundDS                |
+ *          |                |                   |                    |
+ *    ---------------------Implementing classes such as-------------------------
+ *      ____|____       _____|______        _____|_____          _____|_____
+ *      |       |       |          |        |         |          |         |
+ *   H5Group H4Group H5ScalarDS H4ScalarDS H5CompDS H4CompDS H5Datatype H4Datatype
+ *
+ * 
+ * + * All HDF4 and HDF5 data objects are inherited from HObject. At the top level + * of the hierarchy, both HDF4 and HDF5 have the same super-classes, such as + * Group and Dataset. At the bottom level of the hierarchy, HDF4 and HDF5 + * objects have their own implementation, such as H5Group, H5ScalarDS, + * H5CompoundDS, and H5Datatype. + *

+ * Warning: HDF4 and HDF5 may have multiple links to the same object. Data + * objects in this model do not deal with multiple links. Users may create + * duplicate copies of the same data object with different paths. Applications + * should check the OID of the data object to avoid duplicate copies of the same + * object. + *
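+ * For example (a sketch; a and b are HObjects from the same file, and getOID() + * is assumed here to return the stored oid array): + * + *
+ * if (a.equalsOID(b.getOID())) {
+ *     // a and b are links to the same underlying object
+ * }
+ * 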

+ * HDF4 objects are uniquely identified by the OID (tag_id, ref_id) pair. The + * ref_id is the object reference count. The tag_id is a pre-defined number to + * identify the type of object. For example, DFTAG_RI is for raster image, + * DFTAG_SD is for scientific dataset, and DFTAG_VG is for Vgroup. + *

+ * HDF5 objects are uniquely identified by the OID containing just the object + * reference. The OID is usually obtained by H5Rcreate(). The following example + * shows how to retrieve an object ID from a file: + * + *
+ * <pre>
+ * // retrieve the object ID
+ * try {
+ *     byte[] ref_buf = H5.H5Rcreate(h5file.getFID(), this.getFullName(), HDF5Constants.H5R_OBJECT, -1);
+ *     long[] oid = new long[1];
+ *     oid[0] = HDFNativeData.byteToLong(ref_buf, 0);
+ * } catch (Exception ex) {
+ * }
+ * </pre>
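+ *
+ * The OID retrieved above can then be used to detect multiple links to the
+ * same object; a minimal sketch, assuming "otherObj" is a second HObject
+ * obtained from the same file:
+ *
+ * <pre>
+ * if (otherObj.equalsOID(oid)) {
+ *     // both names resolve to the same underlying HDF5 object
+ * }
+ * </pre>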
+ * + * @version 2.0 4/2/2018 + * @author Peter X. Cao, Jordan T. Henderson + * @see hdf.object.DataFormat + */ +public abstract class HObject implements Serializable { + + /** + * The serialVersionUID is a universal version identifier for a Serializable + * class. Deserialization uses this number to ensure that a loaded class + * corresponds exactly to a serialized object. For details, see + * http://java.sun.com/j2se/1.5.0/docs/api/java/io/Serializable.html + */ + private static final long serialVersionUID = -1723666708199882519L; + + private static final org.slf4j.Logger log = org.slf4j.LoggerFactory.getLogger(HObject.class); + + /** + * The separator of object path, i.e. "/". + */ + public static final String SEPARATOR = "/"; + + /** + * The full path of the file that contains the object. + */ + private String filename; + + /** + * The file which contains the object + */ + protected final FileFormat fileFormat; + + /** + * The name of the data object. The root group has its default name, a + * slash. The name can be changed except the root group. + */ + private String name; + + /** + * The full path of the data object. The full path always starts with the + * root, a slash. The path cannot be changed. Also, a path must be ended with a + * slash. For example, /arrays/ints/ + */ + private String path; + + /** The full name of the data object, i.e. "path + name" */ + private String fullName; + + /** + * Array of long integer storing unique identifier for the object. + *
+     * <p>
+     * HDF4 objects are uniquely identified by a (tag_id, ref_id) pair, i.e.
+     * oid[0] = tag, oid[1] = ref_id. <br>
+ * HDF5 objects are uniquely identified by an object reference. i.e. + * oid[0] = obj_id. + */ + protected long[] oid; + + /** + * The name of the Target Object that is being linked to. + */ + protected String linkTargetObjName; + + /** + * Number of attributes attached to the object. + */ + // protected int nAttributes = -1; + + /** + * Constructs an instance of a data object without name and path. + */ + public HObject() { + this(null, null, null, null); + } + + /** + * Constructs an instance of a data object with specific name and path. + *
+     * <p>
+ * For example, in H5ScalarDS(h5file, "dset", "/arrays"), "dset" is the name + * of the dataset, "/arrays" is the group path of the dataset. + * + * @param theFile + * the file that contains the data object. + * @param theName + * the name of the data object, e.g. "dset". + * @param thePath + * the group path of the data object, e.g. "/arrays". + */ + public HObject(FileFormat theFile, String theName, String thePath) { + this(theFile, theName, thePath, null); + } + + /** + * Constructs an instance of a data object with specific name and path. + *
+     * <p>
+ * For example, in H5ScalarDS(h5file, "dset", "/arrays"), "dset" is the name + * of the dataset, "/arrays" is the group path of the dataset. + * + * @param theFile + * the file that contains the data object. + * @param theName + * the name of the data object, e.g. "dset". + * @param thePath + * the group path of the data object, e.g. "/arrays". + * @param oid + * the ids of the data object. + */ + @Deprecated + public HObject(FileFormat theFile, String theName, String thePath, long[] oid) { + this.fileFormat = theFile; + this.oid = oid; + + if (fileFormat != null) { + this.filename = fileFormat.getFilePath(); + } + else { + this.filename = null; + } + + // file name is packed in the full path + if ((theName == null) && (thePath != null)) { + if (thePath.equals(SEPARATOR)) { + theName = SEPARATOR; + thePath = null; + } + else { + // the path must starts with "/" + if (!thePath.startsWith(HObject.SEPARATOR)) { + thePath = HObject.SEPARATOR + thePath; + } + + // get rid of the last "/" + if (thePath.endsWith(HObject.SEPARATOR)) { + thePath = thePath.substring(0, thePath.length() - 1); + } + + // separate the name and the path + theName = thePath.substring(thePath.lastIndexOf(SEPARATOR) + 1); + thePath = thePath.substring(0, thePath.lastIndexOf(SEPARATOR)); + } + } + else if ((theName != null) && (thePath == null) && (theName.indexOf(SEPARATOR) >= 0)) { + if (theName.equals(SEPARATOR)) { + theName = SEPARATOR; + thePath = null; + } + else { + // the full name must starts with "/" + if (!theName.startsWith(SEPARATOR)) { + theName = SEPARATOR + theName; + } + + // the fullname must not end with "/" + int n = theName.length(); + if (theName.endsWith(SEPARATOR)) { + theName = theName.substring(0, n - 1); + } + + int idx = theName.lastIndexOf(SEPARATOR); + if (idx < 0) { + thePath = SEPARATOR; + } + else { + thePath = theName.substring(0, idx); + theName = theName.substring(idx + 1); + } + } + } + + // the path must start and end with "/" + if (thePath != null) { + thePath = thePath.replaceAll("//", "/"); + if (!thePath.endsWith(SEPARATOR)) { + thePath += SEPARATOR; + } + } + + this.name = theName; + this.path = thePath; + + log.trace("name={} path={}", this.name, this.path); + + if (thePath != null) { + this.fullName = thePath + theName; + } + else { + if (theName == null) { + this.fullName = "/"; + } + else if (theName.startsWith("/")) { + this.fullName = theName; + } + else { + if (this instanceof Attribute) + this.fullName = theName; + else + this.fullName = "/" + theName; + } + } + + log.trace("fullName={}", this.fullName); + } + + /** + * Print out debug information + *
+     * <p>
+ * + * @param msg + * the debug message to print + */ + protected final void debug(Object msg) { + System.out.println("*** " + this.getClass().getName() + ": " + msg); + } + + /** + * Returns the name of the file that contains this data object. + *
+     * <p>
+     * The file name is necessary because the file of this data object is
+     * uniquely identified when multiple files are opened by an application at
+     * the same time.
+     *
+     * @return The full path (path + name) of the file.
+     */
+    public final String getFile() {
+        return filename;
+    }
+
+    /**
+     * Returns the name of the object. For example, "Raster Image #2".
+     *
+     * @return The name of the object.
+     */
+    public final String getName() {
+        return name;
+    }
+
+    /**
+     * Returns the name of the target object that is linked to.
+     *
+     * @return The name of the object that is linked to.
+     */
+    public final String getLinkTargetObjName() {
+        return linkTargetObjName;
+    }
+
+    /**
+     * Sets the name of the target object that is linked to.
+     *
+     * @param targetObjName
+     *            The new name of the object.
+     */
+    public final void setLinkTargetObjName(String targetObjName) {
+        linkTargetObjName = targetObjName;
+    }
+
+    /**
+     * Returns the full name (group path + object name) of the object. For
+     * example, "/Images/Raster Image #2"
+     *
+     * @return The full name (group path + object name) of the object.
+     */
+    public final String getFullName() {
+        return fullName;
+    }
+
+    /**
+     * Returns the group path of the object. For example, "/Images".
+     *
+     * @return The group path of the object.
+     */
+    public final String getPath() {
+        return path;
+    }
+
+    /**
+     * Sets the name of the object.
+     *
+     * setName(String newName) changes the name of the object in the file.
+     *
+     * @param newName
+     *            The new name of the object.
+     *
+     * @throws Exception if name is root or contains separator
+     */
+    public void setName(String newName) throws Exception {
+        if (newName != null) {
+            if (newName.equals(HObject.SEPARATOR)) {
+                throw new IllegalArgumentException("The new name cannot be the root");
+            }
+
+            if (newName.startsWith(HObject.SEPARATOR)) {
+                newName = newName.substring(1);
+            }
+
+            if (newName.endsWith(HObject.SEPARATOR)) {
+                // strip the single trailing separator character
+                newName = newName.substring(0, newName.length() - 1);
+            }
+
+            if (newName.contains(HObject.SEPARATOR)) {
+                throw new IllegalArgumentException("The new name contains the SEPARATOR character: " + HObject.SEPARATOR);
+            }
+        }
+
+        name = newName;
+    }
+
+    /**
+     * Sets the path of the object.
+     * <p>
+ * setPath() is needed to change the path for an object when the name of a + * group containing the object is changed by setName(). The path of the + * object in memory under this group should be updated to the new path to + * the group. Unlike setName(), setPath() does not change anything in file. + * + * @param newPath + * The new path of the object. + * + * @throws Exception if a failure occurred + */ + public void setPath(String newPath) throws Exception { + if (newPath == null) { + newPath = "/"; + } + + path = newPath; + } + + /** + * Opens an existing object such as a dataset or group for access. + * + * The return value is an object identifier obtained by implementing classes + * such as H5.H5Dopen(). This function is needed to allow other objects to + * be able to access the object. For instance, H5File class uses the open() + * function to obtain object identifier for copyAttributes(long src_id, long + * dst_id) and other purposes. The open() function should be used in pair + * with close(long) function. + * + * @see HObject#close(long) + * + * @return the object identifier if successful; otherwise returns a negative + * value. + */ + public abstract long open(); + + /** + * Closes access to the object. + *
+     * <p>
+     * Sub-classes must implement this interface because different data objects
+     * have their own ways of closing their data resources.
+     * <p>
+     * For example, H5Group.close() calls the hdf.hdf5lib.H5.H5Gclose()
+     * method and closes the group resource specified by the group id.
+     *
+     * @param id
+     *            The object identifier.
+     */
+    public abstract void close(long id);
+
+    /**
+     * Returns the file identifier of the file containing the object.
+     *
+     * @return the file identifier of the file containing the object.
+     */
+    public final long getFID() {
+        if (fileFormat != null) {
+            return fileFormat.getFID();
+        }
+        else {
+            return -1;
+        }
+    }
+
+    /**
+     * Returns the file that contains the object.
+     *
+     * @return The file that contains the object.
+     */
+    public final FileFormat getFileFormat() {
+        return fileFormat;
+    }
+
+    /**
+     * Returns a cloned copy of the object identifier.
+     * <p>
+ * The object OID cannot be modified once it is created. getOID() clones the object OID to ensure + * the object OID cannot be modified outside of this class. + * + * @return the cloned copy of the object OID. + */ + public final long[] getOID() { + if (oid == null) { + return null; + } + + return oid.clone(); + } + + /** + * Checks if the OID of the object is the same as the given object identifier within the same file. + *
+     * <p>
+     * HDF4 and HDF5 data objects are identified by their unique OIDs. A data object in a file may have
+     * multiple logical names, which are represented in a graph structure as separate objects.
+     * <p>
+     * The HObject.equalsOID(long[] theID) can be used to check if two data objects with different names
+     * point to the same object within the same file.
+     *
+     * @param theID
+     *            The list of object identifiers.
+     *
+     * @return true if the ID of the object equals the given OID; otherwise, returns false.
+     */
+    public final boolean equalsOID(long[] theID) {
+        if ((theID == null) || (oid == null)) {
+            return false;
+        }
+
+        int n1 = theID.length;
+        int n2 = oid.length;
+
+        if (n1 == 0 || n2 == 0) {
+            return false;
+        }
+
+        int n = Math.min(n1, n2);
+        boolean isMatched = (theID[0] == oid[0]);
+
+        for (int i = 1; isMatched && (i < n); i++) {
+            isMatched = (theID[i] == oid[i]);
+        }
+
+        return isMatched;
+    }
+
+    /**
+     * Returns the name of the object.
+     * <p>
+     * This method overrides the toString() method in the Java Object class
+     * (the root class of all Java objects) so that it returns the name of the
+     * HObject instead of the name of the class.
+     * <p>
+ * For example, toString() returns "Raster Image #2" instead of + * "hdf.object.h4.H4SDS". + * + * @return The name of the object. + */ + @Override + public String toString() { + if (this instanceof Group) { + if (((Group) this).isRoot() && this.getFileFormat() != null) return this.getFileFormat().getName(); + } + + if (name != null) return name; + + return super.toString(); + } + + /** + * Returns whether this HObject is equal to the specified HObject by comparing their OIDs. + * + * @param obj + * The object + * + * @return true if the object is equal by OID + */ + public boolean equals(HObject obj) { + // Cast down to Object to avoid infinite recursion + if (this.equals((Object) obj)) + return true; + + // comparing the state of OID with + // the state of 'this' OID. + return this.equalsOID(obj.getOID()); + } + + @Override + public boolean equals(Object obj) { + if (obj == null) + return false; + + // checking if both the object references are + // referring to the same object. + if (this == obj) + return true; + + return false; + } + + @Override + public int hashCode() { + // We are returning the OID as a hashcode value. + return (int) oid[0]; + } +} diff --git a/src/main/java/hdf/object/MetaDataContainer.java b/src/main/java/hdf/object/MetaDataContainer.java new file mode 100644 index 0000000..e3897cb --- /dev/null +++ b/src/main/java/hdf/object/MetaDataContainer.java @@ -0,0 +1,91 @@ +/***************************************************************************** + * Copyright by The HDF Group. * + * Copyright by the Board of Trustees of the University of Illinois. * + * All rights reserved. * + * * + * This file is part of the HDF Java Products distribution. * + * The full copyright notice, including terms governing use, modification, * + * and redistribution, is contained in the files COPYING and Copyright.html. * + * COPYING can be found at the root of the source code distribution tree. * + * Or, see https://support.hdfgroup.org/products/licenses.html * + * If you do not have access to either file, you may request a copy from * + * help@hdfgroup.org. * + ****************************************************************************/ + +package hdf.object; + +import java.util.List; + +/** + * An interface that provides general I/O operations for object metadata + * attached to an object. For example, reading metadata content from the file + * into memory or writing metadata content from memory into the file. + *
+ * <p>
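+ * A minimal usage sketch (assuming "obj" is a data object, such as a dataset
+ * or group, that implements this interface):
+ *
+ * <pre>
+ * MetaDataContainer meta = (MetaDataContainer) obj;
+ * if (meta.hasAttribute()) {
+ *     List attrList = meta.getMetadata(); // list of Attribute instances
+ *     Object attr = attrList.get(0);
+ *     // ... change the attribute value in memory ...
+ *     meta.writeMetadata(attr); // flush the updated value back to the file
+ * }
+ * </pre>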
+ * + * @see HObject + * + * @version 2.0 4/2/2018 + * @author Peter X. Cao, Jordan T. Henderson + */ +@SuppressWarnings("rawtypes") +public interface MetaDataContainer { + /** + * Retrieves the object's metadata, such as attributes, from the file. + *
+     * <p>
+ * Metadata, such as attributes, is stored in a List. + * + * @return the list of metadata objects. + * + * @throws Exception + * if the metadata can not be retrieved + */ + public abstract List getMetadata() throws Exception; + + /** + * Writes a specific piece of metadata (such as an attribute) into the file. + * + * If an HDF(4&5) attribute exists in the file, this method updates its + * value. If the attribute does not exist in the file, it creates the + * attribute in the file and attaches it to the object. It will fail to + * write a new attribute to the object where an attribute with the same name + * already exists. To update the value of an existing attribute in the file, + * one needs to get the instance of the attribute by getMetadata(), change + * its values, then use writeMetadata() to write the value. + * + * @param metadata + * the metadata to write. + * + * @throws Exception + * if the metadata can not be written + */ + public abstract void writeMetadata(Object metadata) throws Exception; + + /** + * Deletes an existing piece of metadata from this object. + * + * @param metadata + * the metadata to delete. + * + * @throws Exception + * if the metadata can not be removed + */ + public abstract void removeMetadata(Object metadata) throws Exception; + + /** + * Updates an existing piece of metadata attached to this object. + * + * @param metadata + * the metadata to update. + * + * @throws Exception + * if the metadata can not be updated + */ + public abstract void updateMetadata(Object metadata) throws Exception; + + /** + * Check if the object has any attributes attached. + * + * @return true if it has any attributes, false otherwise. + */ + public abstract boolean hasAttribute(); +} diff --git a/src/main/java/hdf/object/ScalarDS.java b/src/main/java/hdf/object/ScalarDS.java new file mode 100644 index 0000000..1af9962 --- /dev/null +++ b/src/main/java/hdf/object/ScalarDS.java @@ -0,0 +1,450 @@ +/***************************************************************************** + * Copyright by The HDF Group. * + * Copyright by the Board of Trustees of the University of Illinois. * + * All rights reserved. * + * * + * This file is part of the HDF Java Products distribution. * + * The full copyright notice, including terms governing use, modification, * + * and redistribution, is contained in the files COPYING and Copyright.html. * + * COPYING can be found at the root of the source code distribution tree. * + * Or, see https://support.hdfgroup.org/products/licenses.html * + * If you do not have access to either file, you may request a copy from * + * help@hdfgroup.org. * + ****************************************************************************/ + +package hdf.object; + +import java.util.Iterator; +import java.util.List; +import java.util.Vector; + +/** + * A scalar dataset is a multiple dimension array of scalar points. The Datatype of a scalar dataset must be an atomic + * datatype. Common datatypes of scalar datasets include char, byte, short, int, long, float, double and string. + *
+ * <p>
+ * A ScalarDS can be an image or spreadsheet data. ScalarDS defines methods to deal with both images and + * spreadsheets. + *
+ * <p>
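+ * For instance, a viewer might branch on the image flag before deciding how to
+ * render the data; a rough sketch, assuming "sds" is a ScalarDS obtained from
+ * an opened file:
+ *
+ * <pre>
+ * sds.init();
+ * if (sds.isImage()) {
+ *     byte[][] pal = sds.getPalette(); // may be null
+ *     // render the data as an image
+ * }
+ * else {
+ *     // render the data as a spreadsheet/table
+ * }
+ * </pre>
+ * <p>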
+ * ScalarDS is an abstract class. Current implementing classes are the H4SDS, H5GRImage and H5ScalarDS. + * + * @version 1.1 9/4/2007 + * @author Peter X. Cao + */ +public abstract class ScalarDS extends Dataset { + private static final long serialVersionUID = 8925371455928203981L; + + private static final org.slf4j.Logger log = org.slf4j.LoggerFactory.getLogger(ScalarDS.class); + + /************************************************************ + * The following constant strings are copied from * + * https://support.hdfgroup.org/HDF5/doc/ADGuide/ImageSpec.html * + * to make the definition consistent with the image specs. * + ************************************************************/ + + /** + * Indicates that the pixel RGB values are contiguous. + */ + public static final int INTERLACE_PIXEL = 0; + + /** Indicates that each pixel component of RGB is stored as a scan line. */ + public static final int INTERLACE_LINE = 1; + + /** Indicates that each pixel component of RGB is stored as a plane. */ + public static final int INTERLACE_PLANE = 2; + + /** + * The interlace mode of the stored raster image data. Valid values are INTERLACE_PIXEL, INTERLACE_LINE and + * INTERLACE_PLANE. + */ + protected int interlace; + + /** + * The min-max range of image data values. For example, [0, 255] indicates the min is 0, and the max is 255. + */ + protected double[] imageDataRange; + + /** + * The indexed RGB color model with 256 colors. + *
+     * <p>
+ * The palette values are stored in a two-dimensional byte array and arrange by color components of red, green and + * blue. palette[][] = byte[3][256], where, palette[0][], palette[1][] and palette[2][] are the red, green and blue + * components respectively. + */ + protected byte[][] palette; + + /** + * True if this dataset is an image. + */ + protected boolean isImage; + + /** + * True if this dataset is a true color image. + */ + protected boolean isTrueColor; + + /** + * True if this dataset is ASCII text. + */ + protected boolean isText; + + /** + * Flag to indicate is the original unsigned C data is converted. + */ + protected boolean unsignedConverted; + + /** The fill value of the dataset. */ + protected Object fillValue = null; + + private List filteredImageValues; + + /** Flag to indicate if the dataset is displayed as an image. */ + protected boolean isImageDisplay; + + /** + * Flag to indicate if the dataset is displayed as an image with default order of dimensions. + */ + protected boolean isDefaultImageOrder; + + /** + * Flag to indicate if the FillValue is converted from unsigned C. + */ + public boolean isFillValueConverted; + + /** + * Constructs an instance of a ScalarDS with specific name and path. An HDF data object must have a name. The path + * is the group path starting from the root. + *
+     * <p>
+ * For example, in H5ScalarDS(h5file, "dset", "/arrays/"), "dset" is the name of the dataset, "/arrays" is the group + * path of the dataset. + * + * @param theFile + * the file that contains the data object. + * @param theName + * the name of the data object, e.g. "dset". + * @param thePath + * the full path of the data object, e.g. "/arrays/". + */ + public ScalarDS(FileFormat theFile, String theName, String thePath) { + this(theFile, theName, thePath, null); + } + + /** + * @deprecated Not for public use in the future.
+ * Using {@link #ScalarDS(FileFormat, String, String)} + * + * @param theFile + * the file that contains the data object. + * @param theName + * the name of the data object, e.g. "dset". + * @param thePath + * the full path of the data object, e.g. "/arrays/". + * @param oid + * the v of the data object. + */ + @Deprecated + public ScalarDS(FileFormat theFile, String theName, String thePath, long[] oid) { + super(theFile, theName, thePath, oid); + + palette = null; + isImage = false; + isTrueColor = false; + isText = false; + interlace = -1; + imageDataRange = null; + isImageDisplay = false; + isDefaultImageOrder = true; + isFillValueConverted = false; + filteredImageValues = new Vector<>(); + } + + /* + * (non-Javadoc) + * + * @see hdf.object.Dataset#clearData() + */ + @Override + public void clearData() { + super.clearData(); + unsignedConverted = false; + } + + /** + * Converts the data values of this dataset to appropriate Java integer if they are unsigned integers. + * + * @see Dataset#convertToUnsignedC(Object) + * @see Dataset#convertFromUnsignedC(Object, Object) + * + * @return the converted data buffer. + */ + @Override + public Object convertFromUnsignedC() { + log.trace("convertFromUnsignedC(): start"); + // keep a copy of original buffer and the converted buffer + // so that they can be reused later to save memory + log.trace("convertFromUnsignedC(): unsigned={}", getDatatype().isUnsigned()); + if ((data != null) && getDatatype().isUnsigned() && !unsignedConverted) { + log.trace("convertFromUnsignedC(): convert"); + originalBuf = data; + convertedBuf = convertFromUnsignedC(originalBuf, convertedBuf); + data = convertedBuf; + unsignedConverted = true; + + if (fillValue != null) { + if (!isFillValueConverted) { + fillValue = convertFromUnsignedC(fillValue, null); + isFillValueConverted = true; + } + } + } + + log.trace("convertFromUnsignedC(): finish"); + return data; + } + + /** + * Converts Java integer data of this dataset back to unsigned C-type integer data if they are unsigned integers. + * + * @see Dataset#convertToUnsignedC(Object) + * @see Dataset#convertToUnsignedC(Object, Object) + * @see #convertFromUnsignedC(Object data_in) + * + * @return the converted data buffer. + */ + @Override + public Object convertToUnsignedC() { + log.trace("convertToUnsignedC(): start"); + // keep a copy of original buffer and the converted buffer + // so that they can be reused later to save memory + log.trace("convertToUnsignedC(): unsigned={}", getDatatype().isUnsigned()); + if ((data != null) && getDatatype().isUnsigned()) { + log.trace("convertToUnsignedC(): convert"); + convertedBuf = data; + originalBuf = convertToUnsignedC(convertedBuf, originalBuf); + data = originalBuf; + } + + log.trace("convertToUnsignedC(): finish"); + return data; + } + + /** + * Returns the palette of this scalar dataset or null if palette does not exist. + *
+     * <p>
+     * A Scalar dataset can be displayed as spreadsheet data or an image. When a scalar dataset is displayed as an
+     * image, the palette or color table may be needed to translate a pixel value to color components (for example,
+     * red, green, and blue). Some scalar datasets have no palette and some datasets have one or more palettes. If an
+     * associated palette exists but is not loaded, this interface retrieves the palette from the file and returns
+     * it. If the palette is already loaded, it returns the palette. It returns null if there is no palette
+     * associated with the dataset.
+     * <p>
+     * The current implementation only supports the indexed RGB palette model with 256 colors. Other models, such as
+     * "YUV", "CMY", "CMYK", "YCbCr" and "HSV", will be supported in the future.
+     * <p>
+     * The palette values are stored in a two-dimensional byte array and are arranged by the color components red,
+     * green and blue. palette[][] = byte[3][256], where palette[0][], palette[1][] and palette[2][] are the red,
+     * green and blue components respectively.
+     * <p>
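+     * With that layout, translating a single 8-bit pixel value into its color
+     * components is a table lookup; a sketch, where "pixel" is a hypothetical
+     * raw pixel byte:
+     *
+     * <pre>
+     * byte[][] pal = getPalette();
+     * int v = pixel & 0xff; // unsigned index into the 256-entry table
+     * byte r = pal[0][v];
+     * byte g = pal[1][v];
+     * byte b = pal[2][v];
+     * </pre>
+     * <p>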
+ * Sub-classes have to implement this interface. HDF4 and HDF5 images use different libraries to retrieve the + * associated palette. + * + * @return the 2D palette byte array. + */ + public abstract byte[][] getPalette(); + + /** + * Sets the palette for this dataset. + * + * @param pal + * the 2D palette byte array. + */ + public final void setPalette(byte[][] pal) { + palette = pal; + } + + /** + * Reads a specific image palette from file. + *
+     * <p>
+ * A scalar dataset may have multiple palettes attached to it. readPalette(int idx) returns a specific palette + * identified by its index. + * + * @param idx + * the index of the palette to read. + * + * @return the image palette + */ + public abstract byte[][] readPalette(int idx); + + /** + * Get the name of a specific image palette from file. + *
+     * <p>
+ * A scalar dataset may have multiple palettes attached to it. getPaletteName(int idx) returns the name of a + * specific palette identified by its index. + * + * @param idx + * the index of the palette to retrieve the name. + * + * @return The name of the palette + */ + public String getPaletteName(int idx) { + String paletteName = "Default "; + if (idx != 0) + paletteName = "Default " + idx; + return paletteName; + } + + /** + * Returns the byte array of palette refs. + *
+     * <p>
+ * A palette reference is an object reference that points to the palette dataset. + *
+     * <p>
+     * For example, the dataset "Iceberg" has an attribute of object reference "Palette". The attribute "Palette"
+     * has the value "2538", which is the object reference of the palette dataset "Iceberg Palette".
+     *
+     * @return null if there is no palette attribute attached to this dataset.
+     */
+    public abstract byte[] getPaletteRefs();
+
+    /**
+     * Returns true if this dataset is an image.
+     * <p>
+     * All images must have an attribute called "CLASS" whose value is "IMAGE". For more
+     * details, read the HDF5 Image and Palette Specification.
+     *
+     * @return true if the dataset is an image; otherwise, returns false.
+     */
+    public final boolean isImage() {
+        return isImage;
+    }
+
+    /**
+     * Returns true if this dataset is displayed as an image.
+     * <p>
+     * A ScalarDS can be displayed as an image or a spreadsheet in a table.
+     *
+     * @return true if this dataset is displayed as an image; otherwise, returns false.
+     */
+    public final boolean isImageDisplay() {
+        return isImageDisplay;
+    }
+
+    /**
+     * Returns true if this dataset is displayed as an image with default image order.
+     * <p>
+ * A ScalarDS can be displayed as an image with different orders of dimensions. + * + * @return true if this dataset is displayed as an image with default image order; otherwise, returns false. + */ + public final boolean isDefaultImageOrder() { + return isDefaultImageOrder; + } + + /** + * Sets the flag to display the dataset as an image. + * + * @param b + * if b is true, display the dataset as an image + */ + public final void setIsImageDisplay(boolean b) { + isImageDisplay = b; + } + + /** + * Sets the flag to indicate this dataset is an image. + * + * @param b + * if b is true, the dataset is an image. + */ + public final void setIsImage(boolean b) { + isImage = b; + } + + /** + * Sets data range for an image. + * + * @param min + * the data range start. + * @param max + * the data range end. + */ + public final void setImageDataRange(double min, double max) { + if (max <= min) + return; + + if (imageDataRange == null) + imageDataRange = new double[2]; + + imageDataRange[0] = min; + imageDataRange[1] = max; + } + + /** + * Add a value that will be filtered out in an image. + * + * @param x + * value to be filtered + */ + public void addFilteredImageValue(Number x) { + Iterator it = filteredImageValues.iterator(); + while (it.hasNext()) { + if (it.next().toString().equals(x.toString())) + return; + } + + filteredImageValues.add(x); + } + + /** + * Get a list of values that will be filtered out in an image. + * + * @return the list of Image values + */ + public List getFilteredImageValues() { + return filteredImageValues; + } + + /** + * @return true if this dataset is a true color image. + * + */ + + public final boolean isTrueColor() { + return isTrueColor; + } + + /** + * Returns the interlace mode of a true color image (RGB). + * + * Valid values: + * + *
+     * <pre>
+     *     INTERLACE_PIXEL -- RGB components are contiguous, i.e. rgb, rgb, rgb, ...
+     *     INTERLACE_LINE -- each RGB component is stored as a scan line
+     *     INTERLACE_PLANE -- each RGB component is stored as a plane
+     * </pre>
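+     *
+     * For INTERLACE_PIXEL data, for example, the components of the pixel at
+     * (x, y) in a width-by-height image sit at consecutive offsets; a sketch
+     * of the index math (array and variable names hypothetical):
+     *
+     * <pre>
+     *     int base = (y * width + x) * 3;
+     *     byte r = rgb[base];
+     *     byte g = rgb[base + 1];
+     *     byte b = rgb[base + 2];
+     * </pre>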
+ * + * @return the interlace mode of a true color image (RGB). + */ + public final int getInterlace() { + return interlace; + } + + /** + * Returns the (min, max) pair of image data range. + * + * @return the (min, max) pair of image data range. + */ + public double[] getImageDataRange() { + return imageDataRange; + } + + /** + * Returns the fill values for the dataset. + * + * @return the fill values for the dataset. + */ + @Override + public final Object getFillValue() { + return fillValue; + } +} diff --git a/src/main/java/hdf/object/Utils.java b/src/main/java/hdf/object/Utils.java new file mode 100644 index 0000000..36de85e --- /dev/null +++ b/src/main/java/hdf/object/Utils.java @@ -0,0 +1,48 @@ +/***************************************************************************** + * Copyright by The HDF Group. * + * Copyright by the Board of Trustees of the University of Illinois. * + * All rights reserved. * + * * + * This file is part of the HDF Java Products distribution. * + * The full copyright notice, including terms governing use, modification, * + * and redistribution, is contained in the files COPYING and Copyright.html. * + * COPYING can be found at the root of the source code distribution tree. * + * Or, see https://support.hdfgroup.org/products/licenses.html * + * If you do not have access to either file, you may request a copy from * + * help@hdfgroup.org. * + ****************************************************************************/ + +package hdf.object; + +public final class Utils { + private Utils() { + throw new IllegalStateException("Utility class"); + } + + /** + * Retrieves the Java Runtime Class of the given Object. B = byte array, S = short array, I = int + * array, J = long array, F = float array, D = double array, L = class or interface + * + * @param o + * the Object to determine the Runtime Class of + * @return the Java Runtime Class of the given Object. + */ + public static char getJavaObjectRuntimeClass(Object o) { + if (o == null) + return ' '; + + String cName = o.getClass().getName(); + + if (cName.equals("java.lang.String") || cName.equals("java.util.Vector") + || cName.equals("java.util.Arrays$ArrayList") || cName.equals("java.util.ArrayList")) + return 'L'; + + int cIndex = cName.lastIndexOf('['); + if (cIndex >= 0) { + return cName.charAt(cIndex + 1); + } + + return ' '; + } + +} \ No newline at end of file diff --git a/src/main/java/hdf/object/h5/H5CompoundDS.java b/src/main/java/hdf/object/h5/H5CompoundDS.java new file mode 100644 index 0000000..69844d3 --- /dev/null +++ b/src/main/java/hdf/object/h5/H5CompoundDS.java @@ -0,0 +1,2255 @@ +/***************************************************************************** + * Copyright by The HDF Group. * + * Copyright by the Board of Trustees of the University of Illinois. * + * All rights reserved. * + * * + * This file is part of the HDF Java Products distribution. * + * The full copyright notice, including terms governing use, modification, * + * and redistribution, is contained in the files COPYING and Copyright.html. * + * COPYING can be found at the root of the source code distribution tree. * + * Or, see https://support.hdfgroup.org/products/licenses.html * + * If you do not have access to either file, you may request a copy from * + * help@hdfgroup.org. 
* + ****************************************************************************/ + +package hdf.object.h5; + +import java.lang.reflect.Array; +import java.text.DecimalFormat; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.Iterator; +import java.util.List; +import java.util.Vector; + +import hdf.hdf5lib.H5; +import hdf.hdf5lib.HDF5Constants; +import hdf.hdf5lib.HDFNativeData; +import hdf.hdf5lib.exceptions.HDF5DataFiltersException; +import hdf.hdf5lib.exceptions.HDF5Exception; +import hdf.hdf5lib.structs.H5O_info_t; +import hdf.object.Attribute; +import hdf.object.CompoundDS; +import hdf.object.Dataset; +import hdf.object.Datatype; +import hdf.object.FileFormat; +import hdf.object.Group; +import hdf.object.HObject; +import hdf.object.Utils; + +/** + * The H5CompoundDS class defines an HDF5 dataset of compound datatypes. + *
+ * <p>
+ * An HDF5 dataset is an object composed of a collection of data elements, or raw data, and metadata + * that stores a description of the data elements, data layout, and all other information necessary + * to write, read, and interpret the stored data. + *
+ * <p>
+ * An HDF5 compound datatype is similar to a struct in C or a common block in Fortran: it is a
+ * collection of one or more atomic types or small arrays of such types. Each member of a compound
+ * type has a name which is unique within that type, and a byte offset that determines the first
+ * byte (smallest byte address) of that member in a compound datum.
+ * <p>
+ * For more information on HDF5 datasets and datatypes, read the HDF5 + * User's Guide. + *
+ * <p>
+ * There are two basic types of compound datasets: simple compound data and nested compound data. + * Members of a simple compound dataset have atomic datatypes. Members of a nested compound dataset + * are compound or array of compound data. + *
+ * <p>
+ * Since Java does not understand C structures, we cannot directly read/write compound data values + * as in the following C example. + * + *
+ * <pre>
+ * typedef struct s1_t {
+ *         int    a;
+ *         float  b;
+ *         double c;
+ *         } s1_t;
+ *     s1_t       s1[LENGTH];
+ *     ...
+ *     H5Dwrite(..., s1);
+ *     H5Dread(..., s1);
+ * </pre>
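+ *
+ * Read through this class, the same data instead arrives field by field, as
+ * described below; a rough sketch, assuming "file" is an open file object and
+ * "/s1" is the compound dataset above:
+ *
+ * <pre>
+ * H5CompoundDS dset = (H5CompoundDS) file.get("/s1");
+ * dset.init();
+ * List data = (List) dset.getData();
+ * int[] a = (int[]) data.get(0); // field "a" for all LENGTH records
+ * float[] b = (float[]) data.get(1); // field "b"
+ * double[] c = (double[]) data.get(2); // field "c"
+ * </pre>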
+ * + * Values of compound data fields are stored in java.util.Vector object. We read and write compound + * data by fields instead of compound structure. As for the example above, the java.util.Vector + * object has three elements: int[LENGTH], float[LENGTH] and double[LENGTH]. Since Java understands + * the primitive datatypes of int, float and double, we will be able to read/write the compound data + * by field. + * + * @version 1.1 9/4/2007 + * @author Peter X. Cao + */ +public class H5CompoundDS extends CompoundDS { + private static final long serialVersionUID = -5968625125574032736L; + + private static final org.slf4j.Logger log = org.slf4j.LoggerFactory.getLogger(H5CompoundDS.class); + + /** + * The list of attributes of this data object. Members of the list are instance of Attribute. + */ + private List attributeList; + + private int nAttributes = -1; + + private H5O_info_t objInfo; + + /** + * A list of names of all fields including nested fields. + *
+     * <p>
+ * The nested names are separated by CompoundDS.SEPARATOR. For example, if compound dataset "A" has + * the following nested structure, + * + *
+     * <pre>
+     * A --> m01
+     * A --> m02
+     * A --> nest1 --> m11
+     * A --> nest1 --> m12
+     * A --> nest1 --> nest2 --> m21
+     * A --> nest1 --> nest2 --> m22
+     * i.e.
+     * A = { m01, m02, nest1{m11, m12, nest2{ m21, m22}}}
+     * </pre>
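+     *
+     * The flattened names can be used to select individual fields before a
+     * read; a sketch, assuming the member-selection helpers selectMember()
+     * and setAllMemberSelection() inherited from CompoundDS, with "dset" an
+     * initialized H5CompoundDS:
+     *
+     * <pre>
+     * dset.setAllMemberSelection(false); // deselect every member
+     * String[] names = dset.getMemberNames();
+     * for (int i = 0; i < names.length; i++) {
+     *     if (names[i].endsWith("m11"))
+     *         dset.selectMember(i); // read only nest1 --> m11
+     * }
+     * List data = (List) dset.getData();
+     * </pre>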
+ * + * The flatNameList of compound dataset "A" will be {m01, m02, nest1[m11, nest1[m12, + * nest1[nest2[m21, nest1[nest2[m22} + * + */ + private List flatNameList; + + /** + * A list of datatypes of all fields including nested fields. + */ + private List flatTypeList; + + /** flag to indicate if the dataset is an external dataset */ + private boolean isExternal = false; + + /** flag to indicate if the dataset is a virtual dataset */ + private boolean isVirtual = false; + private List virtualNameList; + + /* + * Enum to indicate the type of I/O to perform inside of the common I/O + * function. + */ + protected static enum IO_TYPE { + READ, WRITE + }; + + /** + * Constructs an instance of a HDF5 compound dataset with given file, dataset name and path. + *
+     * <p>
+     * The dataset object represents an existing dataset in the file. For example, new
+     * H5CompoundDS(file, "dset1", "/g0/") constructs a dataset object that corresponds to the
+     * dataset "dset1" at group "/g0/".
+     * <p>
+ * This object is usually constructed at FileFormat.open(), which loads the file structure and + * object information into memory. It is rarely used elsewhere. + * + * @param theFile + * the file that contains the data object. + * @param theName + * the name of the data object, e.g. "dset". + * @param thePath + * the full path of the data object, e.g. "/arrays/". + */ + public H5CompoundDS(FileFormat theFile, String theName, String thePath) { + this(theFile, theName, thePath, null); + } + + /** + * @deprecated Not for public use in the future.
+ * Using {@link #H5CompoundDS(FileFormat, String, String)} + * + * @param theFile + * the file that contains the data object. + * @param theName + * the name of the data object, e.g. "dset". + * @param thePath + * the full path of the data object, e.g. "/arrays/". + * @param oid + * the oid of the data object. + */ + @Deprecated + public H5CompoundDS(FileFormat theFile, String theName, String thePath, long[] oid) { + super(theFile, theName, thePath, oid); + objInfo = new H5O_info_t(-1L, -1L, 0, 0, -1L, 0L, 0L, 0L, 0L, null, null, null); + + if ((oid == null) && (theFile != null)) { + // retrieve the object ID + try { + byte[] refBuf = H5.H5Rcreate(theFile.getFID(), this.getFullName(), HDF5Constants.H5R_OBJECT, -1); + this.oid = new long[1]; + this.oid[0] = HDFNativeData.byteToLong(refBuf, 0); + } + catch (Exception ex) { + log.debug("constructor ID {} for {} failed H5Rcreate", theFile.getFID(), this.getFullName()); + } + } + } + + /* + * (non-Javadoc) + * + * @see hdf.object.HObject#open() + */ + @Override + public long open() { + log.trace("open(): start"); + + long did = -1; + + try { + did = H5.H5Dopen(getFID(), getPath() + getName(), HDF5Constants.H5P_DEFAULT); + log.trace("open(): did={}", did); + } + catch (HDF5Exception ex) { + log.debug("open(): Failed to open dataset {}: ", getPath() + getName(), ex); + did = -1; + } + + log.trace("open(): finish"); + return did; + } + + /* + * (non-Javadoc) + * + * @see hdf.object.HObject#close(int) + */ + @Override + public void close(long did) { + log.trace("close(): start"); + + if (did >= 0) { + try { + H5.H5Fflush(did, HDF5Constants.H5F_SCOPE_LOCAL); + } + catch (Exception ex) { + log.debug("close(): H5Fflush(did {}) failure: ", did, ex); + } + try { + H5.H5Dclose(did); + } + catch (HDF5Exception ex) { + log.debug("close(): H5Dclose(did {}) failure: ", did, ex); + } + } + + log.trace("close(): finish"); + } + + /** + * Retrieves datatype and dataspace information from file and sets the dataset + * in memory. + *
+     * <p>
+     * init() is designed to support lazy operation in a dataset object. When a
+     * data object is retrieved from file, the datatype, dataspace and raw data are
+     * not loaded into memory. When it is asked to read the raw data from file,
+     * init() is first called to get the datatype and dataspace information, and
+     * the raw data is then loaded from file.
+     * <p>
+     * init() is also used to reset the selection of a dataset (start, stride and
+     * count) to the default, which is the entire dataset for 1D or 2D datasets. In
+     * the following example, init() at step 1) retrieves datatype and dataspace
+     * information from file. getData() at step 3) reads only one data point. init()
+     * at step 4) resets the selection to the whole dataset. getData() at step 6)
+     * reads the values of the whole dataset into memory.
+     *
+     * <pre>
+     * dset = (Dataset) file.get(NAME_DATASET);
+     *
+     * // 1) get datatype and dataspace information from file
+     * dset.init();
+     * rank = dset.getRank(); // rank = 2, a 2D dataset
+     * count = dset.getSelectedDims();
+     * start = dset.getStartDims();
+     * dims = dset.getDims();
+     *
+     * // 2) select only one data point
+     * for (int i = 0; i < rank; i++) {
+     *     start[i] = 0;
+     *     count[i] = 1;
+     * }
+     *
+     * // 3) read one data point
+     * data = dset.getData();
+     *
+     * // 4) reset selection to the whole dataset
+     * dset.init();
+     *
+     * // 5) clean the memory data buffer
+     * dset.clearData();
+     *
+     * // 6) Read the whole dataset
+     * data = dset.getData();
+     * </pre>
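+     *
+     * A similar sketch for reading just the third row of the same 2D dataset,
+     * reusing the variables from the example above:
+     *
+     * <pre>
+     * // reset the selection, then select row 2 (all columns)
+     * dset.init();
+     * start = dset.getStartDims();
+     * count = dset.getSelectedDims();
+     * start[0] = 2;
+     * count[0] = 1;
+     * count[1] = dims[1];
+     * data = dset.getData();
+     * </pre>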
+ */ + @Override + public void init() { + log.trace("init(): start"); + + if (inited) { + resetSelection(); + log.trace("init(): Dataset already initialized"); + log.trace("init(): finish"); + return; // already called. Initialize only once + } + + long did = -1; + long tid = -1; + long sid = -1; + flatNameList = new Vector<>(); + flatTypeList = new Vector<>(); + + did = open(); + if (did >= 0) { + // check if it is an external or virtual dataset + long pid = -1; + try { + pid = H5.H5Dget_create_plist(did); + try { + int nfiles = H5.H5Pget_external_count(pid); + isExternal = (nfiles > 0); + int layoutType = H5.H5Pget_layout(pid); + if (isVirtual = (layoutType == HDF5Constants.H5D_VIRTUAL)) { + try { + long vmaps = H5.H5Pget_virtual_count(pid); + if (vmaps > 0) { + virtualNameList = new Vector<>(); + for (long next = 0; next < vmaps; next++) { + try { + String fname = H5.H5Pget_virtual_filename(pid, next); + virtualNameList.add(fname); + log.trace("init(): virtualNameList[{}]={}", next, fname); + } + catch (Exception err) { + log.trace("init(): vds[{}] continue", next); + } + } + } + } + catch (Exception err) { + log.debug("init(): vds count error: ", err); + } + } + log.trace("init(): pid={} nfiles={} isExternal={} isVirtual={}", pid, nfiles, isExternal, isVirtual); + } + catch (Exception ex) { + log.debug("init(): check if it is an external or virtual dataset:", ex); + } + } + catch (Exception ex) { + log.debug("init(): H5Dget_create_plist() failure: ", ex); + } + finally { + try { + H5.H5Pclose(pid); + } + catch (Exception ex) { + log.debug("init(): H5Pclose(pid {}) failure: ", pid, ex); + } + } + + try { + sid = H5.H5Dget_space(did); + rank = H5.H5Sget_simple_extent_ndims(sid); + tid = H5.H5Dget_type(did); + log.trace("init(): tid={} sid={} rank={}", tid, sid, rank); + + if (rank == 0) { + // a scalar data point + rank = 1; + dims = new long[1]; + dims[0] = 1; + log.trace("init(): rank is a scalar data point"); + } + else { + dims = new long[rank]; + maxDims = new long[rank]; + H5.H5Sget_simple_extent_dims(sid, dims, maxDims); + log.trace("init(): rank={}, dims={}, maxDims={}", rank, dims, maxDims); + } + + startDims = new long[rank]; + selectedDims = new long[rank]; + + try { + datatype = new H5Datatype(tid); + + log.trace("init(): tid={} has isText={} : isVLEN={} : isEnum={} : isUnsigned={} : isRegRef={}", tid, + datatype.isText(), datatype.isVLEN(), ((H5Datatype) datatype).isEnum(), datatype.isUnsigned(), ((H5Datatype) datatype).isRegRef()); + + H5Datatype.extractCompoundInfo((H5Datatype) datatype, "", flatNameList, flatTypeList); + } + catch (Exception ex) { + log.debug("init(): failed to create datatype for dataset: ", ex); + datatype = null; + } + + // initialize member information + numberOfMembers = flatNameList.size(); + log.trace("init(): numberOfMembers={}", numberOfMembers); + + memberNames = new String[numberOfMembers]; + memberTypes = new Datatype[numberOfMembers]; + memberOrders = new int[numberOfMembers]; + isMemberSelected = new boolean[numberOfMembers]; + memberDims = new Object[numberOfMembers]; + + for (int i = 0; i < numberOfMembers; i++) { + isMemberSelected[i] = true; + memberOrders[i] = 1; + memberDims[i] = null; + + try { + memberTypes[i] = flatTypeList.get(i); + log.trace("init()[{}]: memberTypes[{}]={}", i, i, memberTypes[i].getDescription()); + + if (memberTypes[i].isArray()) { + long mdim[] = memberTypes[i].getArrayDims(); + int idim[] = new int[mdim.length]; + int arrayNpoints = 1; + + for (int j = 0; j < idim.length; j++) { + idim[j] = (int) mdim[j]; + 
arrayNpoints *= idim[j]; + } + + memberDims[i] = idim; + memberOrders[i] = arrayNpoints; + } + } + catch (Exception ex) { + log.debug("init()[{}]: memberTypes[{}] get failure: ", i, i, ex); + memberTypes[i] = null; + } + + try { + memberNames[i] = flatNameList.get(i); + log.trace("init()[{}]: memberNames[{}]={}", i, i, memberNames[i]); + } + catch (Exception ex) { + log.debug("init()[{}]: memberNames[{}] get failure: ", i, i, ex); + memberNames[i] = "null"; + } + } // (int i=0; i= 0) { + try { + objInfo = H5.H5Oget_info(did); + nAttributes = (int) objInfo.num_attrs; + } + catch (Exception ex) { + objInfo.num_attrs = 0; + log.debug("hasAttribute(): get object info failure: ", ex); + } + close(did); + } + else { + log.debug("hasAttribute(): could not open dataset"); + } + } + + log.trace("hasAttribute(): nAttributes={}", objInfo.num_attrs); + return (objInfo.num_attrs > 0); + } + + /* + * (non-Javadoc) + * + * @see hdf.object.Dataset#getDatatype() + */ + @Override + public Datatype getDatatype() { + log.trace("getDatatype(): start"); + + if (!inited) + init(); + + if (datatype == null) { + long did = -1; + long tid = -1; + + log.trace("getDatatype(): datatype == null"); + + did = open(); + if (did >= 0) { + try { + tid = H5.H5Dget_type(did); + datatype = new H5Datatype(tid); + } + catch (Exception ex) { + log.debug("getDatatype(): ", ex); + } + finally { + try { + H5.H5Tclose(tid); + } + catch (HDF5Exception ex) { + log.debug("getDatatype(): H5Tclose(tid {}) failure: ", tid, ex); + } + try { + H5.H5Dclose(did); + } + catch (HDF5Exception ex) { + log.debug("getDatatype(): H5Dclose(did {}) failure: ", did, ex); + } + } + } + } + + if (isExternal) { + String pdir = this.getFileFormat().getAbsoluteFile().getParent(); + + if (pdir == null) { + pdir = "."; + } + System.setProperty("user.dir", pdir); + log.trace("getDatatype(): External dataset: user.dir={}", pdir); + } + + log.trace("getDatatype(): finish"); + return datatype; + } + + @Override + public Object getFillValue() { + return null; + } + + /* + * (non-Javadoc) + * + * @see hdf.object.Dataset#clear() + */ + @Override + public void clear() { + super.clear(); + + if (attributeList != null) { + ((Vector) attributeList).setSize(0); + } + } + + /* + * (non-Javadoc) + * + * @see hdf.object.Dataset#readBytes() + */ + @Override + public byte[] readBytes() throws HDF5Exception { + log.trace("readBytes(): start"); + + byte[] theData = null; + + if (!isInited()) + init(); + + long did = open(); + if (did >= 0) { + long fspace = -1; + long mspace = -1; + long tid = -1; + + try { + long[] lsize = { 1 }; + for (int j = 0; j < selectedDims.length; j++) { + lsize[0] *= selectedDims[j]; + } + + fspace = H5.H5Dget_space(did); + mspace = H5.H5Screate_simple(rank, selectedDims, null); + + // set the rectangle selection + // HDF5 bug: for scalar dataset, H5Sselect_hyperslab gives core dump + if (rank * dims[0] > 1) { + H5.H5Sselect_hyperslab(fspace, HDF5Constants.H5S_SELECT_SET, startDims, selectedStride, + selectedDims, null); // set block to 1 + } + + tid = H5.H5Dget_type(did); + long size = H5.H5Tget_size(tid) * lsize[0]; + log.trace("readBytes(): size = {}", size); + + if (size < Integer.MIN_VALUE || size > Integer.MAX_VALUE) throw new Exception("Invalid int size"); + + theData = new byte[(int) size]; + + log.trace("readBytes(): H5Dread: did={} tid={} fspace={} mspace={}", did, tid, fspace, mspace); + H5.H5Dread(did, tid, mspace, fspace, HDF5Constants.H5P_DEFAULT, theData); + } + catch (Exception ex) { + log.debug("readBytes(): failed to read data: ", 
ex); + } + finally { + try { + H5.H5Sclose(fspace); + } + catch (Exception ex2) { + log.debug("readBytes(): H5Sclose(fspace {}) failure: ", fspace, ex2); + } + try { + H5.H5Sclose(mspace); + } + catch (Exception ex2) { + log.debug("readBytes(): H5Sclose(mspace {}) failure: ", mspace, ex2); + } + try { + H5.H5Tclose(tid); + } + catch (HDF5Exception ex2) { + log.debug("readBytes(): H5Tclose(tid {}) failure: ", tid, ex2); + } + close(did); + } + } + + log.trace("readBytes(): finish"); + return theData; + } + + /* + * (non-Javadoc) + * + * @see hdf.object.Dataset#read() + */ + @Override + public Object read() throws Exception { + log.trace("read(): start"); + + Object readData = null; + + if (!isInited()) + init(); + + try { + readData = compoundDatasetCommonIO(IO_TYPE.READ, null); + } + catch (Exception ex) { + log.debug("read(): failed to read compound dataset: ", ex); + throw new Exception("failed to read compound dataset: " + ex.getMessage(), ex); + } + + log.trace("read(): finish"); + + return readData; + } + + /** + * Writes the given data buffer into this dataset in a file. + *
+     * <p>
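+     * For the three-field s1_t example in the class comment, the buffer might
+     * be assembled as in the following sketch (assuming a dataset object
+     * "dset"; the field order must match the member order of the dataset):
+     *
+     * <pre>
+     * Vector buf = new Vector();
+     * buf.add(new int[] { 1, 2 }); // field "a" for two records
+     * buf.add(new float[] { 1.0f, 2.0f }); // field "b"
+     * buf.add(new double[] { 1.0, 2.0 }); // field "c"
+     * dset.write(buf);
+     * </pre>
+     * <p>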
+ * The data buffer is a vector that contains the data values of compound fields. The data is written + * into file field by field. + * + * @param buf + * The vector that contains the data values of compound fields. + * + * @throws Exception + * If there is an error at the HDF5 library level. + */ + @Override + public void write(Object buf) throws Exception { + log.trace("write(): start"); + + if (this.getFileFormat().isReadOnly()) + throw new Exception("cannot write to compound dataset in file opened as read-only"); + + if (!isInited()) + init(); + + try { + compoundDatasetCommonIO(IO_TYPE.WRITE, buf); + } + catch (Exception ex) { + log.debug("write(): failed to write compound dataset: ", ex); + throw new Exception("failed to write compound dataset: " + ex.getMessage(), ex); + } + + log.trace("write(): finish"); + } + + private Object compoundDatasetCommonIO(IO_TYPE ioType, Object writeBuf) throws Exception { + log.trace("compoundDatasetCommonIO(): start"); + + H5Datatype dsDatatype = (H5Datatype) getDatatype(); + Object data = null; + + if (numberOfMembers <= 0) { + log.debug("compoundDatasetCommonIO(): Dataset contains no members"); + log.trace("compoundDatasetCommonIO(): exit"); + throw new Exception("dataset contains no members"); + } + + /* + * I/O type-specific pre-initialization. + */ + if (ioType == IO_TYPE.WRITE) { + if ((writeBuf == null) || !(writeBuf instanceof List)) { + log.debug("compoundDatasetCommonIO(): writeBuf is null or invalid"); + log.trace("compoundDatasetCommonIO(): exit"); + throw new Exception("write buffer is null or invalid"); + } + + /* + * Check for any unsupported datatypes and fail early before + * attempting to write to the dataset. + */ + if (dsDatatype.isArray() && dsDatatype.getDatatypeBase().isCompound()) { + log.debug("compoundDatasetCommonIO(): cannot write dataset of type ARRAY of COMPOUND"); + log.trace("compoundDatasetCommonIO(): finish"); + throw new HDF5Exception("Unsupported dataset of type ARRAY of COMPOUND"); + } + + if (dsDatatype.isVLEN() && dsDatatype.getDatatypeBase().isCompound()) { + log.debug("compoundDatasetCommonIO(): cannot write dataset of type VLEN of COMPOUND"); + log.trace("compoundDatasetCommonIO(): finish"); + throw new HDF5Exception("Unsupported dataset of type VLEN of COMPOUND"); + } + } + + log.trace("compoundDatasetCommonIO(): open dataset"); + + long did = open(); + if (did >= 0) { + long[] spaceIDs = { -1, -1 }; // spaceIDs[0]=mspace, spaceIDs[1]=fspace + + try { + /* + * NOTE: this call sets up a hyperslab selection in the file according to the + * current selection in the dataset object. 
+ */ + long totalSelectedSpacePoints = H5Utils.getTotalSelectedSpacePoints(did, dims, startDims, + selectedStride, selectedDims, spaceIDs); + + data = compoundTypeIO(ioType, did, spaceIDs, (int) totalSelectedSpacePoints, dsDatatype, writeBuf, new int[]{0}); + } + finally { + if (HDF5Constants.H5S_ALL != spaceIDs[0]) { + try { + H5.H5Sclose(spaceIDs[0]); + } + catch (Exception ex) { + log.debug("compoundDatasetCommonIO(): H5Sclose(spaceIDs[0] {}) failure: ", spaceIDs[0], ex); + } + } + + if (HDF5Constants.H5S_ALL != spaceIDs[1]) { + try { + H5.H5Sclose(spaceIDs[1]); + } + catch (Exception ex) { + log.debug("compoundDatasetCommonIO(): H5Sclose(spaceIDs[1] {}) failure: ", spaceIDs[1], ex); + } + } + + close(did); + } + } + else + log.debug("compoundDatasetCommonIO(): failed to open dataset"); + + log.trace("compoundDatasetCommonIO(): finish"); + + return data; + } + + /* + * Private recursive routine to read/write an entire compound datatype field by + * field. This routine is called recursively for ARRAY of COMPOUND and VLEN of + * COMPOUND datatypes. + * + * NOTE: the globalMemberIndex hack is ugly, but we need to keep track of a + * running counter so that we can index properly into the flattened name list + * generated from H5Datatype.extractCompoundInfo() at dataset init time. + */ + private Object compoundTypeIO(IO_TYPE ioType, long did, long[] spaceIDs, int nSelPoints, final H5Datatype cmpdType, + Object writeBuf, int[] globalMemberIndex) { + log.trace("compoundTypeIO(): start"); + + Object theData = null; + + if (cmpdType.isArray()) { + log.trace("compoundTypeIO(): ARRAY type"); + + long[] arrayDims = cmpdType.getArrayDims(); + int arrSize = nSelPoints; + for (int i = 0; i < arrayDims.length; i++) { + arrSize *= arrayDims[i]; + } + + theData = compoundTypeIO(ioType, did, spaceIDs, arrSize, (H5Datatype) cmpdType.getDatatypeBase(), writeBuf, globalMemberIndex); + } + else if (cmpdType.isVLEN() && !cmpdType.isVarStr()) { + /* + * TODO: true variable-length support. + */ + String[] errVal = new String[nSelPoints]; + String errStr = "*UNSUPPORTED*"; + + for (int j = 0; j < nSelPoints; j++) + errVal[j] = errStr; + + /* + * Setup a fake data list. + */ + Datatype baseType = cmpdType.getDatatypeBase(); + while (baseType != null && !baseType.isCompound()) { + baseType = baseType.getDatatypeBase(); + } + + List fakeVlenData = (List) H5Datatype.allocateArray((H5Datatype) baseType, nSelPoints); + fakeVlenData.add(errVal); + + theData = fakeVlenData; + } + else if (cmpdType.isCompound()) { + List memberDataList = null; + List typeList = cmpdType.getCompoundMemberTypes(); + + log.trace("compoundTypeIO(): {} {} members:", (ioType == IO_TYPE.READ) ? "read" : "write", + typeList.size()); + + if (ioType == IO_TYPE.READ) { + memberDataList = (List) H5Datatype.allocateArray(cmpdType, nSelPoints); + } + + try { + for (int i = 0, writeListIndex = 0; i < typeList.size(); i++) { + H5Datatype memberType = null; + String memberName = null; + Object memberData = null; + + try { + memberType = (H5Datatype) typeList.get(i); + } + catch (Exception ex) { + log.debug("compoundTypeIO(): get member {} failure: ", i, ex); + globalMemberIndex[0]++; + continue; + } + + /* + * Since the type list used here is not a flattened structure, we need to skip + * the member selection check for compound types, as otherwise having a single + * member not selected would skip the reading/writing for the entire compound + * type. The member selection check will be deferred to the recursive compound + * read/write below. 
+ */ + if (!memberType.isCompound()) { + if (!isMemberSelected[globalMemberIndex[0] % this.getMemberCount()]) { + log.debug("compoundTypeIO(): member[{}] is not selected", i); + globalMemberIndex[0]++; + continue; // the field is not selected + } + } + + if (!memberType.isCompound()) { + try { + memberName = new String(flatNameList.get(globalMemberIndex[0])); + } + catch (Exception ex) { + log.debug("compoundTypeIO(): get member {} name failure: ", i, ex); + memberName = "null"; + } + } + + log.trace("compoundTypeIO(): member[{}]({}) is type {}", i, memberName, memberType.getDescription()); + + if (ioType == IO_TYPE.READ) { + try { + if (memberType.isCompound()) + memberData = compoundTypeIO(ioType, did, spaceIDs, nSelPoints, memberType, writeBuf, globalMemberIndex); + else if (memberType.isArray() /* || (memberType.isVLEN() && !memberType.isVarStr()) */) { + /* + * Recursively detect any nested array/vlen of compound types. + */ + boolean compoundFound = false; + + Datatype base = memberType.getDatatypeBase(); + while (base != null) { + if (base.isCompound()) + compoundFound = true; + + base = base.getDatatypeBase(); + } + + if (compoundFound) { + /* + * Skip the top-level array/vlen type. + */ + globalMemberIndex[0]++; + + memberData = compoundTypeIO(ioType, did, spaceIDs, nSelPoints, memberType, writeBuf, globalMemberIndex); + } + else { + memberData = readSingleCompoundMember(did, spaceIDs, nSelPoints, memberType, memberName); + globalMemberIndex[0]++; + } + } + else { + memberData = readSingleCompoundMember(did, spaceIDs, nSelPoints, memberType, memberName); + globalMemberIndex[0]++; + } + } + catch (Exception ex) { + log.debug("compoundTypeIO(): failed to read member {}: ", i, ex); + globalMemberIndex[0]++; + memberData = null; + } + + if (memberData == null) { + String[] errVal = new String[nSelPoints]; + String errStr = "*ERROR*"; + + for (int j = 0; j < nSelPoints; j++) + errVal[j] = errStr; + + memberData = errVal; + } + + memberDataList.add(memberData); + } + else { + try { + /* + * TODO: currently doesn't correctly handle non-selected compound members. + */ + memberData = ((List) writeBuf).get(writeListIndex++); + } + catch (Exception ex) { + log.debug("compoundTypeIO(): get member[{}] data failure: ", i, ex); + globalMemberIndex[0]++; + continue; + } + + if (memberData == null) { + log.debug("compoundTypeIO(): member[{}] data is null", i); + globalMemberIndex[0]++; + continue; + } + + try { + if (memberType.isCompound()) { + List nestedList = (List) ((List) writeBuf).get(writeListIndex++); + compoundTypeIO(ioType, did, spaceIDs, nSelPoints, memberType, nestedList, globalMemberIndex); + } + else { + writeSingleCompoundMember(did, spaceIDs, nSelPoints, memberType, memberName, memberData); + globalMemberIndex[0]++; + } + } + catch (Exception ex) { + log.debug("compoundTypeIO(): failed to write member[{}]: ", i, ex); + globalMemberIndex[0]++; + } + } + } // (i = 0, writeListIndex = 0; i < atomicTypeList.size(); i++) + } + catch (Exception ex) { + log.debug("compoundTypeIO(): failure: ", ex); + memberDataList = null; + } + + theData = memberDataList; + } + + log.trace("compoundTypeIO(): finish"); + + return theData; + } + + /* + * Private routine to read a single field of a compound datatype by creating a + * compound datatype and inserting the single field into that datatype. 
+ */ + private Object readSingleCompoundMember(long dsetID, long[] spaceIDs, int nSelPoints, final H5Datatype memberType, + String memberName) throws Exception { + log.trace("readSingleCompoundMember(): start"); + + H5Datatype dsDatatype = (H5Datatype) this.getDatatype(); + Object memberData = null; + + try { + memberData = H5Datatype.allocateArray(memberType, nSelPoints); + log.trace("readSingleCompoundMember(): allocateArray {} points ", nSelPoints); + } + catch (OutOfMemoryError err) { + memberData = null; + throw new Exception("Out of memory"); + } + catch (Exception ex) { + log.debug("readSingleCompoundMember(): ", ex); + memberData = null; + } + + if (memberData != null) { + /* + * Create a compound datatype containing just a single field (the one which we + * want to read). + */ + long compTid = -1; + try { + compTid = dsDatatype.createCompoundFieldType(memberName); + } + catch (HDF5Exception ex) { + log.debug("readSingleCompoundMember(): unable to create compound field type for member of type {}: ", + memberType.getDescription(), ex); + memberData = null; + } + + /* + * Actually read the data for this member now that everything has been setup. + */ + try { + if (memberType.isVLEN() || (memberType.isArray() && memberType.getDatatypeBase().isVLEN())) { + log.trace("readSingleCompoundMember(): H5DreadVL did={} compTid={} spaceIDs[0]={} spaceIDs[1]={}", + dsetID, compTid, (spaceIDs[0] == HDF5Constants.H5P_DEFAULT) ? "H5P_DEFAULT" : spaceIDs[0], + (spaceIDs[1] == HDF5Constants.H5P_DEFAULT) ? "H5P_DEFAULT" : spaceIDs[1]); + + H5.H5DreadVL(dsetID, compTid, spaceIDs[0], spaceIDs[1], HDF5Constants.H5P_DEFAULT, (Object[]) memberData); + } + else { + log.trace("readSingleCompoundMember(): H5Dread did={} compTid={} spaceIDs[0]={} spaceIDs[1]={}", + dsetID, compTid, (spaceIDs[0] == HDF5Constants.H5P_DEFAULT) ? "H5P_DEFAULT" : spaceIDs[0], + (spaceIDs[1] == HDF5Constants.H5P_DEFAULT) ? "H5P_DEFAULT" : spaceIDs[1]); + + H5.H5Dread(dsetID, compTid, spaceIDs[0], spaceIDs[1], HDF5Constants.H5P_DEFAULT, memberData); + } + } + catch (HDF5DataFiltersException exfltr) { + log.debug("readSingleCompoundMember(): read failure: ", exfltr); + log.trace("readSingleCompoundMember(): exit"); + throw new Exception("Filter not available exception: " + exfltr.getMessage(), exfltr); + } + catch (Exception ex) { + log.debug("readSingleCompoundMember(): read failure: ", ex); + log.trace("readSingleCompoundMember(): exit"); + throw new Exception("failed to read compound member: " + ex.getMessage(), ex); + } + finally { + dsDatatype.close(compTid); + } + + /* + * Perform any necessary data conversions. + */ + if (memberType.isUnsigned()) { + log.trace("readSingleCompoundMember(): converting from unsigned C-type integers"); + memberData = Dataset.convertFromUnsignedC(memberData, null); + } + else if (Utils.getJavaObjectRuntimeClass(memberData) == 'B') { + log.trace("readSingleCompoundMember(): converting byte array member into Object"); + + /* + * For all other types that get read into memory as a byte[] (such as nested + * compounds and arrays of compounds), we must manually convert the byte[] into + * something usable. + */ + memberData = convertByteMember(memberType, (byte[]) memberData); + } + } + + log.trace("readSingleCompoundMember(): finish"); + + return memberData; + } + + /* + * Private routine to write a single field of a compound datatype by creating a + * compound datatype and inserting the single field into that datatype. 
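+ * When writing through such a single-field type, HDF5 updates only the bytes
+ * of that one member in each record; the other members are left untouched.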
+ */ + private void writeSingleCompoundMember(long dsetID, long[] spaceIDs, int nSelPoints, final H5Datatype memberType, + String memberName, Object theData) throws Exception { + log.trace("writeSingleCompoundMember(): start"); + + H5Datatype dsDatatype = (H5Datatype) this.getDatatype(); + + /* + * Check for any unsupported datatypes before attempting to write this compound + * member. + */ + if (memberType.isVLEN() && !memberType.isVarStr()) { + log.debug("writeSingleCompoundMember(): writing of VL non-strings is not currently supported"); + log.trace("writeSingleCompoundMember(): exit"); + throw new Exception("writing of VL non-strings is not currently supported"); + } + + /* + * Perform any necessary data conversions before writing the data. + */ + Object tmpData = theData; + try { + if (memberType.isUnsigned()) { + // Check if we need to convert unsigned integer data from Java-style + // to C-style integers + long tsize = memberType.getDatatypeSize(); + String cname = theData.getClass().getName(); + char dname = cname.charAt(cname.lastIndexOf('[') + 1); + boolean doIntConversion = (((tsize == 1) && (dname == 'S')) + || ((tsize == 2) && (dname == 'I')) || ((tsize == 4) && (dname == 'J'))); + + if (doIntConversion) { + log.trace("writeSingleCompoundMember(): converting integer data to unsigned C-type integers"); + tmpData = convertToUnsignedC(theData, null); + } + } + else if (memberType.isString() && (Array.get(theData, 0) instanceof String)) { + log.trace("writeSingleCompoundMember(): converting string array to byte array"); + tmpData = stringToByte((String[]) theData, (int) memberType.getDatatypeSize()); + } + else if (memberType.isEnum() && (Array.get(theData, 0) instanceof String)) { + log.trace("writeSingleCompoundMember(): converting enum names to values"); + tmpData = memberType.convertEnumNameToValue((String[]) theData); + } + } + catch (Exception ex) { + log.debug("writeSingleCompoundMember(): data conversion failure: ", ex); + tmpData = null; + } + + if (tmpData == null) { + log.debug("writeSingleCompoundMember(): data is null"); + log.trace("writeSingleCompoundMember(): finish"); + return; + } + + /* + * Create a compound datatype containing just a single field (the one which we + * want to write). + */ + long compTid = -1; + try { + compTid = dsDatatype.createCompoundFieldType(memberName); + } + catch (HDF5Exception ex) { + log.debug("writeSingleCompoundMember(): unable to create compound field type for member of type {}: ", + memberType.getDescription(), ex); + } + + /* + * Actually write the data now that everything has been setup. + */ + try { + if (memberType.isVarStr()) { + log.trace("writeSingleCompoundMember(): H5Dwrite_string did={} compTid={} spaceIDs[0]={} spaceIDs[1]={}", + dsetID, compTid, (spaceIDs[0] == HDF5Constants.H5P_DEFAULT) ? "H5P_DEFAULT" : spaceIDs[0], + (spaceIDs[1] == HDF5Constants.H5P_DEFAULT) ? "H5P_DEFAULT" : spaceIDs[1]); + + H5.H5Dwrite_string(dsetID, compTid, spaceIDs[0], spaceIDs[1], HDF5Constants.H5P_DEFAULT, (String[]) tmpData); + } + else { + log.trace("writeSingleCompoundMember(): H5Dwrite did={} compTid={} spaceIDs[0]={} spaceIDs[1]={}", + dsetID, compTid, (spaceIDs[0] == HDF5Constants.H5P_DEFAULT) ? "H5P_DEFAULT" : spaceIDs[0], + (spaceIDs[1] == HDF5Constants.H5P_DEFAULT) ? "H5P_DEFAULT" : spaceIDs[1]); + + // BUG!!! does not write nested compound data and no + // exception was caught. Need to check if it is a java + // error or C library error. 
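+ // spaceIDs[0] is the memory dataspace and spaceIDs[1] the file dataspace
+ // selection (H5S_ALL when the whole dataset is selected).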
+ H5.H5Dwrite(dsetID, compTid, spaceIDs[0], spaceIDs[1], HDF5Constants.H5P_DEFAULT, tmpData); + } + } + catch (Exception ex) { + log.debug("writeSingleCompoundMember(): write failure: ", ex); + log.trace("writeSingleCompoundMember(): finish"); + throw new Exception("failed to write compound member: " + ex.getMessage(), ex); + } + finally { + dsDatatype.close(compTid); + } + + log.trace("writeSingleCompoundMember(): finish"); + } + + /* + * Private routine to convert datatypes that are read in as byte arrays to + * regular types. + */ + private Object convertByteMember(final H5Datatype dtype, byte[] byteData) { + log.trace("convertByteMember(): start"); + + Object theObj = null; + + if (dtype.getDatatypeSize() == 1) { + /* + * Normal byte[] type, such as an integer datatype of size 1. + */ + theObj = byteData; + } + else if (dtype.isString() && !dtype.isVarStr() && convertByteToString) { + log.trace("convertByteMember(): converting byte array to string array"); + + theObj = byteToString(byteData, (int) dtype.getDatatypeSize()); + } + else if (dtype.isInteger()) { + log.trace("convertByteMember(): converting byte array to integer array"); + + theObj = HDFNativeData.byteToInt(byteData); + } + else if (dtype.isFloat()) { + log.trace("convertByteMember(): converting byte array to float array"); + + theObj = HDFNativeData.byteToFloat(byteData); + } + else if (dtype.isRef()) { + log.trace("convertByteMember(): reference type - converting byte array to long array"); + + theObj = HDFNativeData.byteToLong(byteData); + } + else if (dtype.isArray()) { + H5Datatype baseType = (H5Datatype) dtype.getDatatypeBase(); + + /* + * Retrieve the real base datatype in the case of ARRAY of ARRAY datatypes. + */ + while (baseType.isArray()) baseType = (H5Datatype) baseType.getDatatypeBase(); + + /* + * Optimize for the common cases of Arrays. + */ + switch (baseType.getDatatypeClass()) { + case Datatype.CLASS_INTEGER: + case Datatype.CLASS_FLOAT: + case Datatype.CLASS_CHAR: + case Datatype.CLASS_STRING: + case Datatype.CLASS_BITFIELD: + case Datatype.CLASS_OPAQUE: + case Datatype.CLASS_COMPOUND: + case Datatype.CLASS_REFERENCE: + case Datatype.CLASS_ENUM: + case Datatype.CLASS_VLEN: + case Datatype.CLASS_TIME: + theObj = convertByteMember(baseType, byteData); + break; + + case Datatype.CLASS_ARRAY: + { + H5Datatype arrayType = (H5Datatype) dtype.getDatatypeBase(); + + long[] arrayDims = dtype.getArrayDims(); + int arrSize = 1; + for (int i = 0; i < arrayDims.length; i++) { + arrSize *= arrayDims[i]; + } + + theObj = new Object[arrSize]; + + for (int i = 0; i < arrSize; i++) { + byte[] indexedBytes = Arrays.copyOfRange(byteData, (int) (i * arrayType.getDatatypeSize()), + (int) ((i + 1) * arrayType.getDatatypeSize())); + ((Object[]) theObj)[i] = convertByteMember(arrayType, indexedBytes); + } + + break; + } + + case Datatype.CLASS_NO_CLASS: + default: + log.debug("convertByteMember(): invalid datatype class"); + theObj = new String("*ERROR*"); + } + } + else if (dtype.isCompound()) { + /* + * TODO: still valid after reading change? + */ + theObj = convertCompoundByteMembers(dtype, byteData); + } + else { + theObj = byteData; + } + + log.trace("convertByteMember(): finish"); + + return theObj; + } + + /** + * Given an array of bytes representing a compound Datatype, converts each of + * its members into Objects and returns the results. 
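+ * For example (illustrative), a compound of a 4-byte integer followed by a
+ * 4-byte float is sliced into bytes [0, 4) and [4, 8), and each slice is
+ * handed to convertByteMember() for its member type.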
+ * + * @param dtype + * The compound datatype to convert + * @param data + * The byte array representing the data of the compound Datatype + * @return The converted types of the bytes + */ + private Object convertCompoundByteMembers(final H5Datatype dtype, byte[] data) { + List<Object> theData = null; + + List<Datatype> allSelectedTypes = Arrays.asList(this.getSelectedMemberTypes()); + List<Datatype> localTypes = new ArrayList<>(dtype.getCompoundMemberTypes()); + Iterator<Datatype> localIt = localTypes.iterator(); + while (localIt.hasNext()) { + Datatype curType = localIt.next(); + + if (curType.isCompound()) + continue; + + if (!allSelectedTypes.contains(curType)) + localIt.remove(); + } + + theData = new ArrayList<>(localTypes.size()); + for (int i = 0, index = 0; i < localTypes.size(); i++) { + Datatype curType = localTypes.get(i); + + if (curType.isCompound()) + theData.add(convertCompoundByteMembers((H5Datatype) curType, + Arrays.copyOfRange(data, index, index + (int) curType.getDatatypeSize()))); + else + theData.add(convertByteMember((H5Datatype) curType, + Arrays.copyOfRange(data, index, index + (int) curType.getDatatypeSize()))); + + index += curType.getDatatypeSize(); + } + + return theData; + } + + @Override + public Object convertFromUnsignedC() { + throw new UnsupportedOperationException("H5CompoundDS:convertFromUnsignedC Unsupported operation."); + } + + @Override + public Object convertToUnsignedC() { + throw new UnsupportedOperationException("H5CompoundDS:convertToUnsignedC Unsupported operation."); + } + + /* + * (non-Javadoc) + * + * @see hdf.object.DataFormat#getMetadata() + */ + @Override + public List<Attribute> getMetadata() throws HDF5Exception { + return this.getMetadata(fileFormat.getIndexType(null), fileFormat.getIndexOrder(null)); + } + + /* + * (non-Javadoc) + * + * @see hdf.object.DataFormat#getMetadata(int...) + */ + public List<Attribute> getMetadata(int... 
attrPropList) throws HDF5Exception { + log.trace("getMetadata(): start"); + + if (!isInited()) { + init(); + log.trace("getMetadata(): inited"); + } + + try { + this.linkTargetObjName = H5File.getLinkTargetName(this); + } + catch (Exception ex) { + log.debug("getMetadata(): getLinkTargetName failed: ", ex); + } + + if (attributeList != null) { + log.trace("getMetadata(): attributeList != null"); + log.trace("getMetadata(): finish"); + return attributeList; + } + + long did = -1; + long pcid = -1; + long paid = -1; + int indxType = fileFormat.getIndexType(null); + int order = fileFormat.getIndexOrder(null); + + // load attributes first + if (attrPropList.length > 0) { + indxType = attrPropList[0]; + if (attrPropList.length > 1) { + order = attrPropList[1]; + } + } + + attributeList = H5File.getAttribute(this, indxType, order); + log.trace("getMetadata(): attributeList loaded"); + + log.trace("getMetadata(): open dataset"); + did = open(); + if (did >= 0) { + log.trace("getMetadata(): dataset opened"); + try { + // get the compression and chunk information + pcid = H5.H5Dget_create_plist(did); + paid = H5.H5Dget_access_plist(did); + long storageSize = H5.H5Dget_storage_size(did); + int nfilt = H5.H5Pget_nfilters(pcid); + int layoutType = H5.H5Pget_layout(pcid); + + storageLayout.setLength(0); + compression.setLength(0); + + if (layoutType == HDF5Constants.H5D_CHUNKED) { + chunkSize = new long[rank]; + H5.H5Pget_chunk(pcid, rank, chunkSize); + int n = chunkSize.length; + + storageLayout.append("CHUNKED: ").append(chunkSize[0]); + for (int i = 1; i < n; i++) { + storageLayout.append(" X ").append(chunkSize[i]); + } + + if (nfilt > 0) { + long nelmts = 1; + long uncompSize; + long datumSize = getDatatype().getDatatypeSize(); + if (datumSize < 0) { + long tmptid = -1; + try { + tmptid = H5.H5Dget_type(did); + datumSize = H5.H5Tget_size(tmptid); + } + finally { + try { + H5.H5Tclose(tmptid); + } + catch (Exception ex2) { + log.debug("getMetadata(): H5Tclose(tmptid {}) failure: ", tmptid, ex2); + } + } + } + + for (int i = 0; i < rank; i++) { + nelmts *= dims[i]; + } + uncompSize = nelmts * datumSize; + + /* compression ratio = uncompressed size / compressed size */ + + if (storageSize != 0) { + double ratio = (double) uncompSize / (double) storageSize; + DecimalFormat df = new DecimalFormat(); + df.setMinimumFractionDigits(3); + df.setMaximumFractionDigits(3); + compression.append(df.format(ratio)).append(":1"); + } + } + } + else if (layoutType == HDF5Constants.H5D_COMPACT) { + storageLayout.append("COMPACT"); + } + else if (layoutType == HDF5Constants.H5D_CONTIGUOUS) { + storageLayout.append("CONTIGUOUS"); + if (H5.H5Pget_external_count(pcid) > 0) + storageLayout.append(" - EXTERNAL "); + } + else if (layoutType == HDF5Constants.H5D_VIRTUAL) { + storageLayout.append("VIRTUAL - "); + try { + long vmaps = H5.H5Pget_virtual_count(pcid); + try { + int virtView = H5.H5Pget_virtual_view(paid); + long virtGap = H5.H5Pget_virtual_printf_gap(paid); + if (virtView == HDF5Constants.H5D_VDS_FIRST_MISSING) + storageLayout.append("First Missing"); + else + storageLayout.append("Last Available"); + storageLayout.append("\nGAP : ").append(virtGap); + } + catch (Exception err) { + log.debug("getMetadata(): vds error: ", err); + storageLayout.append("ERROR"); + } + storageLayout.append("\nMAPS : ").append(vmaps); + if (vmaps > 0) { + for (long next = 0; next < vmaps; next++) { + try { + H5.H5Pget_virtual_vspace(pcid, next); + H5.H5Pget_virtual_srcspace(pcid, next); + String fname = 
H5.H5Pget_virtual_filename(pcid, next); + String dsetname = H5.H5Pget_virtual_dsetname(pcid, next); + storageLayout.append("\n").append(fname).append(" : ").append(dsetname); + } + catch (Exception err) { + log.debug("getMetadata(): vds space[{}] error: ", next, err); + log.trace("getMetadata(): vds[{}] continue", next); + storageLayout.append("ERROR"); + } + } + } + } + catch (Exception err) { + log.debug("getMetadata(): vds count error: ", err); + storageLayout.append("ERROR"); + } + } + else { + chunkSize = null; + storageLayout.append("NONE"); + } + + int[] flags = { 0, 0 }; + long[] cdNelmts = { 20 }; + int[] cdValues = new int[(int) cdNelmts[0]]; + String[] cdName = { "", "" }; + log.trace("getMetadata(): {} filters in pipeline", nfilt); + int filter = -1; + int[] filterConfig = { 1 }; + + filters.setLength(0); + + if (nfilt == 0) { + filters.append("NONE"); + } + else { + for (int i = 0, k = 0; i < nfilt; i++) { + log.trace("getMetadata(): filter[{}]", i); + if (i > 0) { + filters.append(", "); + } + if (k > 0) { + compression.append(", "); + } + + try { + cdNelmts[0] = 20; + cdValues = new int[(int) cdNelmts[0]]; + cdValues = new int[(int) cdNelmts[0]]; + filter = H5.H5Pget_filter(pcid, i, flags, cdNelmts, cdValues, 120, cdName, filterConfig); + log.trace("getMetadata(): filter[{}] is {} has {} elements ", i, cdName[0], cdNelmts[0]); + for (int j = 0; j < cdNelmts[0]; j++) { + log.trace("getMetadata(): filter[{}] element {} = {}", i, j, cdValues[j]); + } + } + catch (Exception err) { + log.debug("getMetadata(): filter[{}] error: ", i, err); + log.trace("getMetadata(): filter[{}] continue", i); + filters.append("ERROR"); + continue; + } + + if (filter == HDF5Constants.H5Z_FILTER_NONE) { + filters.append("NONE"); + } + else if (filter == HDF5Constants.H5Z_FILTER_DEFLATE) { + filters.append("GZIP"); + compression.append(COMPRESSION_GZIP_TXT).append(cdValues[0]); + k++; + } + else if (filter == HDF5Constants.H5Z_FILTER_FLETCHER32) { + filters.append("Error detection filter"); + } + else if (filter == HDF5Constants.H5Z_FILTER_SHUFFLE) { + filters.append("SHUFFLE: Nbytes = ").append(cdValues[0]); + } + else if (filter == HDF5Constants.H5Z_FILTER_NBIT) { + filters.append("NBIT"); + } + else if (filter == HDF5Constants.H5Z_FILTER_SCALEOFFSET) { + filters.append("SCALEOFFSET: MIN BITS = ").append(cdValues[0]); + } + else if (filter == HDF5Constants.H5Z_FILTER_SZIP) { + filters.append("SZIP"); + compression.append("SZIP: Pixels per block = ").append(cdValues[1]); + k++; + int flag = -1; + try { + flag = H5.H5Zget_filter_info(filter); + } + catch (Exception ex) { + log.debug("getMetadata(): H5Zget_filter_info failure: ", ex); + flag = -1; + } + if (flag == HDF5Constants.H5Z_FILTER_CONFIG_DECODE_ENABLED) { + compression.append(": H5Z_FILTER_CONFIG_DECODE_ENABLED"); + } + else if ((flag == HDF5Constants.H5Z_FILTER_CONFIG_ENCODE_ENABLED) + || (flag >= (HDF5Constants.H5Z_FILTER_CONFIG_ENCODE_ENABLED + + HDF5Constants.H5Z_FILTER_CONFIG_DECODE_ENABLED))) { + compression.append(": H5Z_FILTER_CONFIG_ENCODE_ENABLED"); + } + } + else { + filters.append("USERDEFINED ").append(cdName[0]).append("(").append(filter).append("): "); + for (int j = 0; j < cdNelmts[0]; j++) { + if (j > 0) + filters.append(", "); + filters.append(cdValues[j]); + } + log.debug("getMetadata(): filter[{}] is user defined compression", i); + } + } // (int i=0; i= 0) { + try { + H5.H5Adelete(did, attr.getName()); + List attrList = getMetadata(); + attrList.remove(attr); + nAttributes = attrList.size(); + } + finally { + close(did); 
+ } + } + + log.trace("removeMetadata(): finish"); + } + + /* + * (non-Javadoc) + * + * @see hdf.object.DataFormat#updateMetadata(java.lang.Object) + */ + @Override + public void updateMetadata(Object info) throws HDF5Exception { + log.trace("updateMetadata(): start"); + + // only attribute metadata is supported. + if (!(info instanceof Attribute)) { + log.debug("updateMetadata(): Object not an Attribute"); + log.trace("updateMetadata(): finish"); + return; + } + + nAttributes = -1; + + log.trace("updateMetadata(): finish"); + } + + /* + * (non-Javadoc) + * + * @see hdf.object.HObject#setName(java.lang.String) + */ + @Override + public void setName(String newName) throws Exception { + H5File.renameObject(this, newName); + super.setName(newName); + } + + /** + * Resets selection of dataspace + */ + private void resetSelection() { + log.trace("resetSelection(): start"); + + for (int i = 0; i < rank; i++) { + startDims[i] = 0; + selectedDims[i] = 1; + if (selectedStride != null) { + selectedStride[i] = 1; + } + } + + if (rank == 1) { + selectedIndex[0] = 0; + selectedDims[0] = dims[0]; + } + else if (rank == 2) { + selectedIndex[0] = 0; + selectedIndex[1] = 1; + selectedDims[0] = dims[0]; + selectedDims[1] = dims[1]; + } + else if (rank > 2) { + // selectedIndex[0] = rank - 2; // columns + // selectedIndex[1] = rank - 1; // rows + // selectedIndex[2] = rank - 3; + selectedIndex[0] = 0; // width, the fastest dimension + selectedIndex[1] = 1; // height + selectedIndex[2] = 2; // frames + // selectedDims[rank - 1] = dims[rank - 1]; + // selectedDims[rank - 2] = dims[rank - 2]; + selectedDims[selectedIndex[0]] = dims[selectedIndex[0]]; + selectedDims[selectedIndex[1]] = dims[selectedIndex[1]]; + } + + isDataLoaded = false; + setAllMemberSelection(true); + log.trace("resetSelection(): finish"); + } + + /** + * @deprecated Not for public use in the future.
+ * Use + * {@link #create(String, Group, long[], long[], long[], int, String[], Datatype[], int[], long[][], Object)} + * instead. + * + * @param name + * the name of the dataset to create. + * @param pgroup + * parent group where the new dataset is created. + * @param dims + * the dimension size of the dataset. + * @param memberNames + * the names of the compound datatype members + * @param memberDatatypes + * the datatypes of the compound datatype members + * @param memberSizes + * the dim sizes of the members + * @param data + * list of data arrays written to the new dataset, null if no data is written to the new + * dataset. + * + * @return the new compound dataset if successful; otherwise returns null. + * + * @throws Exception + * if there is a failure. + */ + @Deprecated + public static Dataset create(String name, Group pgroup, long[] dims, String[] memberNames, + Datatype[] memberDatatypes, int[] memberSizes, Object data) throws Exception { + if ((pgroup == null) || (name == null) || (dims == null) || (memberNames == null) || (memberDatatypes == null) + || (memberSizes == null)) { + return null; + } + + int nMembers = memberNames.length; + int[] memberRanks = new int[nMembers]; + long[][] memberDims = new long[nMembers][1]; + for (int i = 0; i < nMembers; i++) { + memberRanks[i] = 1; + memberDims[i][0] = memberSizes[i]; + } + + return H5CompoundDS.create(name, pgroup, dims, memberNames, memberDatatypes, memberRanks, memberDims, data); + } + + /** + * @deprecated Not for public use in the future.
+ * Use + * {@link #create(String, Group, long[], long[], long[], int, String[], Datatype[], int[], long[][], Object)} + * instead. + * + * @param name + * the name of the dataset to create. + * @param pgroup + * parent group where the new dataset is created. + * @param dims + * the dimension size of the dataset. + * @param memberNames + * the names of the compound datatype members + * @param memberDatatypes + * the datatypes of the compound datatype members + * @param memberRanks + * the ranks of the members + * @param memberDims + * the dim sizes of the members + * @param data + * list of data arrays written to the new dataset, null if no data is written to the new + * dataset. + * + * @return the new compound dataset if successful; otherwise returns null. + * + * @throws Exception + * if the dataset cannot be created. + */ + @Deprecated + public static Dataset create(String name, Group pgroup, long[] dims, String[] memberNames, + Datatype[] memberDatatypes, int[] memberRanks, long[][] memberDims, Object data) throws Exception { + return H5CompoundDS.create(name, pgroup, dims, null, null, -1, memberNames, memberDatatypes, memberRanks, + memberDims, data); + } + + /** + * Creates a simple compound dataset in a file with/without chunking and compression. + *

+ * This function provides an easy way to create a simple compound dataset in a file by hiding the + * tedious details of compound dataset creation from the caller. + *

+ * This function calls H5.H5Dcreate() to create a simple compound dataset in the file. Nested compound + * datasets are not supported. The required information to create a compound dataset includes the + * name, the parent group and the data space of the dataset, plus the names, datatypes and data spaces of the + * compound fields. Other information, such as chunks, compression and the data buffer, is optional. + *
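+ * Note that HDF5 requires chunking for extendible datasets (maxdims different
+ * from dims); when chunks is null but the dataset is extendible, this method
+ * falls back to a default chunk size of min(dim, 64) in each dimension.
+ *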

+ * The following example shows how to use this function to create a compound dataset in a file. + + *

+     * H5File file = null;
+     * String message = "";
+     * Group pgroup = null;
+     * int[] DATA_INT = new int[DIM_SIZE];
+     * float[] DATA_FLOAT = new float[DIM_SIZE];
+     * String[] DATA_STR = new String[DIM_SIZE];
+     * long[] DIMs = { 50, 10 };
+     * long[] CHUNKs = { 25, 5 };
+     *
+     * try {
+     *     file = (H5File) H5FILE.open(fname, H5File.CREATE);
+     *     file.open();
+     *     pgroup = (Group) file.get("/");
+     * }
+     * catch (Exception ex) {
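+     *     // (error handling omitted in this brief example)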
+     * }
+     *
+     * Vector data = new Vector();
+     * data.add(0, DATA_INT);
+     * data.add(1, DATA_FLOAT);
+     * data.add(2, DATA_STR);
+     *
+     * // create the datatypes of the compound members
+     * Datatype[] mdtypes = new H5Datatype[3];
+     * String[] mnames = { "int", "float", "string" };
+     * Dataset dset = null;
+     * try {
+     *     mdtypes[0] = new H5Datatype(Datatype.CLASS_INTEGER, 4, Datatype.NATIVE, Datatype.NATIVE);
+     *     mdtypes[1] = new H5Datatype(Datatype.CLASS_FLOAT, 4, Datatype.NATIVE, Datatype.NATIVE);
+     *     mdtypes[2] = new H5Datatype(Datatype.CLASS_STRING, STR_LEN, Datatype.NATIVE, Datatype.NATIVE);
+     *     dset = file.createCompoundDS("/CompoundDS", pgroup, DIMs, null, CHUNKs, 9, mnames, mdtypes, null, data);
+     * }
+     * catch (Exception ex) {
+     *     failed(message, ex, file);
+     *     return 1;
+     * }
+     * 
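+ *
+ * For the example to be self-consistent, DIM_SIZE must equal the total number
+ * of points in the dataset, i.e. 50 * 10 = 500, since each member array
+ * supplies one value per data point.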
+ * + * @param name + * the name of the dataset to create. + * @param pgroup + * parent group where the new dataset is created. + * @param dims + * the dimension size of the dataset. + * @param maxdims + * the max dimension size of the dataset. maxdims is set to dims if maxdims = null. + * @param chunks + * the chunk size of the dataset. No chunking if chunk = null. + * @param gzip + * GZIP compression level (1 to 9). 0 or negative values if no compression. + * @param memberNames + * the names of compound datatype + * @param memberDatatypes + * the datatypes of the compound datatype + * @param memberRanks + * the ranks of the members + * @param memberDims + * the dim sizes of the members + * @param data + * list of data arrays written to the new dataset, null if no data is written to the new + * dataset. + * + * @return the new compound dataset if successful; otherwise returns null. + * + * @throws Exception + * if there is a failure. + */ + public static Dataset create(String name, Group pgroup, long[] dims, long[] maxdims, long[] chunks, int gzip, + String[] memberNames, Datatype[] memberDatatypes, int[] memberRanks, long[][] memberDims, Object data) + throws Exception { + log.trace("create(): start"); + + H5CompoundDS dataset = null; + String fullPath = null; + long did = -1; + long tid = -1; + long plist = -1; + long sid = -1; + + if ((pgroup == null) || (name == null) || (dims == null) || ((gzip > 0) && (chunks == null)) + || (memberNames == null) || (memberDatatypes == null) || (memberRanks == null) + || (memberDims == null)) { + log.debug("create(): one or more parameters are null"); + log.trace("create(): finish"); + return null; + } + + H5File file = (H5File) pgroup.getFileFormat(); + if (file == null) { + log.debug("create(): parent group FileFormat is null"); + log.trace("create(): finish"); + return null; + } + + String path = HObject.SEPARATOR; + if (!pgroup.isRoot()) { + path = pgroup.getPath() + pgroup.getName() + HObject.SEPARATOR; + if (name.endsWith("/")) { + name = name.substring(0, name.length() - 1); + } + int idx = name.lastIndexOf('/'); + if (idx >= 0) { + name = name.substring(idx + 1); + } + } + + fullPath = path + name; + + int typeSize = 0; + int nMembers = memberNames.length; + long[] mTypes = new long[nMembers]; + int memberSize = 1; + for (int i = 0; i < nMembers; i++) { + memberSize = 1; + for (int j = 0; j < memberRanks[i]; j++) { + memberSize *= memberDims[i][j]; + } + + mTypes[i] = -1; + // the member is an array + if ((memberSize > 1) && (!memberDatatypes[i].isString())) { + long tmptid = -1; + if ((tmptid = memberDatatypes[i].createNative()) >= 0) { + try { + mTypes[i] = H5.H5Tarray_create(tmptid, memberRanks[i], memberDims[i]); + } + finally { + try { + H5.H5Tclose(tmptid); + } + catch (Exception ex) { + log.debug("create(): H5Tclose(tmptid {}) failure: ", tmptid, ex); + } + } + } + } + else { + mTypes[i] = memberDatatypes[i].createNative(); + } + try { + typeSize += H5.H5Tget_size(mTypes[i]); + } + catch (Exception ex) { + log.debug("create(): array create H5Tget_size:", ex); + + while (i > 0) { + try { + H5.H5Tclose(mTypes[i]); + } + catch (HDF5Exception ex2) { + log.debug("create(): H5Tclose(mTypes[{}] {}) failure: ", i, mTypes[i], ex2); + } + i--; + } + throw ex; + } + } // (int i = 0; i < nMembers; i++) { + + // setup chunking and compression + boolean isExtentable = false; + if (maxdims != null) { + for (int i = 0; i < maxdims.length; i++) { + if (maxdims[i] == 0) { + maxdims[i] = dims[i]; + } + else if (maxdims[i] < 0) { + maxdims[i] = 
HDF5Constants.H5S_UNLIMITED; + } + + if (maxdims[i] != dims[i]) { + isExtentable = true; + } + } + } + + // HDF5 requires you to use chunking in order to define extendible + // datasets. Chunking makes it possible to extend datasets efficiently, + // without having to reorganize storage excessively. Using default size + // of 64x...which has good performance + if ((chunks == null) && isExtentable) { + chunks = new long[dims.length]; + for (int i = 0; i < dims.length; i++) + chunks[i] = Math.min(dims[i], 64); + } + + // prepare the dataspace and datatype + int rank = dims.length; + + try { + sid = H5.H5Screate_simple(rank, dims, maxdims); + + // figure out creation properties + plist = HDF5Constants.H5P_DEFAULT; + + tid = H5.H5Tcreate(HDF5Constants.H5T_COMPOUND, typeSize); + int offset = 0; + for (int i = 0; i < nMembers; i++) { + H5.H5Tinsert(tid, memberNames[i], offset, mTypes[i]); + offset += H5.H5Tget_size(mTypes[i]); + } + + if (chunks != null) { + plist = H5.H5Pcreate(HDF5Constants.H5P_DATASET_CREATE); + + H5.H5Pset_layout(plist, HDF5Constants.H5D_CHUNKED); + H5.H5Pset_chunk(plist, rank, chunks); + + // compression requires chunking + if (gzip > 0) { + H5.H5Pset_deflate(plist, gzip); + } + } + + long fid = file.getFID(); + + log.trace("create(): create dataset"); + did = H5.H5Dcreate(fid, fullPath, tid, sid, HDF5Constants.H5P_DEFAULT, plist, HDF5Constants.H5P_DEFAULT); + log.trace("create(): new H5CompoundDS"); + dataset = new H5CompoundDS(file, name, path); + } + finally { + try { + H5.H5Pclose(plist); + } + catch (HDF5Exception ex) { + log.debug("create(): H5Pclose(plist {}) failure: ", plist, ex); + } + try { + H5.H5Sclose(sid); + } + catch (HDF5Exception ex) { + log.debug("create(): H5Sclose(sid {}) failure: ", sid, ex); + } + try { + H5.H5Tclose(tid); + } + catch (HDF5Exception ex) { + log.debug("create(): H5Tclose(tid {}) failure: ", tid, ex); + } + try { + H5.H5Dclose(did); + } + catch (HDF5Exception ex) { + log.debug("create(): H5Dclose(did {}) failure: ", did, ex); + } + + for (int i = 0; i < nMembers; i++) { + try { + H5.H5Tclose(mTypes[i]); + } + catch (HDF5Exception ex) { + log.debug("create(): H5Tclose(mTypes[{}] {}) failure: ", i, mTypes[i], ex); + } + } + } + + if (dataset != null) { + pgroup.addToMemberList(dataset); + if (data != null) { + dataset.init(); + long selected[] = dataset.getSelectedDims(); + for (int i = 0; i < rank; i++) { + selected[i] = dims[i]; + } + dataset.write(data); + } + } + + log.trace("create(): finish"); + return dataset; + } + + /* + * (non-Javadoc) + * + * @see hdf.object.Dataset#isString(long) + */ + @Override + public boolean isString(long tid) { + boolean b = false; + try { + b = (HDF5Constants.H5T_STRING == H5.H5Tget_class(tid)); + } + catch (Exception ex) { + b = false; + } + + return b; + } + + /* + * (non-Javadoc) + * + * @see hdf.object.Dataset#getSize(long) + */ + @Override + public long getSize(long tid) { + long tsize = -1; + + try { + tsize = H5.H5Tget_size(tid); + } + catch (Exception ex) { + tsize = -1; + } + + return tsize; + } + + /* + * (non-Javadoc) + * + * @see hdf.object.Dataset#isVirtual() + */ + @Override + public boolean isVirtual() { + return isVirtual; + } + + /* + * (non-Javadoc) + * + * @see hdf.object.Dataset#getVirtualFilename(int) + */ + @Override + public String getVirtualFilename(int index) { + return (isVirtual) ? virtualNameList.get(index) : null; + } + + /* + * (non-Javadoc) + * + * @see hdf.object.Dataset#getVirtualMaps() + */ + @Override + public int getVirtualMaps() { + return (isVirtual) ? 
virtualNameList.size() : -1; + } + +} diff --git a/src/main/java/hdf/object/h5/H5Datatype.java b/src/main/java/hdf/object/h5/H5Datatype.java new file mode 100644 index 0000000..605cfac --- /dev/null +++ b/src/main/java/hdf/object/h5/H5Datatype.java @@ -0,0 +1,2164 @@ +/***************************************************************************** + * Copyright by The HDF Group. * + * Copyright by the Board of Trustees of the University of Illinois. * + * All rights reserved. * + * * + * This file is part of the HDF Java Products distribution. * + * The full copyright notice, including terms governing use, modification, * + * and redistribution, is contained in the files COPYING and Copyright.html. * + * COPYING can be found at the root of the source code distribution tree. * + * Or, see https://support.hdfgroup.org/products/licenses.html * + * If you do not have access to either file, you may request a copy from * + * help@hdfgroup.org. * + ****************************************************************************/ + +package hdf.object.h5; + +import java.lang.reflect.Array; +import java.util.ArrayList; +import java.util.HashMap; +import java.util.Iterator; +import java.util.List; +import java.util.Map.Entry; +import java.util.Objects; +import java.util.Vector; + +import hdf.hdf5lib.H5; +import hdf.hdf5lib.HDF5Constants; +import hdf.hdf5lib.HDFNativeData; +import hdf.hdf5lib.exceptions.HDF5Exception; +import hdf.hdf5lib.exceptions.HDF5LibraryException; +import hdf.hdf5lib.structs.H5O_info_t; +import hdf.object.Attribute; +import hdf.object.CompoundDS; +import hdf.object.Datatype; +import hdf.object.FileFormat; + +/** + * This class defines HDF5 datatype characteristics and APIs for a data type. + *

+ * This class provides several methods to convert an HDF5 datatype identifier to a datatype object, and vice versa. A + * datatype object is described by four basic fields: datatype class, size, byte order, and sign, while an HDF5 datatype + * is represented by a datatype identifier. + * + * @version 1.1 9/4/2007 + * @author Peter X. Cao + */ +public class H5Datatype extends Datatype { + private static final long serialVersionUID = -750546422258749792L; + + private static final org.slf4j.Logger log = org.slf4j.LoggerFactory.getLogger(H5Datatype.class); + + /** + * The list of attributes of this data object. + */ + private List<Attribute> attributeList; + + /** Flag to indicate if this datatype is a named datatype */ + private boolean isNamed = false; + + private boolean isRefObj = false; + + private boolean isRegRef = false; + + private int nAttributes = -1; + + private H5O_info_t objInfo; + + /** + * The native class of the datatype. + */ + private int nativeClass = -1; + + /** + * The native properties of the number datatype. + */ + private long nativePrecision = 0; + private int nativeOffset = -1; + private int nativePadLSB = -1; + private int nativePadMSB = -1; + + /** + * The native properties of the float datatype. + */ + private long nativeFPebias = 0; + private long nativeFPspos = -1; + private long nativeFPepos = -1; + private long nativeFPesize = -1; + private long nativeFPmpos = -1; + private long nativeFPmsize = -1; + private int nativeFPnorm = -1; + private int nativeFPinpad = -1; + + /** + * The native properties of the string datatype. + */ + private int nativeStrPad = -1; + private int nativeStrCSET = -1; + + /** + * The tag for an opaque datatype. + */ + private String opaqueTag = null; + + /** + * Constructs a named HDF5 data type object for a given file, datatype name and group path. + *

+ * The datatype object represents an existing named datatype in file. For example, + * + *

+     * new H5Datatype(file, "dtype1", "/g0")
+     * 
+ * + * constructs a datatype object that corresponds to the named datatype, "dtype1", in group "/g0". + * + * @param theFile + * the file that contains the named datatype. + * @param name + * the name of the datatype such as "dtype1". + * @param path + * the group path to the datatype such as "/g0/". + */ + public H5Datatype(FileFormat theFile, String name, String path) { + this(theFile, name, path, null); + } + + /** + * @deprecated Not for public use in the future.
+ * Use + * {@link #H5Datatype(FileFormat, String, String)} instead. + * + * @param theFile + * the file that contains the named datatype. + * @param name + * the name of the datatype such as "dtype1". + * @param path + * the group path to the datatype such as "/g0/". + * @param oid + * the oid of the datatype. + */ + @Deprecated + public H5Datatype(FileFormat theFile, String name, String path, long[] oid) { + super(theFile, name, path, oid); + objInfo = new H5O_info_t(-1L, -1L, 0, 0, -1L, 0L, 0L, 0L, 0L, null, null, null); + + if ((oid == null) && (theFile != null)) { + // retrieve the object ID + try { + byte[] refBuf = H5.H5Rcreate(theFile.getFID(), this.getFullName(), HDF5Constants.H5R_OBJECT, -1); + this.oid = new long[1]; + this.oid[0] = HDFNativeData.byteToLong(refBuf, 0); + } + catch (Exception ex) { + log.debug("constructor ID {} for {} failed H5Rcreate", theFile.getFID(), this.getFullName()); + } + } + + long tid = -1; + if (theFile != null) { + try { + tid = H5.H5Topen(theFile.getFID(), this.getFullName(), HDF5Constants.H5P_DEFAULT); + fromNative(tid); + } + catch (Exception ex) { + log.debug("constructor H5Topen() failure"); + } + finally { + close(tid); + } + } + } + + /** + * Constructs a Datatype with specified class, size, byte order and sign. + *

+ * The following is a list of a few examples of H5Datatype. + *

    + *
  1. to create unsigned native integer
    + * H5Datatype type = new H5Datatype(Datatype.CLASS_INTEGER, Datatype.NATIVE, Datatype.NATIVE, Datatype.SIGN_NONE); + *
  2. to create 16-bit signed integer with big endian
    + * H5Datatype type = new H5Datatype(Datatype.CLASS_INTEGER, 2, Datatype.ORDER_BE, Datatype.NATIVE); + *
  3. to create native float
    + * H5Datatype type = new H5Datatype(Datatype.CLASS_FLOAT, Datatype.NATIVE, Datatype.NATIVE, Datatype.NATIVE); + *
  4. to create 64-bit double
    + * H5Datatype type = new H5Datatype(Datatype.CLASS_FLOAT, 8, Datatype.NATIVE, Datatype.NATIVE); + *
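  5. to create a variable-length string (illustrative; a tsize of -1, per the tsize note below)
    + * H5Datatype type = new H5Datatype(Datatype.CLASS_STRING, -1, Datatype.NATIVE, Datatype.NATIVE); + *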
+ * + * @param tclass + * the class of the datatype, e.g. CLASS_INTEGER, CLASS_FLOAT and etc. + * @param tsize + * the size of the datatype in bytes, e.g. for a 32-bit integer, the size is 4. + * Valid values are NATIVE or a positive value. For string datatypes, -1 is also + * a valid value (to create a variable-length string). + * @param torder + * the byte order of the datatype. Valid values are ORDER_LE, ORDER_BE, ORDER_VAX, + * ORDER_NONE and NATIVE. + * @param tsign + * the sign of the datatype. Valid values are SIGN_NONE, SIGN_2 and NATIVE. + * +* @throws Exception + * if there is an error + */ + public H5Datatype(int tclass, int tsize, int torder, int tsign) throws Exception { + this(tclass, tsize, torder, tsign, null); + } + + /** + * Constructs a Datatype with specified class, size, byte order and sign. + *

+ * The following is a list of a few examples of H5Datatype. + *

    + *
  1. to create unsigned native integer
    + * H5Datatype type = new H5Datatype(Datatype.CLASS_INTEGER, Datatype.NATIVE, Datatype.NATIVE, Datatype.SIGN_NONE); + *
  2. to create 16-bit signed integer with big endian
    + * H5Datatype type = new H5Datatype(Datatype.CLASS_INTEGER, 2, Datatype.ORDER_BE, Datatype.NATIVE); + *
  3. to create native float
    + * H5Datatype type = new H5Datatype(Datatype.CLASS_FLOAT, Datatype.NATIVE, Datatype.NATIVE, Datatype.NATIVE); + *
  4. to create 64-bit double
    + * H5Datatype type = new H5Datatype(Datatype.CLASS_FLOAT, 8, Datatype.NATIVE, Datatype.NATIVE); + *
+ * + * @param tclass + * the class of the datatype, e.g. CLASS_INTEGER, CLASS_FLOAT and etc. + * @param tsize + * the size of the datatype in bytes, e.g. for a 32-bit integer, the size is 4. + * Valid values are NATIVE or a positive value. For string datatypes, -1 is also + * a valid value (to create a variable-length string). + * @param torder + * the byte order of the datatype. Valid values are ORDER_LE, ORDER_BE, ORDER_VAX, + * ORDER_NONE and NATIVE. + * @param tsign + * the sign of the datatype. Valid values are SIGN_NONE, SIGN_2 and NATIVE. + * @param tbase + * the base datatype of the new datatype + * +* @throws Exception + * if there is an error + */ + public H5Datatype(int tclass, int tsize, int torder, int tsign, Datatype tbase) throws Exception { + this(tclass, tsize, torder, tsign, tbase, null); + } + + /** + * Constructs a Datatype with specified class, size, byte order and sign. + *

+ * The following is a list of a few examples of H5Datatype. + *

    + *
  1. to create unsigned native integer
    + * H5Datatype type = new H5Datatype(Datatype.CLASS_INTEGER, Datatype.NATIVE, Datatype.NATIVE, Datatype.SIGN_NONE); + *
  2. to create 16-bit signed integer with big endian
    + * H5Datatype type = new H5Datatype(Datatype.CLASS_INTEGER, 2, Datatype.ORDER_BE, Datatype.NATIVE); + *
  3. to create native float
    + * H5Datatype type = new H5Datatype(Datatype.CLASS_FLOAT, Datatype.NATIVE, Datatype.NATIVE, Datatype.NATIVE); + *
  4. to create 64-bit double
    + * H5Datatype type = new H5Datatype(Datatype.CLASS_FLOAT, 8, Datatype.NATIVE, Datatype.NATIVE); + *
+ * + * @param tclass + * the class of the datatype, e.g. CLASS_INTEGER, CLASS_FLOAT and + * etc. + * @param tsize + * the size of the datatype in bytes, e.g. for a 32-bit integer, the + * size is 4. Valid values are NATIVE or a positive value. For string + * datatypes, -1 is also a valid value (to create a variable-length + * string). + * @param torder + * the byte order of the datatype. Valid values are ORDER_LE, + * ORDER_BE, ORDER_VAX, ORDER_NONE and NATIVE. + * @param tsign + * the sign of the datatype. Valid values are SIGN_NONE, SIGN_2 and + * NATIVE. + * @param tbase + * the base datatype of the new datatype + * @param pbase + * the parent datatype of the new datatype + * +* @throws Exception + * if there is an error + */ + public H5Datatype(int tclass, int tsize, int torder, int tsign, Datatype tbase, Datatype pbase) throws Exception { + super(tclass, tsize, torder, tsign, tbase, pbase); + datatypeDescription = getDescription(); + } + + /** + * Constructs a Datatype with a given native datatype identifier. + *

+ * For example, if the datatype identifier is a 32-bit unsigned integer created from HDF5, + * + *

+     * long tid = H5.H5Tcopy(HDF5Constants.H5T_NATIVE_UINT32);
+     * Datatype dtype = new H5Datatype(tid);
+     * 
+ * + * will construct a datatype equivalent to new Datatype(Datatype.CLASS_INTEGER, 4, Datatype.NATIVE, Datatype.SIGN_NONE); + * + * @see #fromNative(long nativeID) + * + * @param nativeID + * the native datatype identifier. + * +* @throws Exception + * if there is an error + */ + public H5Datatype(long nativeID) throws Exception { + this(nativeID, null); + } + + /** + * Constructs a Datatype with a given native datatype identifier. + *

+ * For example, if the datatype identifier is a 32-bit unsigned integer created from HDF5, + * + *

+     * long tid = H5.H5Tcopy(HDF5Constants.H5T_NATIVE_UINT32);
+     * Datatype dtype = new H5Datatype(tid);
+     * 
+ * + * will construct a datatype equivalent to new Datatype(Datatype.CLASS_INTEGER, 4, Datatype.NATIVE, Datatype.SIGN_NONE); + * + * @see #fromNative(long nativeID) + * + * @param nativeID + * the native datatype identifier. + * @param pbase + * the parent datatype of the new datatype + * +* @throws Exception + * if there is an error + */ + public H5Datatype(long nativeID, Datatype pbase) throws Exception { + super(nativeID, pbase); + fromNative(nativeID); + datatypeDescription = getDescription(); + } + + /** + * Opens access to a named datatype. + *

+ * It calls H5.H5Topen(loc, name). + * + * @return the datatype identifier if successful; otherwise returns negative value. + * + * @see H5#H5Topen(long, String, long) + */ + @Override + public long open() { + log.trace("open(): start"); + long tid = -1; + + try { + tid = H5.H5Topen(getFID(), getPath() + getName(), HDF5Constants.H5P_DEFAULT); + } + catch (HDF5Exception ex) { + tid = -1; + } + + log.trace("open(): finish"); + return tid; + } + + /** + * Closes a datatype identifier. + *

+ * It calls H5.H5Tclose(tid). + * + * @param tid + * the datatype ID to close + */ + @Override + public void close(long tid) { + if (tid >= 0) { + try { + H5.H5Tclose(tid); + } + catch (HDF5Exception ex) { + log.debug("close(): H5Tclose(tid {}) failure: ", tid, ex); + } + } + } + + /* + * (non-Javadoc) + * + * @see hdf.object.DataFormat#hasAttribute() + */ + @Override + public boolean hasAttribute() { + objInfo.num_attrs = nAttributes; + + if (objInfo.num_attrs < 0) { + long tid = -1; + try { + tid = H5.H5Topen(getFID(), getPath() + getName(), HDF5Constants.H5P_DEFAULT); + fromNative(tid); + objInfo = H5.H5Oget_info(tid); + isNamed = true; + } + catch (Exception ex) { + objInfo.num_attrs = 0; + } + finally { + close(tid); + } + } + + log.trace("hasAttribute(): nAttributes={}", objInfo.num_attrs); + + return (objInfo.num_attrs > 0); + } + + /** + * Converts values in an Enumeration Datatype to names. + *

+ * This method searches the identified enumeration datatype for the values appearing in + * inValues and returns the names corresponding to those values. If a given value is + * not found in the enumeration datatype, the name corresponding to that value will be set to + * "ENUM ERR value" in the string array that is returned. + *
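+ * For example (illustrative), for an enumeration with members 0=LOW and 1=HIGH,
+ * convertEnumValueToName(new int[] { 1, 0 }) yields { "HIGH", "LOW" }. + *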

+ * If the method fails in general, null will be returned instead of a String array. An empty + * inValues parameter would cause general failure. + * + * @param inValues + * The array of enumerations values to be converted. + * + * @return The string array of names if successful; otherwise return null. + * + * @throws HDF5Exception + * If there is an error at the HDF5 library level. + * + */ + public String[] convertEnumValueToName(Object inValues) throws HDF5Exception { + log.trace("convertEnumValueToName() inValues={} start", inValues); + + if (inValues == null) { + log.debug("convertEnumValueToName() failure: in values null "); + log.trace("convertEnumValueToName(): exit"); + return null; + } + + int inSize = 0; + String[] outNames = null; + String cName = inValues.getClass().getName(); + boolean isArray = cName.lastIndexOf('[') >= 0; + if (isArray) { + inSize = Array.getLength(inValues); + } + else { + inSize = 1; + } + + if (inSize <= 0) { + log.debug("convertEnumValueToName() failure: inSize length invalid"); + log.debug("convertEnumValueToName(): inValues={} inSize={}", inValues, inSize); + log.trace("convertEnumValueToName(): exit"); + return null; + } + + if (enumMembers == null || enumMembers.size() <= 0) { + log.debug("convertEnumValueToName(): no members"); + log.trace("convertEnumValueToName(): exit"); + return null; + } + + log.trace("convertEnumValueToName(): inSize={} nMembers={} enums={}", inSize, enumMembers.size(), enumMembers); + outNames = new String[inSize]; + for (int i = 0; i < inSize; i++) { + if (isArray) { + if (enumMembers.containsKey(String.valueOf(Array.get(inValues, i)))) { + outNames[i] = enumMembers.get(String.valueOf(Array.get(inValues, i))); + } + else { + outNames[i] = "**ENUM ERR " + Array.get(inValues, i) + "**"; + } + } + else { + if (enumMembers.containsKey(String.valueOf(inValues))) { + outNames[i] = enumMembers.get(String.valueOf(inValues)); + } + else { + outNames[i] = "**ENUM ERR " + inValues + "**"; + } + } + } + + log.trace("convertEnumValueToName(): finish"); + return outNames; + } + + /** + * Converts names in an Enumeration Datatype to values. + *

+ * This method searches the identified enumeration datatype for the names appearing in + * inValues and returns the values corresponding to those names. + * + * @param in + * The array of enumerations names to be converted. + * + * @return The int array of values if successful; otherwise return null. + * + * @throws HDF5Exception + * If there is an error at the HDF5 library level. + * + */ + public Object[] convertEnumNameToValue(String[] in) throws HDF5Exception { + log.trace("convertEnumNameToValue() start"); + int size = 0; + + if (in == null) { + log.debug("convertEnumNameToValue() failure: in values null"); + log.trace("convertEnumNameToValue(): exit"); + return null; + } + + if ((size = Array.getLength(in)) <= 0) { + log.debug("convertEnumNameToValue() failure: in size not valid"); + log.trace("convertEnumNameToValue(): exit"); + return null; + } + + if (enumMembers == null || enumMembers.size() <= 0) { + log.debug("convertEnumNameToValue(): no members"); + log.trace("convertEnumNameToValue(): exit"); + return null; + } + + Object[] out = null; + if (datatypeSize == 1) { + out = new Byte[size]; + } + else if (datatypeSize == 2) { + out = new Short[size]; + } + else if (datatypeSize == 4) { + out = new Integer[size]; + } + else if (datatypeSize == 8) { + out = new Long[size]; + } + else { + out = new Object[size]; + } + + for (int i = 0; i < size; i++) { + if (in[i] == null || in[i].length() <= 0) + continue; + + for (Entry entry : enumMembers.entrySet()) { + if (Objects.equals(in[i], entry.getValue())) { + if (datatypeSize == 1) { + log.trace("convertEnumNameToValue(): ENUM is H5T_NATIVE_INT8"); + out[i] = Byte.parseByte(entry.getKey()); + } + else if (datatypeSize == 2) { + log.trace("convertEnumNameToValue(): CLASS_INT-ENUM is H5T_NATIVE_INT16"); + out[i] = Short.parseShort(entry.getKey()); + } + else if (datatypeSize == 4) { + log.trace("convertEnumNameToValue(): CLASS_INT-ENUM is H5T_NATIVE_INT32"); + out[i] = Integer.parseInt(entry.getKey()); + } + else if (datatypeSize == 8) { + log.trace("convertEnumNameToValue(): CLASS_INT-ENUM is H5T_NATIVE_INT64"); + out[i] = Long.parseLong(entry.getKey()); + } + else { + log.debug("convertEnumNameToValue(): enum datatypeSize incorrect"); + out[i] = -1; + } + break; + } + } + } + + log.trace("convertEnumNameToValue(): finish"); + return out; + } + + /* + * (non-Javadoc) + * + * @see hdf.object.Datatype#fromNative(int) + */ + @Override + public void fromNative(long tid) { + log.trace("fromNative(): start: tid={}", tid); + long tsize = -1; + int torder = -1; + boolean isChar = false; + boolean isUchar = false; + + if (tid < 0) { + datatypeClass = CLASS_NO_CLASS; + } + else { + try { + nativeClass = H5.H5Tget_class(tid); + tsize = H5.H5Tget_size(tid); + isVariableStr = H5.H5Tis_variable_str(tid); + isVLEN = false; + log.trace("fromNative(): tclass={}, tsize={}, torder={}, isVLEN={}", nativeClass, tsize, torder, isVLEN); + } + catch (Exception ex) { + log.debug("fromNative(): failure: ", ex); + datatypeClass = CLASS_NO_CLASS; + } + + try { + isUchar = H5.H5Tequal(tid, HDF5Constants.H5T_NATIVE_UCHAR); + isChar = (H5.H5Tequal(tid, HDF5Constants.H5T_NATIVE_CHAR) || isUchar); + log.trace("fromNative(): tclass={}, tsize={}, torder={}, isVLEN={}", nativeClass, tsize, torder, isVLEN); + } + catch (Exception ex) { + log.debug("fromNative(): native char type failure: ", ex); + } + + datatypeOrder = HDF5Constants.H5T_ORDER_NONE; + if (datatypeIsAtomic(tid) || (nativeClass == HDF5Constants.H5T_COMPOUND)) { + try { + torder = H5.H5Tget_order(tid); + 
datatypeOrder = (torder == HDF5Constants.H5T_ORDER_BE) ? ORDER_BE : ORDER_LE; + } + catch (Exception ex) { + log.debug("fromNative(): get_order failure: ", ex); + } + } + + if (datatypeIsAtomic(tid)) { + try { + nativePrecision = H5.H5Tget_precision_long(tid); + } + catch (Exception ex) { + log.debug("fromNative(): get_precision failure: ", ex); + } + + try { + nativeOffset = H5.H5Tget_offset(tid); + } + catch (Exception ex) { + log.debug("fromNative(): get_offset failure: ", ex); + } + + try { + int[] pads = new int[2]; + H5.H5Tget_pad(tid, pads); + nativePadLSB = pads[0]; + nativePadMSB = pads[1]; + } + catch (Exception ex) { + log.debug("fromNative(): get_pad failure: ", ex); + } + } + + log.trace("fromNative(): isUchar={}, nativePrecision={}, nativeOffset={}, nativePadLSB={}, nativePadMSB={}", isUchar, nativePrecision, nativeOffset, nativePadLSB, + nativePadMSB); + + datatypeSign = NATIVE; // default + if (nativeClass == HDF5Constants.H5T_ARRAY) { + long tmptid = -1; + datatypeClass = CLASS_ARRAY; + try { + int ndims = H5.H5Tget_array_ndims(tid); + arrayDims = new long[ndims]; + H5.H5Tget_array_dims(tid, arrayDims); + + tmptid = H5.H5Tget_super(tid); + baseType = new H5Datatype(tmptid, this); + if (baseType == null) { + log.debug("fromNative(): ARRAY datatype has null base type"); + throw new Exception("Datatype (ARRAY) has no base datatype"); + } + + datatypeSign = baseType.getDatatypeSign(); + } + catch (Exception ex) { + log.debug("fromNative(): array type failure: ", ex); + } + finally { + close(tmptid); + } + log.trace("fromNative(): array type finish"); + } + else if (nativeClass == HDF5Constants.H5T_COMPOUND) { + datatypeClass = CLASS_COMPOUND; + + try { + int nMembers = H5.H5Tget_nmembers(tid); + compoundMemberNames = new Vector<>(nMembers); + compoundMemberTypes = new Vector<>(nMembers); + compoundMemberOffsets = new Vector<>(nMembers); + log.trace("fromNative(): compound type nMembers={} start", nMembers); + + for (int i = 0; i < nMembers; i++) { + String memberName = H5.H5Tget_member_name(tid, i); + log.trace("fromNative(): compound type [{}] name={} start", i, memberName); + long memberOffset = H5.H5Tget_member_offset(tid, i); + long memberID = -1; + H5Datatype membertype = null; + try { + memberID = H5.H5Tget_member_type(tid, i); + membertype = new H5Datatype(memberID, this); + } + catch (Exception ex1) { + log.debug("fromNative(): compound type failure: ", ex1); + } + finally { + close(memberID); + } + + compoundMemberNames.add(i, memberName); + compoundMemberOffsets.add(i, memberOffset); + compoundMemberTypes.add(i, membertype); + } + } + catch (HDF5LibraryException ex) { + log.debug("fromNative(): compound type failure: ", ex); + } + log.trace("fromNative(): compound type finish"); + } + else if (nativeClass == HDF5Constants.H5T_INTEGER) { + datatypeClass = CLASS_INTEGER; + try { + log.trace("fromNative(): integer type"); + int tsign = H5.H5Tget_sign(tid); + datatypeSign = (tsign == HDF5Constants.H5T_SGN_NONE) ? 
SIGN_NONE : SIGN_2; + } + catch (Exception ex) { + log.debug("fromNative(): int type failure: ", ex); + } + } + else if (nativeClass == HDF5Constants.H5T_FLOAT) { + datatypeClass = CLASS_FLOAT; + try { + nativeFPebias = H5.H5Tget_ebias_long(tid); + } + catch (Exception ex) { + log.debug("fromNative(): get_ebias failure: ", ex); + } + try { + long[] fields = new long[5]; + H5.H5Tget_fields(tid, fields); + nativeFPspos = fields[0]; + nativeFPepos = fields[1]; + nativeFPesize = fields[2]; + nativeFPmpos = fields[3]; + nativeFPmsize = fields[4]; + } + catch (Exception ex) { + log.debug("fromNative(): get_fields failure: ", ex); + } + try { + nativeFPnorm = H5.H5Tget_norm(tid); + } + catch (Exception ex) { + log.debug("fromNative(): get_norm failure: ", ex); + } + try { + nativeFPinpad = H5.H5Tget_inpad(tid); + } + catch (Exception ex) { + log.debug("fromNative(): get_inpad failure: ", ex); + } + } + else if (isChar) { + datatypeClass = CLASS_CHAR; + datatypeSign = (isUchar) ? SIGN_NONE : SIGN_2; + log.trace("fromNative(): CLASS_CHAR:datatypeSign={}", datatypeSign); + } + else if (nativeClass == HDF5Constants.H5T_STRING) { + datatypeClass = CLASS_STRING; + try { + isVLEN = H5.H5Tdetect_class(tid, HDF5Constants.H5T_VLEN) || isVariableStr; + log.trace("fromNative(): H5T_STRING:var str type={}", isVLEN); + nativeStrPad = H5.H5Tget_strpad(tid); + } + catch (Exception ex) { + log.debug("fromNative(): var str type failure: ", ex); + } + try { + nativeStrCSET = H5.H5Tget_cset(tid); + } + catch (Exception ex) { + log.debug("fromNative(): H5T_STRING:get_cset failure: ", ex); + } + log.trace("fromNative(): H5T_STRING:nativeStrPad={}, nativeStrCSET={}", nativeStrPad, nativeStrCSET); + } + else if (nativeClass == HDF5Constants.H5T_REFERENCE) { + datatypeClass = CLASS_REFERENCE; + log.trace("fromNative(): reference type"); + try { + isRegRef = H5.H5Tequal(tid, HDF5Constants.H5T_STD_REF_DSETREG); + } + catch (Exception ex) { + log.debug("fromNative(): H5T_STD_REF_DSETREG: ", ex); + } + try { + isRefObj = H5.H5Tequal(tid, HDF5Constants.H5T_STD_REF_OBJ); + } + catch (Exception ex) { + log.debug("fromNative(): H5T_STD_REF_OBJ: ", ex); + } + } + else if (nativeClass == HDF5Constants.H5T_ENUM) { + datatypeClass = CLASS_ENUM; + long tmptid = -1; + long basetid = -1; + try { + log.trace("fromNative(): enum type"); + basetid = H5.H5Tget_super(tid); + tmptid = basetid; + basetid = H5.H5Tget_native_type(tmptid); + log.trace("fromNative(): enum type basetid={}", basetid); + if (basetid >= 0) { + baseType = new H5Datatype(tmptid, this); + datatypeSign = baseType.getDatatypeSign(); + } + } + catch (Exception ex) { + log.debug("fromNative(): enum type failure: ", ex); + } + finally { + close(tmptid); + close(basetid); + } + try { + int enumMemberCount = H5.H5Tget_nmembers(tid); + String name = null; + String enumStr = null; + byte[] val = new byte[(int)tsize]; + enumMembers = new HashMap<>(); + for (int i = 0; i < enumMemberCount; i++) { + name = H5.H5Tget_member_name(tid, i); + H5.H5Tget_member_value(tid, i, val); + switch ((int)H5.H5Tget_size(tid)) { + case 1: + enumStr = Byte.toString((HDFNativeData.byteToByte(val[0]))[0]); + break; + case 2: + enumStr = Short.toString((HDFNativeData.byteToShort(val))[0]); + break; + case 4: + enumStr = Integer.toString((HDFNativeData.byteToInt(val))[0]); + break; + case 8: + enumStr = Long.toString((HDFNativeData.byteToLong(val))[0]); + break; + default: + enumStr = "-1"; + break; + } + enumMembers.put(enumStr, name); + } + } + catch (Exception ex) { + log.debug("fromNative(): enum 
type failure: ", ex); + } + } + else if (nativeClass == HDF5Constants.H5T_VLEN) { + long tmptid = -1; + datatypeClass = CLASS_VLEN; + isVLEN = true; + try { + log.trace("fromNative(): vlen type"); + tmptid = H5.H5Tget_super(tid); + baseType = new H5Datatype(tmptid, this); + if (baseType == null) { + log.debug("fromNative(): VLEN datatype has null base type"); + throw new Exception("Datatype (VLEN) has no base datatype"); + } + + datatypeSign = baseType.getDatatypeSign(); + } + catch (Exception ex) { + log.debug("fromNative(): vlen type failure: ", ex); + } + finally { + close(tmptid); + } + } + else if (nativeClass == HDF5Constants.H5T_BITFIELD) { + datatypeClass = CLASS_BITFIELD; + } + else if (nativeClass == HDF5Constants.H5T_OPAQUE) { + datatypeClass = CLASS_OPAQUE; + + try { + opaqueTag = H5.H5Tget_tag(tid); + } + catch (Exception ex) { + log.debug("fromNative(): opaque type tag retrieval failed: ", ex); + opaqueTag = null; + } + } + else { + log.debug("fromNative(): datatypeClass is unknown"); + } + + datatypeSize = (isVLEN && !isVariableStr) ? HDF5Constants.H5T_VL_T : tsize; + } + log.trace("fromNative(): datatypeClass={} baseType={} datatypeSize={}", datatypeClass, baseType, datatypeSize); + log.trace("fromNative(): finish"); + } + + /** + * @param tid + * the datatype identification disk. + * + * @return the memory datatype identifier if successful, and negative otherwise. + */ + public static long toNative(long tid) { + // data type information + log.trace("toNative(): tid={} start", tid); + long nativeID = -1; + + try { + nativeID = H5.H5Tget_native_type(tid); + } + catch (Exception ex) { + log.debug("toNative(): H5Tget_native_type(tid {}) failure: ", tid, ex); + } + + try { + if (H5.H5Tis_variable_str(tid)) + H5.H5Tset_size(nativeID, HDF5Constants.H5T_VARIABLE); + } + catch (Exception ex) { + log.debug("toNative(): var str type size failure: ", ex); + } + + return nativeID; + } + + /* + * (non-Javadoc) + * + * @see hdf.object.Datatype#createNative() + */ + @SuppressWarnings("rawtypes") + @Override + public long createNative() { + log.trace("createNative(): start"); + + long tid = -1; + long tmptid = -1; + + if (isNamed) { + try { + tid = H5.H5Topen(getFID(), getPath() + getName(), HDF5Constants.H5P_DEFAULT); + } + catch (Exception ex) { + log.debug("createNative(): name {} H5Topen failure: ", getPath() + getName(), ex); + } + } + + if (tid >= 0) { + log.trace("createNative(): tid >= 0"); + log.trace("createNative(): finish"); + return tid; + } + + log.trace("createNative(): datatypeClass={} datatypeSize={} baseType={}", datatypeClass, datatypeSize, + baseType); + + switch (datatypeClass) { + case CLASS_ARRAY: + try { + if (baseType == null) { + log.debug("createNative(): CLASS_ARRAY base type is NULL"); + break; + } + + if ((tmptid = baseType.createNative()) < 0) { + log.debug("createNative(): failed to create native datatype for ARRAY base datatype"); + break; + } + + tid = H5.H5Tarray_create(tmptid, arrayDims.length, arrayDims); + } + catch (Exception ex) { + log.debug("createNative(): native array datatype creation failed: ", ex); + if (tid >= 0) close(tid); + tid = -1; + } + finally { + close(tmptid); + } + + break; + case CLASS_COMPOUND: + try { + tid = H5.H5Tcreate(CLASS_COMPOUND, datatypeSize); + + for (int i = 0; i < compoundMemberTypes.size(); i++) { + H5Datatype memberType = null; + String memberName = null; + long memberOffset = -1; + + try { + memberType = (H5Datatype) compoundMemberTypes.get(i); + } + catch (Exception ex) { + log.debug("createNative(): get compound 
member[{}] type failure: ", i, ex); + memberType = null; + } + + try { + memberName = compoundMemberNames.get(i); + } + catch (Exception ex) { + log.debug("createNative(): get compound member[{}] name failure: ", i, ex); + memberName = null; + } + + try { + memberOffset = compoundMemberOffsets.get(i); + } + catch (Exception ex) { + log.debug("createNative(): get compound member[{}] offset failure: ", i, ex); + memberOffset = -1; + } + + long memberID = -1; + try { + memberID = memberType.createNative(); + log.trace("createNative(): {} member[{}] with offset={} ID={}: ", memberName, i, + memberOffset, memberID); + + H5.H5Tinsert(tid, memberName, memberOffset, memberID); + } + catch (Exception ex) { + log.debug("createNative(): compound type member[{}] insertion failure: ", i, ex); + } + finally { + close(memberID); + } + } + } + catch (Exception ex) { + log.debug("createNative(): native compound datatype creation failed: ", ex); + if (tid >= 0) close(tid); + tid = -1; + } + break; + case CLASS_INTEGER: + log.trace("createNative(): CLASS_INT of size {}", datatypeSize); + + try { + switch ((int) datatypeSize) { + case 1: + log.trace("createNative(): CLASS_INT is H5T_NATIVE_INT8"); + tid = H5.H5Tcopy(HDF5Constants.H5T_NATIVE_INT8); + break; + case 2: + log.trace("createNative(): CLASS_INT is H5T_NATIVE_INT16"); + tid = H5.H5Tcopy(HDF5Constants.H5T_NATIVE_INT16); + break; + case 4: + log.trace("createNative(): CLASS_INT is H5T_NATIVE_INT32"); + tid = H5.H5Tcopy(HDF5Constants.H5T_NATIVE_INT32); + break; + case 8: + log.trace("createNative(): CLASS_INT is H5T_NATIVE_INT64"); + tid = H5.H5Tcopy(HDF5Constants.H5T_NATIVE_INT64); + break; + default: + if (datatypeSize == NATIVE) { + log.trace("createNative(): CLASS_INT is H5T_NATIVE_INT"); + tid = H5.H5Tcopy(HDF5Constants.H5T_NATIVE_INT); + } + else { + /* Custom sized integer */ + tid = H5.H5Tcopy(HDF5Constants.H5T_NATIVE_INT8); + H5.H5Tset_size(tid, datatypeSize); + H5.H5Tset_precision(tid, 8 * datatypeSize); + } + break; + } + + if (datatypeOrder == Datatype.ORDER_BE) { + log.trace("createNative(): CLASS_INT order is H5T_ORDER_BE"); + H5.H5Tset_order(tid, HDF5Constants.H5T_ORDER_BE); + } + else if (datatypeOrder == Datatype.ORDER_LE) { + log.trace("createNative(): CLASS_INT order is H5T_ORDER_LE"); + H5.H5Tset_order(tid, HDF5Constants.H5T_ORDER_LE); + } + + if (datatypeSign == Datatype.SIGN_NONE) { + log.trace("createNative(): CLASS_INT sign is H5T_SGN_NONE"); + H5.H5Tset_sign(tid, HDF5Constants.H5T_SGN_NONE); + } + } + catch (Exception ex) { + log.debug("createNative(): native integer datatype creation failed: ", ex); + if (tid >= 0) close(tid); + tid = -1; + } + + break; + case CLASS_ENUM: + try { + if (baseType != null) { + if ((tmptid = baseType.createNative()) < 0) { + log.debug("createNative(): failed to create native type for ENUM base datatype"); + break; + } + + tid = H5.H5Tenum_create(tmptid); + } + else { + if (datatypeSize == NATIVE) + datatypeSize = H5.H5Tget_size(HDF5Constants.H5T_NATIVE_INT); + + tid = H5.H5Tcreate(HDF5Constants.H5T_ENUM, datatypeSize); + } + + if (datatypeOrder == Datatype.ORDER_BE) { + log.trace("createNative(): CLASS_ENUM order is H5T_ORDER_BE"); + H5.H5Tset_order(tid, HDF5Constants.H5T_ORDER_BE); + } + else if (datatypeOrder == Datatype.ORDER_LE) { + log.trace("createNative(): CLASS_ENUM order is H5T_ORDER_LE"); + H5.H5Tset_order(tid, HDF5Constants.H5T_ORDER_LE); + } + + if (datatypeSign == Datatype.SIGN_NONE) { + log.trace("createNative(): CLASS_ENUM sign is H5T_SGN_NONE"); + H5.H5Tset_sign(tid, 
HDF5Constants.H5T_SGN_NONE); + } + } + catch (Exception ex) { + log.debug("createNative(): native enum datatype creation failed: ", ex); + if (tid >= 0) close(tid); + tid = -1; + } + finally { + close(tmptid); + } + + break; + case CLASS_FLOAT: + try { + tid = H5.H5Tcopy((datatypeSize == 8) ? HDF5Constants.H5T_NATIVE_DOUBLE : HDF5Constants.H5T_NATIVE_FLOAT); + + if (datatypeOrder == Datatype.ORDER_BE) { + H5.H5Tset_order(tid, HDF5Constants.H5T_ORDER_BE); + } + else if (datatypeOrder == Datatype.ORDER_LE) { + H5.H5Tset_order(tid, HDF5Constants.H5T_ORDER_LE); + } + + if (nativeFPebias > 0) { + H5.H5Tset_ebias(tid, nativeFPebias); + } + + if (nativeFPnorm >= 0) { + H5.H5Tset_norm(tid, nativeFPnorm); + } + + if (nativeFPinpad >= 0) { + H5.H5Tset_inpad(tid, nativeFPinpad); + } + + if ((nativeFPesize >= 0) && (nativeFPmsize >= 0)) { + H5.H5Tset_fields(tid, nativeFPspos, nativeFPmpos, nativeFPesize, nativeFPmpos, nativeFPmsize); + } + } + catch (Exception ex) { + log.debug("createNative(): native floating-point datatype creation failed: ", ex); + if (tid >= 0) close(tid); + tid = -1; + } + + break; + case CLASS_CHAR: + try { + tid = H5.H5Tcopy((datatypeSign == Datatype.SIGN_NONE) ? HDF5Constants.H5T_NATIVE_UCHAR + : HDF5Constants.H5T_NATIVE_CHAR); + } + catch (Exception ex) { + log.debug("createNative(): native character datatype creation failed: ", ex); + if (tid >= 0) close(tid); + tid = -1; + } + + break; + case CLASS_STRING: + try { + tid = H5.H5Tcopy(HDF5Constants.H5T_C_S1); + + H5.H5Tset_size(tid, (isVLEN || datatypeSize < 0) ? HDF5Constants.H5T_VARIABLE : datatypeSize); + + log.trace("createNative(): isVlenStr={} nativeStrPad={} nativeStrCSET={}", isVLEN, nativeStrPad, + nativeStrCSET); + + H5.H5Tset_strpad(tid, (nativeStrPad >= 0) ? nativeStrPad : HDF5Constants.H5T_STR_NULLTERM); + + if (nativeStrCSET >= 0) { + H5.H5Tset_cset(tid, nativeStrCSET); + } + } + catch (Exception ex) { + log.debug("createNative(): native string datatype creation failed: ", ex); + if (tid >= 0) close(tid); + tid = -1; + } + + break; + case CLASS_REFERENCE: + try { + long objRefTypeSize = H5.H5Tget_size(HDF5Constants.H5T_STD_REF_OBJ); + + tid = H5.H5Tcopy((datatypeSize > objRefTypeSize) ? 
HDF5Constants.H5T_STD_REF_DSETREG + : HDF5Constants.H5T_STD_REF_OBJ); + } + catch (Exception ex) { + log.debug("createNative(): native reference datatype creation failed: ", ex); + if (tid >= 0) close(tid); + tid = -1; + } + + break; + case CLASS_VLEN: + try { + if (baseType == null) { + log.debug("createNative(): CLASS_VLEN base type is NULL"); + break; + } + + if ((tmptid = baseType.createNative()) < 0) { + log.debug("createNative(): failed to create native datatype for VLEN base datatype"); + break; + } + + tid = H5.H5Tvlen_create(tmptid); + } + catch (Exception ex) { + log.debug("createNative(): native variable-length datatype creation failed: ", ex); + if (tid >= 0) close(tid); + tid = -1; + } + finally { + close(tmptid); + } + + break; + case CLASS_BITFIELD: + log.trace("createNative(): CLASS_BITFIELD size is {}", datatypeSize); + + try { + switch ((int) datatypeSize) { + case 1: + log.trace("createNative(): CLASS_BITFIELD is H5T_NATIVE_B8"); + tid = H5.H5Tcopy(HDF5Constants.H5T_NATIVE_B8); + break; + case 2: + log.trace("createNative(): CLASS_BITFIELD is H5T_NATIVE_B16"); + tid = H5.H5Tcopy(HDF5Constants.H5T_NATIVE_B16); + break; + case 4: + log.trace("createNative(): CLASS_BITFIELD is H5T_NATIVE_B32"); + tid = H5.H5Tcopy(HDF5Constants.H5T_NATIVE_B32); + break; + case 8: + log.trace("createNative(): CLASS_BITFIELD is H5T_NATIVE_B64"); + tid = H5.H5Tcopy(HDF5Constants.H5T_NATIVE_B64); + break; + default: + if (datatypeSize == NATIVE) + datatypeSize = 1; + + /* Custom sized bitfield */ + tid = H5.H5Tcopy(HDF5Constants.H5T_NATIVE_B8); + H5.H5Tset_size(tid, datatypeSize); + H5.H5Tset_precision(tid, 8 * datatypeSize); + + break; + } + + if (datatypeOrder == Datatype.ORDER_BE) { + log.trace("createNative(): CLASS_BITFIELD order is H5T_ORDER_BE"); + H5.H5Tset_order(tid, HDF5Constants.H5T_ORDER_BE); + } + else if (datatypeOrder == Datatype.ORDER_LE) { + log.trace("createNative(): CLASS_BITFIELD order is H5T_ORDER_LE"); + H5.H5Tset_order(tid, HDF5Constants.H5T_ORDER_LE); + } + } + catch (Exception ex) { + log.debug("createNative(): native bitfield datatype creation failed: ", ex); + if (tid >= 0) close(tid); + tid = -1; + } + + break; + case CLASS_OPAQUE: + log.trace("createNative(): CLASS_OPAQUE is {}-byte H5T_OPAQUE", datatypeSize); + + try { + if (datatypeSize == NATIVE) + tid = H5.H5Tcopy(HDF5Constants.H5T_NATIVE_OPAQUE); + else + tid = H5.H5Tcreate(HDF5Constants.H5T_OPAQUE, datatypeSize); + + if (opaqueTag != null) { + H5.H5Tset_tag(tid, opaqueTag); + } + } + catch (Exception ex) { + log.debug("createNative(): native opaque datatype creation failed: ", ex); + if (tid >= 0) close(tid); + tid = -1; + } + + break; + default: + log.debug("createNative(): Unknown class"); + break; + } // (tclass) + + // set up enum members + if (datatypeClass == CLASS_ENUM) { + try { + String memstr; + String memname; + byte[] memval = null; + if (datatypeSize == 1) { + memval = HDFNativeData.byteToByte(new Byte((byte) 0)); + } + else if (datatypeSize == 2) { + memval = HDFNativeData.shortToByte(new Short((short) 0)); + } + else if (datatypeSize == 4) { + memval = HDFNativeData.intToByte(new Integer(0)); + } + else if (datatypeSize == 8) { + memval = HDFNativeData.longToByte(new Long(0)); + } + + // using "0" and "1" as default + if (enumMembers == null) { + enumMembers = new HashMap<>(); + enumMembers.put("1", "0"); + enumMembers.put("2", "1"); + log.trace("createNative(): default string"); + } + Iterator entries = enumMembers.entrySet().iterator(); + while (entries.hasNext()) { + Entry thisEntry = (Entry) 
entries.next(); + memstr = (String) thisEntry.getKey(); + memname = (String) thisEntry.getValue(); + + if (datatypeSize == 1) { + log.trace("createNative(): CLASS_INT-ENUM is H5T_NATIVE_INT8"); + Byte tval = Byte.parseByte(memstr); + memval = HDFNativeData.byteToByte(tval); + } + else if (datatypeSize == 2) { + log.trace("createNative(): CLASS_INT-ENUM is H5T_NATIVE_INT16"); + Short tval = Short.parseShort(memstr); + memval = HDFNativeData.shortToByte(tval); + } + else if (datatypeSize == 4) { + log.trace("createNative(): CLASS_INT-ENUM is H5T_NATIVE_INT32"); + Integer tval = Integer.parseInt(memstr); + memval = HDFNativeData.intToByte(tval); + } + else if (datatypeSize == 8) { + log.trace("createNative(): CLASS_INT-ENUM is H5T_NATIVE_INT64"); + Long tval = Long.parseLong(memstr); + memval = HDFNativeData.longToByte(tval); + } + else { + log.debug("createNative(): enum datatypeSize incorrect"); + } + log.trace("createNative(): H5Tenum_insert {} {}", memname, memval); + H5.H5Tenum_insert(tid, memname, memval); + } + } + catch (Exception ex) { + log.debug("createNative(): set up enum members failure: ", ex); + } + } // (datatypeClass == CLASS_ENUM) + + try { + tmptid = tid; + tid = H5.H5Tget_native_type(tmptid); + } + catch (HDF5Exception ex) { + log.debug("createNative(): H5Tget_native_type({}) failure: ", tmptid, ex); + } + finally { + close(tmptid); + } + + return tid; + } + + /** + * Allocates a one-dimensional array of byte, short, int, long, float, double, + * or String to store data in memory. + * + * For example, + * + *

+     * long tid = H5.H5Tcopy(HDF5Constants.H5T_NATIVE_INT32);
+     * H5Datatype dtype = new H5Datatype(tid);
+     * int[] data = (int[]) H5Datatype.allocateArray(dtype, 100);
+     * 
+ * + * returns a 32-bit integer array of size 100. + * + * @param dtype + * the type. + * @param nPoints + * the total number of data points of the array. + * + * @return the array object if successful; otherwise, return null. + * + * @throws OutOfMemoryError + * If there is a failure. + */ + public static final Object allocateArray(final H5Datatype dtype, int nPoints) throws OutOfMemoryError { + log.trace("allocateArray(): start: nPoints={}", nPoints); + + Object data = null; + H5Datatype baseType = (H5Datatype) dtype.getDatatypeBase(); + int typeClass = dtype.getDatatypeClass(); + long typeSize = dtype.getDatatypeSize(); + + if (nPoints < 0) { + log.debug("allocateArray(): nPoints < 0"); + log.trace("allocateArray(): finish"); + return null; + } + + // Scalar members have dimensionality zero, i.e. size =0 + // what can we do about it, set the size to 1 + if (nPoints == 0) { + nPoints = 1; + } + + log.trace("allocateArray(): tclass={} : tsize={}", typeClass, typeSize); + + if (dtype.isVarStr() || dtype.isVLEN() || dtype.isRegRef()) { + log.trace("allocateArray(): is_variable_str={} || isVL={} || is_reg_ref={}", dtype.isVarStr(), dtype.isVLEN(), dtype.isRegRef()); + + data = new String[nPoints]; + for (int i = 0; i < nPoints; i++) { + ((String[]) data)[i] = ""; + } + } + else if (typeClass == HDF5Constants.H5T_INTEGER) { + log.trace("allocateArray(): class H5T_INTEGER"); + + switch ((int) typeSize) { + case 1: + data = new byte[nPoints]; + break; + case 2: + data = new short[nPoints]; + break; + case 4: + data = new int[nPoints]; + break; + case 8: + data = new long[nPoints]; + break; + default: + break; + } + } + else if (typeClass == HDF5Constants.H5T_ENUM) { + log.trace("allocateArray(): class H5T_ENUM"); + + if (baseType != null) + data = H5Datatype.allocateArray(baseType, nPoints); + else + data = new byte[(int) (nPoints * typeSize)]; + } + else if (typeClass == HDF5Constants.H5T_COMPOUND) { + log.trace("allocateArray(): class H5T_COMPOUND"); + + data = new ArrayList<>(dtype.getCompoundMemberTypes().size()); + } + else if (typeClass == HDF5Constants.H5T_FLOAT) { + log.trace("allocateArray(): class H5T_FLOAT"); + + switch ((int) typeSize) { + case 4: + data = new float[nPoints]; + break; + case 8: + data = new double[nPoints]; + break; + default: + break; + } + } + else if ((typeClass == HDF5Constants.H5T_STRING) || (typeClass == HDF5Constants.H5T_REFERENCE)) { + log.trace("allocateArray(): class H5T_STRING || H5T_REFERENCE"); + + data = new byte[(int) (nPoints * typeSize)]; + } + else if (typeClass == HDF5Constants.H5T_ARRAY) { + log.trace("allocateArray(): class H5T_ARRAY"); + + try { + log.trace("allocateArray(): ArrayRank={}", dtype.getArrayDims().length); + + // Use the base datatype to define the array + long[] arrayDims = dtype.getArrayDims(); + int asize = nPoints; + for (int j = 0; j < arrayDims.length; j++) { + log.trace("allocateArray(): Array dims[{}]={}", j, arrayDims[j]); + + asize *= arrayDims[j]; + } + + if (baseType != null) { + data = H5Datatype.allocateArray(baseType, asize); + } + } + catch (Exception ex) { + log.debug("allocateArray(): H5T_ARRAY class failure: ", ex); + } + } + else if ((typeClass == HDF5Constants.H5T_OPAQUE) || (typeClass == HDF5Constants.H5T_BITFIELD)) { + log.trace("allocateArray(): class H5T_OPAQUE || H5T_BITFIELD"); + + data = new byte[(int) (nPoints * typeSize)]; + } + else { + log.debug("allocateArray(): class ???? 
({})", typeClass); + + data = null; + } + + log.trace("allocateArray(): finish"); + + return data; + } + + /** + * Returns the size (in bytes) of a given datatype identifier. + *

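A minimal usage sketch (the caller creates and releases the temporary type identifier; exception handling is omitted):

    // Query the size of a 32-bit native integer type; the helper returns -1 on failure.
    long tid = H5.H5Tcopy(HDF5Constants.H5T_NATIVE_INT32);
    long size = H5Datatype.getDatatypeSize(tid); // expected: 4 (bytes)
    H5.H5Tclose(tid);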
+ * It basically just calls H5Tget_size(tid). + * + * @param tid + * The datatype identifier. + * + * @return The size of the datatype in bytes. + * + * @see H5#H5Tget_size(long) + */ + public static final long getDatatypeSize(long tid) { + // data type information + long tsize = -1; + + try { + tsize = H5.H5Tget_size(tid); + } + catch (Exception ex) { + tsize = -1; + } + + return tsize; + } + + /* + * (non-Javadoc) + * + * @see hdf.object.Datatype#getDescription() + */ + @Override + public String getDescription() { + log.trace("getDescription(): start"); + + if (datatypeDescription != null) { + log.trace("getDescription(): finish"); + return datatypeDescription; + } + + StringBuilder description = new StringBuilder(); + long tid = -1; + + switch (datatypeClass) { + case CLASS_CHAR: + description.append("8-bit ").append(isUnsigned() ? "unsigned " : "").append("integer"); + break; + case CLASS_INTEGER: + if (datatypeSize == NATIVE) + description.append("native ").append(isUnsigned() ? "unsigned " : "").append("integer"); + else + description.append(String.valueOf(datatypeSize * 8)).append("-bit ").append(isUnsigned() ? "unsigned " : "").append("integer"); + break; + case CLASS_FLOAT: + if (datatypeSize == NATIVE) + description.append("native floating-point"); + else + description.append(String.valueOf(datatypeSize * 8)).append("-bit floating-point"); + break; + case CLASS_STRING: + description.append("String, length = ").append(isVarStr() ? "variable" : datatypeSize); + + try { + tid = createNative(); + if (tid >= 0) { + String strPadType; + String strCSETType; + int strPad = H5.H5Tget_strpad(tid); + int strCSET = H5.H5Tget_cset(tid); + + if (strPad == HDF5Constants.H5T_STR_NULLTERM) + strPadType = "H5T_STR_NULLTERM"; + else if (strPad == HDF5Constants.H5T_STR_NULLPAD) + strPadType = "H5T_STR_NULLPAD"; + else if (strPad == HDF5Constants.H5T_STR_SPACEPAD) + strPadType = "H5T_STR_SPACEPAD"; + else + strPadType = null; + + if (strPadType != null) + description.append(", padding = ").append(strPadType); + + if (strCSET == HDF5Constants.H5T_CSET_ASCII) + strCSETType = "H5T_CSET_ASCII"; + else if (strCSET == HDF5Constants.H5T_CSET_UTF8) + strCSETType = "H5T_CSET_UTF8"; + else + strCSETType = null; + + if (strCSETType != null) + description.append(", cset = ").append(strCSETType); + } + else { + log.debug("createNative() failure"); + } + } + catch (Exception ex) { + log.debug("H5Tget_strpad failure: ", ex); + } + finally { + close(tid); + } + break; + case CLASS_BITFIELD: + if (datatypeSize == NATIVE) + description.append("native bitfield"); + else + description.append(String.valueOf(datatypeSize * 8)).append("-bit bitfield"); + break; + case CLASS_OPAQUE: + if (datatypeSize == NATIVE) + description.append("native Opaque"); + else + description.append(String.valueOf(datatypeSize)).append("-byte Opaque"); + + if (opaqueTag != null) { + description.append(", tag = ").append(opaqueTag); + } + + break; + case CLASS_COMPOUND: + description.append("Compound"); + + if ((compoundMemberTypes != null) && !compoundMemberTypes.isEmpty()) { + Iterator memberNames = null; + Iterator memberTypes = compoundMemberTypes.iterator(); + + if (compoundMemberNames != null) + memberNames = compoundMemberNames.iterator(); + + description.append(" {"); + + while (memberTypes.hasNext()) { + if (memberNames != null && memberNames.hasNext()) { + description.append(memberNames.next()).append(" = "); + } + + description.append(memberTypes.next().getDescription()); + + if (memberTypes.hasNext()) + description.append(", "); + } + 
+ description.append("}"); + } + + break; + case CLASS_REFERENCE: + description.append("Reference"); + + try { + boolean isRegionType = false; + + tid = createNative(); + if (tid >= 0) { + isRegionType = H5.H5Tequal(tid, HDF5Constants.H5T_STD_REF_DSETREG); + + description.setLength(0); + if (isRegionType) { + description.append("Dataset region reference"); + } + else { + description.append("Object reference"); + } + } + } + catch (Exception ex) { + log.debug("H5.H5Tequal failure: ", ex); + } + finally { + close(tid); + } + + break; + case CLASS_ENUM: + if (datatypeSize == NATIVE) + description.append("native enum"); + else + description.append(String.valueOf(datatypeSize * 8)).append("-bit enum"); + + String members = getEnumMembersAsString(); + if (members != null) + description.append(" (").append(members).append(")"); + + break; + case CLASS_VLEN: + description.append("Variable-length"); + + if (baseType != null) { + description.append(" of ").append(baseType.getDescription()); + } + + break; + case CLASS_ARRAY: + description.append("Array"); + + if (arrayDims != null) { + description.append(" ["); + for (int i = 0; i < arrayDims.length; i++) { + description.append(arrayDims[i]); + if (i < arrayDims.length - 1) + description.append(" x "); + } + description.append("]"); + } + + if (baseType != null) { + description.append(" of ").append(baseType.getDescription()); + } + + break; + default: + description.append("Unknown"); + break; + } + + log.trace("getDescription(): finish"); + return description.toString(); + } + + /** + * Checks if a datatype specified by the identifier is an unsigned integer. + * + * @param tid + * the datatype ID to be checked. + * + * @return true is the datatype is an unsigned integer; otherwise returns false. + */ + public static final boolean isUnsigned(long tid) { + boolean unsigned = false; + + if (tid >= 0) { + try { + int tclass = H5.H5Tget_class(tid); + log.trace("isUnsigned(): tclass = {}", tclass); + if (tclass != HDF5Constants.H5T_FLOAT && tclass != HDF5Constants.H5T_STRING + && tclass != HDF5Constants.H5T_REFERENCE && tclass != HDF5Constants.H5T_BITFIELD + && tclass != HDF5Constants.H5T_OPAQUE && tclass != HDF5Constants.H5T_VLEN + && tclass != HDF5Constants.H5T_COMPOUND && tclass != HDF5Constants.H5T_ARRAY) { + int tsign = H5.H5Tget_sign(tid); + if (tsign == HDF5Constants.H5T_SGN_NONE) { + unsigned = true; + } + else { + log.trace("isUnsigned(): not unsigned"); + } + } + else { + log.trace("isUnsigned(): tclass not integer type"); + } + } + catch (Exception ex) { + log.debug("isUnsigned(): Datatype {} failure", tid, ex); + unsigned = false; + } + } + else { + log.trace("isUnsigned(): not a valid datatype"); + } + + return unsigned; + } + + /* + * (non-Javadoc) + * + * @see hdf.object.Datatype#getMetadata() + */ + @Override + public List getMetadata() throws HDF5Exception { + return this.getMetadata(fileFormat.getIndexType(null), fileFormat.getIndexOrder(null)); + } + + /* + * (non-Javadoc) + * + * @see hdf.object.DataFormat#getMetadata(int...) + */ + public List getMetadata(int... 
attrPropList) throws HDF5Exception { + log.trace("getMetadata(): start"); + // load attributes first + if (attributeList == null) { + int indxType = fileFormat.getIndexType(null); + int order = fileFormat.getIndexOrder(null); + + if (attrPropList.length > 0) { + indxType = attrPropList[0]; + if (attrPropList.length > 1) { + order = attrPropList[1]; + } + } + + try { + attributeList = H5File.getAttribute(this, indxType, order); + } + catch (Exception ex) { + log.debug("getMetadata(): H5File.getAttribute failure: ", ex); + } + } // (attributeList == null) + + try { + this.linkTargetObjName = H5File.getLinkTargetName(this); + } + catch (Exception ex) { + log.debug("getMetadata(): H5File.linkTargetObjName failure: ", ex); + } + + log.trace("getMetadata(): finish"); + return attributeList; + } + + /* + * (non-Javadoc) + * + * @see hdf.object.Datatype#writeMetadata(java.lang.Object) + */ + @Override + public void writeMetadata(Object info) throws Exception { + log.trace("writeMetadata(): start"); + + // only attribute metadata is supported. + if (!(info instanceof Attribute)) { + log.debug("writeMetadata(): Object not an Attribute"); + log.trace("writeMetadata(): finish"); + return; + } + + boolean attrExisted = false; + Attribute attr = (Attribute) info; + + if (attributeList == null) { + this.getMetadata(); + } + + if (attributeList != null) + attrExisted = attributeList.contains(attr); + + getFileFormat().writeAttribute(this, attr, attrExisted); + + // add the new attribute into attribute list + if (!attrExisted) { + attributeList.add(attr); + nAttributes = attributeList.size(); + } + log.trace("writeMetadata(): finish"); + } + + /* + * (non-Javadoc) + * + * @see hdf.object.Datatype#removeMetadata(java.lang.Object) + */ + @Override + public void removeMetadata(Object info) throws HDF5Exception { + log.trace("removeMetadata(): start"); + + // only attribute metadata is supported. + if (!(info instanceof Attribute)) { + log.debug("removeMetadata(): Object not an attribute"); + log.trace("removeMetadata(): finish"); + return; + } + + Attribute attr = (Attribute) info; + long tid = open(); + try { + H5.H5Adelete(tid, attr.getName()); + List attrList = getMetadata(); + attrList.remove(attr); + nAttributes = attributeList.size(); + } + catch (Exception ex) { + log.debug("removeMetadata(): ", ex); + } + finally { + close(tid); + } + log.trace("removeMetadata(): finish"); + } + + @Override + public void setName(String newName) throws Exception { + H5File.renameObject(this, newName); + super.setName(newName); + } + + @Override + public boolean isText() { + return (datatypeClass == Datatype.CLASS_STRING); + } + + public boolean isRefObj() { + return isRefObj; + } + + public boolean isRegRef() { + return isRegRef; + } + + public int getNativeStrPad() { + return nativeStrPad; + } + + /** + * Extracts compound information into flat structure. + *

+ * For example, if compound datatype "nest" has members {nest1{a, b, c}, d, e}, then extractCompoundInfo() will
+ * put the names of the nested compound fields into a flat list as
+ *

+     * nest.nest1.a
+     * nest.nest1.b
+     * nest.nest1.c
+     * nest.d
+     * nest.e
+     * 
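A hedged sketch of driving this flattening, where nestType stands in for an H5Datatype describing the compound shown above:

    // Collect flattened member names and their datatypes; both lists are filled in member order.
    List<String> names = new ArrayList<>();
    List<Datatype> types = new ArrayList<>();
    H5Datatype.extractCompoundInfo(nestType, "nest" + CompoundDS.SEPARATOR, names, types);
    // names now holds "nest.nest1.a", "nest.nest1.b", "nest.nest1.c", "nest.d", "nest.e"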
+ * + *@param dtype + * the datatype to extract compound info from + * @param name + * the name of the compound datatype + * @param names + * the list to store the member names of the compound datatype + * @param flatListTypes + * the list to store the nested member names of the compound datatype + */ + public static void extractCompoundInfo(final H5Datatype dtype, String name, List names, List flatListTypes) { + log.trace("extractCompoundInfo(): start: name={}", name); + + if (dtype.isArray()) { + log.trace("extractCompoundInfo(): array type - extracting compound info from base datatype"); + H5Datatype.extractCompoundInfo((H5Datatype) dtype.getDatatypeBase(), name, names, flatListTypes); + } + else if (dtype.isVLEN() && !dtype.isVarStr()) { + log.trace("extractCompoundInfo(): variable-length type - extracting compound info from base datatype"); + H5Datatype.extractCompoundInfo((H5Datatype) dtype.getDatatypeBase(), name, names, flatListTypes); + } + else if (dtype.isCompound()) { + List compoundMemberNames = dtype.getCompoundMemberNames(); + List compoundMemberTypes = dtype.getCompoundMemberTypes(); + Datatype mtype = null; + String mname = null; + + if (compoundMemberNames == null) { + log.debug("extractCompoundInfo(): compoundMemberNames is null"); + log.trace("extractCompoundInfo(): exit"); + return; + } + + if (compoundMemberNames.isEmpty()) { + log.debug("extractCompoundInfo(): compound datatype has no members"); + log.trace("extractCompoundInfo(): exit"); + return; + } + + log.trace("extractCompoundInfo(): nMembers={}", compoundMemberNames.size()); + + for (int i = 0; i < compoundMemberNames.size(); i++) { + log.trace("extractCompoundInfo(): member[{}]:", i); + + mtype = compoundMemberTypes.get(i); + + log.trace("extractCompoundInfo(): type={} with size={}", mtype.getDescription(), mtype.getDatatypeSize()); + + if (names != null) { + mname = name + compoundMemberNames.get(i); + log.trace("extractCompoundInfo(): mname={}, name={}", mname, name); + } + + if (mtype.isCompound()) { + H5Datatype.extractCompoundInfo((H5Datatype) mtype, mname + CompoundDS.SEPARATOR, names, flatListTypes); + log.trace("extractCompoundInfo(): continue after recursive compound"); + continue; + } + + if (names != null) { + names.add(mname); + } + + flatListTypes.add(mtype); + + /* + * For ARRAY of COMPOUND and VLEN of COMPOUND types, we first add the top-level + * array or vlen type to the list of datatypes, and then follow that with a + * listing of the datatypes inside the nested compound. + */ + /* + * TODO: Don't flatten variable-length types until true variable-length support + * is implemented. + */ + if (mtype.isArray() /* || (mtype.isVLEN() && !mtype.isVarStr()) */) { + H5Datatype.extractCompoundInfo((H5Datatype) mtype, mname + CompoundDS.SEPARATOR, names, flatListTypes); + } + } + } + + log.trace("extractCompoundInfo(): finish"); + } + + /** + * Creates a datatype of a compound with one field. + *

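A minimal sketch, assuming compType is an H5Datatype for a compound dataset and "temperature" is one of its member names (both names are illustrative):

    // Build a one-member compound type so that only this field is transferred during I/O.
    long fieldTid = compType.createCompoundFieldType("temperature");
    if (fieldTid >= 0) {
        // ... pass fieldTid to H5.H5Dread/H5.H5Dwrite for field-wise access ...
        H5.H5Tclose(fieldTid); // the caller releases the returned identifier
    }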
+ * This function is needed to read/write data field by field. + * + * @param memberName + * The name of the datatype + * + * @return the identifier of the compound datatype. + * + * @throws HDF5Exception + * If there is an error at the HDF5 library level. + */ + public long createCompoundFieldType(String memberName) throws HDF5Exception { + log.trace("createCompoundFieldType(): start member_name={}", memberName); + + long topTID = -1; + long tmpTID1 = -1; + + try { + if (this.isArray()) { + log.trace("createCompoundFieldType(): array datatype"); + + if (baseType != null) { + log.trace("createCompoundFieldType(): creating compound field type from base datatype"); + tmpTID1 = ((H5Datatype) baseType).createCompoundFieldType(memberName); + } + + log.trace("createCompoundFieldType(): creating container array datatype"); + topTID = H5.H5Tarray_create(tmpTID1, arrayDims.length, arrayDims); + } + else if (this.isVLEN()) { + log.trace("createCompoundFieldType(): variable-length datatype"); + + if (baseType != null) { + log.trace("createCompoundFieldType(): creating compound field type from base datatype"); + tmpTID1 = ((H5Datatype) baseType).createCompoundFieldType(memberName); + } + + log.trace("createCompoundFieldType(): creating container variable-length datatype"); + topTID = H5.H5Tvlen_create(tmpTID1); + } + else if (this.isCompound()) { + log.trace("createCompoundFieldType(): compound datatype"); + + String insertedName = memberName; + + int sep = memberName.indexOf(CompoundDS.SEPARATOR); + if (sep >= 0) { + /* + * If a compound separator character is present in the supplied string, then + * there is an additional level of compound nesting. We will create a compound + * type to hold the nested compound type. + */ + insertedName = memberName.substring(0, sep); + + log.trace("createCompoundFieldType(): member with name {} is nested inside compound", insertedName); + } + + /* + * Retrieve the index of the compound member by its name. + */ + int memberIndex = this.compoundMemberNames.indexOf(insertedName); + if (memberIndex >= 0) { + H5Datatype memberType = (H5Datatype) this.compoundMemberTypes.get(memberIndex); + + log.trace("createCompoundFieldType(): Member {} is type {} of size={} with baseType={}", insertedName, + memberType.getDescription(), memberType.getDatatypeSize(), memberType.getDatatypeBase()); + + if (sep >= 0) + /* + * Additional compound nesting; create the nested compound type. + */ + tmpTID1 = memberType.createCompoundFieldType(memberName.substring(sep + 1)); + else + tmpTID1 = memberType.createNative(); + + log.trace("createCompoundFieldType(): creating container compound datatype"); + topTID = H5.H5Tcreate(HDF5Constants.H5T_COMPOUND, datatypeSize); + + log.trace("createCompoundFieldType(): inserting member {} into compound datatype", insertedName); + H5.H5Tinsert(topTID, insertedName, 0, tmpTID1); + + /* + * WARNING!!! This step is crucial. Without it, the compound type created might be larger than + * the size of the single datatype field we are inserting. Performing a read with a compound + * datatype of an incorrect size will corrupt JVM memory and cause strange behavior and crashes. 
+ */ + H5.H5Tpack(topTID); + } + else { + log.debug("createCompoundFieldType(): member name {} not found in compound datatype's member name list", memberName); + } + } + } + catch (Exception ex) { + log.debug("createCompoundFieldType(): creation of compound field type failed: ", ex); + topTID = -1; + } + finally { + close(tmpTID1); + } + + log.trace("createCompoundFieldType(): finish"); + + return topTID; + } + + private boolean datatypeIsComplex(long tid) { + long tclass = HDF5Constants.H5T_NO_CLASS; + + try { + tclass = H5.H5Tget_class(tid); + } + catch (Exception ex) { + log.debug("datatypeIsComplex():", ex); + } + + return (tclass == HDF5Constants.H5T_COMPOUND || tclass == HDF5Constants.H5T_ENUM || tclass == HDF5Constants.H5T_VLEN || tclass == HDF5Constants.H5T_ARRAY); + } + + private boolean datatypeIsAtomic(long tid) { + return !datatypeIsComplex(tid) || isOpaque() || isBitField(); + } +} diff --git a/src/main/java/hdf/object/h5/H5File.java b/src/main/java/hdf/object/h5/H5File.java new file mode 100644 index 0000000..e04402d --- /dev/null +++ b/src/main/java/hdf/object/h5/H5File.java @@ -0,0 +1,3175 @@ +/***************************************************************************** + * Copyright by The HDF Group. * + * Copyright by the Board of Trustees of the University of Illinois. * + * All rights reserved. * + * * + * This file is part of the HDF Java Products distribution. * + * The full copyright notice, including terms governing use, modification, * + * and redistribution, is contained in the files COPYING and Copyright.html. * + * COPYING can be found at the root of the source code distribution tree. * + * Or, see https://support.hdfgroup.org/products/licenses.html * + * If you do not have access to either file, you may request a copy from * + * help@hdfgroup.org. * + ****************************************************************************/ + +package hdf.object.h5; + +import java.io.File; +import java.lang.reflect.Array; +import java.util.Hashtable; +import java.util.Iterator; +import java.util.LinkedList; +import java.util.List; +import java.util.Queue; +import java.util.Vector; + +import hdf.hdf5lib.H5; +import hdf.hdf5lib.HDF5Constants; +import hdf.hdf5lib.HDFNativeData; +import hdf.hdf5lib.exceptions.HDF5Exception; +import hdf.hdf5lib.structs.H5G_info_t; +import hdf.hdf5lib.structs.H5L_info_t; +import hdf.hdf5lib.structs.H5O_info_t; +import hdf.object.Attribute; +import hdf.object.Dataset; +import hdf.object.Datatype; +import hdf.object.FileFormat; +import hdf.object.Group; +import hdf.object.HObject; +import hdf.object.ScalarDS; + + +/** + * H5File is an implementation of the FileFormat class for HDF5 files. + *

+ * The HDF5 file structure is made up of HObjects stored in a tree-like fashion. Each tree node represents an + * HDF5 object: a Group, Dataset, or Named Datatype. Starting from the root of the tree, rootObject, the + * tree can be traversed to find a specific object. + *

+ * The following example shows the implementation of finding an object for a given path in FileFormat. User applications + * can directly call the static method FileFormat.findObject(file, objPath) to get the object. + * + *

+ * HObject findObject(FileFormat file, String path) {
+ *     if (file == null || path == null)
+ *         return null;
+ *     if (!path.endsWith("/"))
+ *         path = path + "/";
+ *     HObject theRoot = file.getRootObject();
+ *     if (theRoot == null)
+ *         return null;
+ *     else if (path.equals("/"))
+ *         return theRoot;
+ *
+ *     Iterator local_it = ((Group) theRoot)
+ *             .breadthFirstMemberList().iterator();
+ *     HObject theObj = null;
+ *     while (local_it.hasNext()) {
+ *         HObject obj = (HObject) local_it.next();
+ *         String fullPath = obj.getFullName() + "/";
+ *         if (path.equals(fullPath) && obj.getPath() != null) {
+ *             theObj = obj;
+ *             break;
+ *         }
+ *     }
+ *     return theObj;
+ * }
+ * 
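In practice, a caller can go through the public helper instead of re-implementing the walk; a hedged sketch with illustrative paths:

    // Open a file read-only and look up an object by its absolute path.
    H5File file = new H5File("/tmp/example.h5", H5File.READ); // path is illustrative
    file.open();
    HObject obj = FileFormat.findObject(file, "/group1/dataset1");
    file.close(); // exception handling omitted in this sketch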
+ * + * @author Peter X. Cao + * @version 2.4 9/4/2007 + */ +public class H5File extends FileFormat { + private static final long serialVersionUID = 6247335559471526045L; + + private static final org.slf4j.Logger log = org.slf4j.LoggerFactory.getLogger(H5File.class); + + /** + * the file access flag. Valid values are HDF5Constants.H5F_ACC_RDONLY, HDF5Constants.H5F_ACC_RDWR and + * HDF5Constants.H5F_ACC_CREAT. + */ + private int flag; + + /** + * The index type. Valid values are HDF5Constants.H5_INDEX_NAME, HDF5Constants.H5_INDEX_CRT_ORDER. + */ + private int indexType = HDF5Constants.H5_INDEX_NAME; + + /** + * The index order. Valid values are HDF5Constants.H5_ITER_INC, HDF5Constants.H5_ITER_DEC. + */ + private int indexOrder = HDF5Constants.H5_ITER_INC; + + /** + * The root object of the file hierarchy. + */ + private HObject rootObject; + + /** + * How many characters maximum in an attribute name? + */ + private static final int attrNameLen = 256; + + /** + * The library version bounds + */ + private int[] libver; + public static final int LIBVER_LATEST = HDF5Constants.H5F_LIBVER_LATEST; + public static final int LIBVER_EARLIEST = HDF5Constants.H5F_LIBVER_EARLIEST; + public static final int LIBVER_V18 = HDF5Constants.H5F_LIBVER_V18; + public static final int LIBVER_V110 = HDF5Constants.H5F_LIBVER_V110; + + /*************************************************************************** + * Constructor + **************************************************************************/ + /** + * Constructs an H5File instance with an empty file name and read-only access. + */ + public H5File() { + this("", READ); + } + + /** + * Constructs an H5File instance with specified file name and read/write access. + *

+ * This constructor does not open the file for access, nor does it confirm that the file can be opened read/write. + * + * @param fileName + * A valid file name, with a relative or absolute path. + * + * @throws NullPointerException + * If the fileName argument is null. + */ + public H5File(String fileName) { + this(fileName, WRITE); + } + + /** + * Constructs an H5File instance with specified file name and access. + *

+ * The access parameter values and corresponding behaviors: + *

+ *
+ *   • READ: Read-only access; open() will fail if the file doesn't exist.
+ *   • WRITE: Read/Write access; open() will fail if the file doesn't exist or if the file can't be opened
+ *     with read/write access.
+ *   • CREATE: Read/Write access; create a new file or truncate an existing one; open() will fail if the file
+ *     can't be created or if the file exists but can't be opened read/write.
+ *
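Concretely, the three modes map onto constructor calls like the following (file names are illustrative):

    H5File readOnly  = new H5File("/data/in.h5", H5File.READ);    // file must already exist
    H5File readWrite = new H5File("/data/io.h5", H5File.WRITE);   // opened H5F_ACC_RDWR later
    H5File truncated = new H5File("/data/new.h5", H5File.CREATE); // created or truncated on open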

+ * This constructor does not open the file for access, nor does it confirm that the file can later be opened + * read/write or created. + *

+ * The flag returned by {@link #isReadOnly()} is set to true if the access parameter value is READ, even though the + * file isn't yet open. + * + * @param fileName + * A valid file name, with a relative or absolute path. + * @param access + * The file access flag, which determines behavior when file is opened. Acceptable values are + * READ, WRITE, and CREATE. + * + * @throws NullPointerException + * If the fileName argument is null. + */ + public H5File(String fileName, int access) { + // Call FileFormat ctor to set absolute path name + super(fileName); + libver = new int[2]; + + if ((access & FILE_CREATE_OPEN) == FILE_CREATE_OPEN) { + File f = new File(fileName); + if (f.exists()) { + access = WRITE; + } + else { + access = CREATE; + } + } + + // set metadata for the instance + rootObject = null; + this.fid = -1; + isReadOnly = (access == READ); + + // At this point we just set up the flags for what happens later. + // We just pass unexpected access values on... subclasses may have + // their own values. + if (access == READ) { + flag = HDF5Constants.H5F_ACC_RDONLY; + } + else if (access == WRITE) { + flag = HDF5Constants.H5F_ACC_RDWR; + } + else if (access == CREATE) { + flag = HDF5Constants.H5F_ACC_CREAT; + } + else { + flag = access; + } + } + + /*************************************************************************** + * Class methods + **************************************************************************/ + + /** + * Copies the attributes of one object to another object. + *

+ * This method copies all the attributes from one object (source object) to another (destination object). If an + * attribute already exists in the destination object, the attribute will not be copied. Attribute names exceeding + * 256 characters will be truncated in the destination object. + *

+ * The object can be an H5Group, an H5Dataset, or a named H5Datatype. This method is in the H5File class because + * there is no H5Object class and it is specific to HDF5 objects. + *
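For instance, a sketch in which src and dst are two already-constructed HObjects:

    // Copy every attribute of src onto dst; names already present on dst are left untouched.
    H5File.copyAttributes(src, dst); // failures are logged rather than thrown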

+ * The copy can fail for a number of reasons, including an invalid source or destination object, but no exceptions + * are thrown. The actual copy is carried out by the method: {@link #copyAttributes(long, long)} + * + * @param src + * The source object. + * @param dst + * The destination object. + * + * @see #copyAttributes(long, long) + */ + public static final void copyAttributes(HObject src, HObject dst) { + if ((src != null) && (dst != null)) { + long srcID = src.open(); + long dstID = dst.open(); + + if ((srcID >= 0) && (dstID >= 0)) { + copyAttributes(srcID, dstID); + } + + if (srcID >= 0) { + src.close(srcID); + } + + if (dstID >= 0) { + dst.close(dstID); + } + } + } + + /** + * Copies the attributes of one object to another object. + *

+ * This method copies all the attributes from one object (source object) to another (destination object). If an + * attribute already exists in the destination object, the attribute will not be copied. Attribute names exceeding + * 256 characters will be truncated in the destination object. + *

+ * The object can be an H5Group, an H5Dataset, or a named H5Datatype. This method is in the H5File class because + * there is no H5Object class and it is specific to HDF5 objects. + *

+ * The copy can fail for a number of reasons, including an invalid source or destination object identifier, but no + * exceptions are thrown. + * + * @param src_id + * The identifier of the source object. + * @param dst_id + * The identifier of the destination object. + */ + public static final void copyAttributes(long src_id, long dst_id) { + log.trace("copyAttributes(): start: src_id={} dst_id={}", src_id, dst_id); + long aid_src = -1; + long aid_dst = -1; + long asid = -1; + long atid = -1; + String aName = null; + H5O_info_t obj_info = null; + + try { + obj_info = H5.H5Oget_info(src_id); + } + catch (Exception ex) { + obj_info.num_attrs = -1; + } + + if (obj_info.num_attrs < 0) { + log.debug("copyAttributes(): no attributes"); + log.trace("copyAttributes(): finish"); + return; + } + + for (int i = 0; i < obj_info.num_attrs; i++) { + try { + aid_src = H5.H5Aopen_by_idx(src_id, ".", HDF5Constants.H5_INDEX_CRT_ORDER, HDF5Constants.H5_ITER_INC, + i, HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT); + aName = H5.H5Aget_name(aid_src); + atid = H5.H5Aget_type(aid_src); + asid = H5.H5Aget_space(aid_src); + + aid_dst = H5.H5Acreate(dst_id, aName, atid, asid, HDF5Constants.H5P_DEFAULT, + HDF5Constants.H5P_DEFAULT); + + // use native data copy + H5.H5Acopy(aid_src, aid_dst); + + } + catch (Exception ex) { + log.debug("copyAttributes(): Attribute[{}] failure: ", i, ex); + } + + try { + H5.H5Sclose(asid); + } + catch (Exception ex) { + log.debug("copyAttributes(): Attribute[{}] H5Sclose(asid {}) failure: ", i, asid, ex); + } + try { + H5.H5Tclose(atid); + } + catch (Exception ex) { + log.debug("copyAttributes(): Attribute[{}] H5Tclose(atid {}) failure: ", i, atid, ex); + } + try { + H5.H5Aclose(aid_src); + } + catch (Exception ex) { + log.debug("copyAttributes(): Attribute[{}] H5Aclose(aid_src {}) failure: ", i, aid_src, ex); + } + try { + H5.H5Aclose(aid_dst); + } + catch (Exception ex) { + log.debug("copyAttributes(): Attribute[{}] H5Aclose(aid_dst {}) failure: ", i, aid_dst, ex); + } + + } // (int i=0; i + * This method returns a list containing the attributes associated with the + * identified object. If there are no associated attributes, an empty list will + * be returned. + *

+ * Attribute names exceeding 256 characters will be truncated in the returned + * list. + * + * @param obj + * The HObject whose attributes are to be returned. + * + * @return The list of the object's attributes. + * + * @throws HDF5Exception + * If an underlying HDF library routine is unable to perform a step + * necessary to retrieve the attributes. A variety of failures throw + * this exception. + * + * @see #getAttribute(HObject,int,int) + */ + public static final List getAttribute(HObject obj) throws HDF5Exception { + return H5File.getAttribute(obj, HDF5Constants.H5_INDEX_NAME, HDF5Constants.H5_ITER_INC); + } + + /** + * Returns a list of attributes for the specified object, in creation or + * alphabetical order. + *

+ * This method returns a list containing the attributes associated with the + * identified object. If there are no associated attributes, an empty list will + * be returned. The list of attributes returned can be in increasing or + * decreasing, creation or alphabetical order. + *

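For example, a hedged sketch that lists attributes in creation order, where obj is an assumed already-constructed HObject:

    List attrs = H5File.getAttribute(obj, HDF5Constants.H5_INDEX_CRT_ORDER, HDF5Constants.H5_ITER_INC);
    for (Object a : attrs)
        System.out.println(((Attribute) a).getName()); // oldest attribute first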
+ * Attribute names exceeding 256 characters will be truncated in the returned + * list. + * + * @param obj + * The HObject whose attributes are to be returned. + * @param idx_type + * The type of index. Valid values are: + *

+ *   • H5_INDEX_NAME: An alpha-numeric index by attribute name
+ *   • H5_INDEX_CRT_ORDER: An index by creation order
+ * @param order + * The index traversal order. Valid values are: + *
+ *   • H5_ITER_INC: A top-down iteration incrementing the index position at each step.
+ *   • H5_ITER_DEC: A bottom-up iteration decrementing the index position at each step.
+ * + * @return The list of the object's attributes. + * + * @throws HDF5Exception + * If an underlying HDF library routine is unable to perform a step + * necessary to retrieve the attributes. A variety of failures throw + * this exception. + */ + + public static final List getAttribute(HObject obj, int idx_type, int order) throws HDF5Exception { + log.trace("getAttribute(): start: obj={} idx_type={} order={}", obj, idx_type, order); + List attributeList = null; + long objID = -1; + long aid = -1; + long sid = -1; + long tid = -1; + H5O_info_t obj_info = null; + + objID = obj.open(); + if (objID >= 0) { + try { + try { + log.trace("getAttribute(): get obj_info"); + obj_info = H5.H5Oget_info(objID); + } + catch (Exception ex) { + log.debug("getAttribute(): H5Oget_info(objID {}) failure: ", objID, ex); + } + if (obj_info.num_attrs <= 0) { + log.trace("getAttribute(): no attributes"); + log.trace("getAttribute(): finish"); + return (attributeList = new Vector<>()); + } + + int n = (int) obj_info.num_attrs; + attributeList = new Vector<>(n); + log.trace("getAttribute(): num_attrs={}", n); + + for (int i = 0; i < n; i++) { + long lsize = 1; + log.trace("getAttribute(): attribute[{}]", i); + + try { + aid = H5.H5Aopen_by_idx(objID, ".", idx_type, order, i, HDF5Constants.H5P_DEFAULT, + HDF5Constants.H5P_DEFAULT); + sid = H5.H5Aget_space(aid); + log.trace("getAttribute(): Attribute[{}] aid={} sid={}", i, aid, sid); + + long dims[] = null; + int rank = H5.H5Sget_simple_extent_ndims(sid); + + log.trace("getAttribute(): Attribute[{}] isScalar={}", i, (rank == 0)); + + if (rank > 0) { + dims = new long[rank]; + H5.H5Sget_simple_extent_dims(sid, dims, null); + log.trace("getAttribute(): Attribute[{}] rank={}, dims={}", i, rank, dims); + for (int j = 0; j < dims.length; j++) { + lsize *= dims[j]; + } + } + + String nameA = H5.H5Aget_name(aid); + log.trace("getAttribute(): Attribute[{}] is {}", i, nameA); + + long tmptid = -1; + try { + tmptid = H5.H5Aget_type(aid); + tid = H5.H5Tget_native_type(tmptid); + log.trace("getAttribute(): Attribute[{}] tid={} native tmptid={} from aid={}", i, tid, + tmptid, aid); + } + finally { + try { + H5.H5Tclose(tmptid); + } + catch (Exception ex) { + log.debug("getAttribute(): Attribute[{}] H5Tclose(tmptid {}) failure: ", i, tmptid, ex); + } + } + + H5Datatype attrType = null; + try { + attrType = new H5Datatype(tid); + + log.trace("getAttribute(): Attribute[{}] Datatype={}", i, attrType.getDescription()); + log.trace("getAttribute(): Attribute[{}] has size={} isCompound={} is_variable_str={} isVLEN={}", + i, lsize, attrType.isCompound(), attrType.isVarStr(), attrType.isVLEN()); + } + catch (Exception ex) { + log.debug("getAttribute(): failed to create datatype for Attribute[{}]: ", i, ex); + attrType = null; + } + + Attribute attr = new Attribute(obj, nameA, attrType, dims); + attributeList.add(attr); + + // retrieve the attribute value + if (lsize <= 0) { + log.debug("getAttribute(): Attribute[{}] lsize <= 0", i); + log.trace("getAttribute(): Attribute[{}] continue", i); + continue; + } + + if (lsize < Integer.MIN_VALUE || lsize > Integer.MAX_VALUE) { + log.debug("getAttribute(): Attribute[{}] lsize outside valid Java int range; unsafe cast", i); + log.trace("getAttribute(): Attribute[{}] continue", i); + continue; + } + + Object value = null; + try { + if (attr.getDatatype().isVarStr()) { + String[] strs = new String[(int) lsize]; + for (int j = 0; j < lsize; j++) { + strs[j] = ""; + } + try { + log.trace("getAttribute(): Attribute[{}] H5AreadVL", i); + 
H5.H5AreadVL(aid, tid, strs); + } + catch (Exception ex) { + log.debug("getAttribute(): Attribute[{}] H5AreadVL failure: ", i, ex); + ex.printStackTrace(); + } + value = strs; + } + else if (attr.getDatatype().isCompound()) { + String[] strs = new String[(int) lsize]; + for (int j = 0; j < lsize; j++) { + strs[j] = ""; + } + try { + log.trace("getAttribute: attribute[{}] H5AreadComplex", i); + H5.H5AreadComplex(aid, tid, strs); + } + catch (Exception ex) { + ex.printStackTrace(); + } + value = strs; + } + else if (attr.getDatatype().isVLEN()) { + String[] strs = new String[(int) lsize]; + for (int j = 0; j < lsize; j++) { + strs[j] = ""; + } + try { + log.trace("getAttribute(): Attribute[{}] H5AreadVL", i); + H5.H5AreadVL(aid, tid, strs); + } + catch (Exception ex) { + log.debug("getAttribute(): Attribute[{}] H5AreadVL failure: ", i, ex); + ex.printStackTrace(); + } + value = strs; + } + else { + try { + value = H5Datatype.allocateArray(((H5Datatype) attr.getDatatype()), (int) lsize); + } + catch (OutOfMemoryError e) { + log.debug("getAttribute(): Attribute[{}] out of memory", i, e); + value = null; + } + if (value == null) { + log.debug("getAttribute(): Attribute[{}] allocateArray returned null", i); + log.trace("getAttribute(): Attribute[{}] continue", i); + continue; + } + + if (attr.getDatatype().isArray()) { + try { + log.trace("getAttribute(): Attribute[{}] H5Aread ARRAY tid={}", i, tid); + H5.H5Aread(aid, tid, value); + } + catch (Exception ex) { + log.debug("getAttribute(): Attribute[{}] H5Aread failure: ", i, ex); + ex.printStackTrace(); + } + } + else { + log.trace("getAttribute(): Attribute[{}] H5Aread", i); + H5.H5Aread(aid, tid, value); + } + + if (attr.getDatatype().isText() && value instanceof byte[]) { + log.trace("getAttribute(): isText: converting byte array to string array"); + value = attr.byteToString((byte[]) value, (int) attr.getDatatype().getDatatypeSize()); + } + else if (((H5Datatype)attr.getDatatype()).isRefObj()) { + log.trace("getAttribute(): Attribute[{}] isREF: converting byte array to long array", i); + value = HDFNativeData.byteToLong((byte[]) value); + } + } + } + catch (Exception ex) { + log.debug("getAttribute(): Attribute[{}] read failure: ", i, ex); + continue; + } + + log.trace("getAttribute(): Attribute[{}] data: {}", i, value); + attr.setData(value); + } + catch (HDF5Exception ex) { + log.debug("getAttribute(): Attribute[{}] inspection failure: ", i, ex); + } + finally { + try { + H5.H5Tclose(tid); + } + catch (Exception ex) { + log.debug("getAttribute(): Attribute[{}] H5Tclose(tid {}) failure: ", i, tid, ex); + } + try { + H5.H5Sclose(sid); + } + catch (Exception ex) { + log.debug("getAttribute(): Attribute[{}] H5Sclose(sid {}) failure: ", i, sid, ex); + } + try { + H5.H5Aclose(aid); + } + catch (Exception ex) { + log.debug("getAttribute(): Attribute[{}] H5Aclose(aid {}) failure: ", i, aid, ex); + } + } + } // (int i=0; i + * This method creates attributes for two common types of HDF5 images. It provides a way of adding multiple + * attributes to an HDF5 image dataset with a single call. The {@link #writeAttribute(HObject, Attribute, boolean)} + * method may be used to write image attributes that are not handled by this method. + *

+ * For more information about HDF5 image attributes, see the + * HDF5 Image and Palette Specification. + *

+ * This method can be called to create attributes for 24-bit true color and indexed images. The + * selectionFlag parameter controls whether this will be an indexed or true color image. If + * selectionFlag is -1, this will be an indexed image. If the value is + * ScalarDS.INTERLACE_PIXEL or ScalarDS.INTERLACE_PLANE, it will be a 24-bit true color + * image with the indicated interlace mode. + *

+ *

+ * The created attribute descriptions, names, and values are:
+ *   • The image identifier: name="CLASS", value="IMAGE"
+ *   • The version of the image: name="IMAGE_VERSION", value="1.2"
+ *   • The range of data values: name="IMAGE_MINMAXRANGE", value=[0, 255]
+ *   • The type of the image: name="IMAGE_SUBCLASS", value="IMAGE_TRUECOLOR" or "IMAGE_INDEXED"
+ *   • For IMAGE_TRUECOLOR, the interlace mode: name="INTERLACE_MODE", value="INTERLACE_PIXEL" or "INTERLACE_PLANE"
+ *   • For IMAGE_INDEXED, the palettes to use in viewing the image: name="PALETTE", value=1-d array of
+ *     references to the palette datasets, with initial value of {-1}
+ *
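Each of these can also be written individually through the Attribute API; a minimal sketch for the "CLASS" marker, where dataset is an assumed existing image dataset:

    String[] classValue = { "IMAGE" };
    Datatype strType = new H5Datatype(Datatype.CLASS_STRING, classValue[0].length() + 1,
            Datatype.NATIVE, Datatype.NATIVE);
    Attribute classAttr = new Attribute(dataset, "CLASS", strType, null);
    classAttr.write(classValue); // attaches CLASS="IMAGE" to the dataset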

+ * This method is in the H5File class rather than H5ScalarDS because images are typically thought of at the File + * Format implementation level. + * + * @param dataset + * The image dataset the attributes are added to. + * @param selectionFlag + * Selects the image type and, for 24-bit true color images, the interlace mode. Valid values are: + *

+ *   • -1: Indexed Image.
+ *   • ScalarDS.INTERLACE_PIXEL: True Color Image. The component values for a pixel are stored contiguously.
+ *   • ScalarDS.INTERLACE_PLANE: True Color Image. Each component is stored in a separate plane.
+ * + * @throws Exception + * If there is a problem creating the attributes, or if the selectionFlag is invalid. + */ + private static final void createImageAttributes(Dataset dataset, int selectionFlag) throws Exception { + log.trace("createImageAttributes(): start: dataset={}", dataset.toString()); + String subclass = null; + String interlaceMode = null; + + if (selectionFlag == ScalarDS.INTERLACE_PIXEL) { + log.trace("createImageAttributes(): subclass IMAGE_TRUECOLOR selectionFlag INTERLACE_PIXEL"); + subclass = "IMAGE_TRUECOLOR"; + interlaceMode = "INTERLACE_PIXEL"; + } + else if (selectionFlag == ScalarDS.INTERLACE_PLANE) { + log.trace("createImageAttributes(): subclass IMAGE_TRUECOLOR selectionFlag INTERLACE_PLANE"); + subclass = "IMAGE_TRUECOLOR"; + interlaceMode = "INTERLACE_PLANE"; + } + else if (selectionFlag == -1) { + log.trace("createImageAttributes(): subclass IMAGE_INDEXED"); + subclass = "IMAGE_INDEXED"; + } + else { + log.debug("createImageAttributes(): invalid selectionFlag"); + log.trace("createImageAttributes(): finish"); + throw new HDF5Exception("The selectionFlag is invalid."); + } + + String attrName = "CLASS"; + String[] classValue = { "IMAGE" }; + Datatype attrType = new H5Datatype(Datatype.CLASS_STRING, classValue[0].length() + 1, Datatype.NATIVE, Datatype.NATIVE); + Attribute attr = new Attribute(dataset, attrName, attrType, null); + attr.write(classValue); + + attrName = "IMAGE_VERSION"; + String[] versionValue = { "1.2" }; + attrType = new H5Datatype(Datatype.CLASS_STRING, versionValue[0].length() + 1, Datatype.NATIVE, Datatype.NATIVE); + attr = new Attribute(dataset, attrName, attrType, null); + attr.write(versionValue); + + long[] attrDims = { 2 }; + attrName = "IMAGE_MINMAXRANGE"; + byte[] attrValueInt = { 0, (byte) 255 }; + attrType = new H5Datatype(Datatype.CLASS_CHAR, 1, Datatype.NATIVE, Datatype.SIGN_NONE); + attr = new Attribute(dataset, attrName, attrType, attrDims); + attr.write(attrValueInt); + + attrName = "IMAGE_SUBCLASS"; + String[] subclassValue = { subclass }; + attrType = new H5Datatype(Datatype.CLASS_STRING, subclassValue[0].length() + 1, Datatype.NATIVE, Datatype.NATIVE); + attr = new Attribute(dataset, attrName, attrType, null); + attr.write(subclassValue); + + if ((selectionFlag == ScalarDS.INTERLACE_PIXEL) || (selectionFlag == ScalarDS.INTERLACE_PLANE)) { + attrName = "INTERLACE_MODE"; + String[] interlaceValue = { interlaceMode }; + attrType = new H5Datatype(Datatype.CLASS_STRING, interlaceValue[0].length() + 1, Datatype.NATIVE, Datatype.NATIVE); + attr = new Attribute(dataset, attrName, attrType, null); + attr.write(interlaceValue); + } + else { + attrName = "PALETTE"; + long[] palRef = { 0 }; // set ref to null + attrType = new H5Datatype(Datatype.CLASS_REFERENCE, 1, Datatype.NATIVE, Datatype.SIGN_NONE); + attr = new Attribute(dataset, attrName, attrType, null); + attr.write(palRef); + } + log.trace("createImageAttributes(): finish"); + } + + /** + * Updates values of scalar dataset object references in copied file. + *

+     * This method has very specific functionality as documented below, and the user is advised to pay close attention
+     * when dealing with files that contain references.
+     * <p>
+     * When a copy is made from one HDF file to another, object references and dataset region references are copied, but
+     * the references in the destination file are not updated by the copy and are therefore invalid.
+     * <p>
+     * When an entire file is copied, this method updates the values of the object references and dataset region
+     * references that are in scalar datasets in the destination file so that they point to the correct object(s) in the
+     * destination file. The method does not update references that occur in objects other than scalar datasets.
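+     * <p>
+     * For example, a hedged caller-side sketch (the file names are illustrative, and FileFormat.READ and
+     * FileFormat.CREATE are assumed to be the usual access-mode constants):
+     *
+     * <pre>
+     * H5File src = new H5File("source.h5", FileFormat.READ);
+     * H5File dst = new H5File("copy.h5", FileFormat.CREATE);
+     * // ... copy the entire tree of src into dst ...
+     * H5File.updateReferenceDataset(src, dst);
+     * </pre>
+     * <p>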
+     * In the current release, the updating of object references is not handled completely as it was not required by the
+     * projects that funded development. There is no support for updates when the copy does not include the entire file.
+     * Nor is there support for updating objects other than scalar datasets in full-file copies. This functionality will
+     * be extended as funding becomes available or, possibly, when the underlying HDF library supports the reference
+     * updates itself.
+     *
+     * @param srcFile
+     *            The file that was copied.
+     * @param dstFile
+     *            The destination file where the object references will be updated.
+     *
+     * @throws Exception
+     *             If there is a problem in the update process.
+     */
+    public static final void updateReferenceDataset(H5File srcFile, H5File dstFile) throws Exception {
+        log.trace("updateReferenceDataset(): start");
+        if ((srcFile == null) || (dstFile == null)) {
+            log.debug("updateReferenceDataset(): srcFile or dstFile is null");
+            log.trace("updateReferenceDataset(): finish");
+            return;
+        }
+
+        HObject srcRoot = srcFile.getRootObject();
+        HObject newRoot = dstFile.getRootObject();
+
+        Iterator<HObject> srcIt = getMembersBreadthFirst(srcRoot).iterator();
+        Iterator<HObject> newIt = getMembersBreadthFirst(newRoot).iterator();
+
+        long did = -1;
+        // build a one-to-one table between objects in
+        // the source file and the new file
+        long tid = -1;
+        HObject srcObj, newObj;
+        Hashtable<String, long[]> oidMap = new Hashtable<>();
+        List<ScalarDS> refDatasets = new Vector<>();
+        while (newIt.hasNext() && srcIt.hasNext()) {
+            srcObj = srcIt.next();
+            newObj = newIt.next();
+            oidMap.put(String.valueOf((srcObj.getOID())[0]), newObj.getOID());
+            did = -1;
+            tid = -1;
+
+            // for Scalar DataSets in destination, if there is an object
+            // reference in the dataset, add it to the refDatasets list for
+            // later updating.
+            if (newObj instanceof ScalarDS) {
+                ScalarDS sd = (ScalarDS) newObj;
+                did = sd.open();
+                if (did >= 0) {
+                    try {
+                        tid = H5.H5Dget_type(did);
+                        if (H5.H5Tequal(tid, HDF5Constants.H5T_STD_REF_OBJ)) {
+                            refDatasets.add(sd);
+                        }
+                    }
+                    catch (Exception ex) {
+                        log.debug("updateReferenceDataset(): ScalarDS reference failure: ", ex);
+                    }
+                    finally {
+                        try {
+                            H5.H5Tclose(tid);
+                        }
+                        catch (Exception ex) {
+                            log.debug("updateReferenceDataset(): ScalarDS reference H5Tclose(tid {}) failure: ", tid, ex);
+                        }
+                    }
+                }
+                sd.close(did);
+            } // (newObj instanceof ScalarDS)
+        }
+
+        // Update the references in the scalar datasets in the dest file.
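+        // (Hedged editor's note on the loop below: oidMap maps each source
+        // object's OID, keyed as a String, to the matching OID in the copy, so
+        // every reference value r read from a dataset is rewritten as
+        // oidMap.get(String.valueOf(r))[0] before being written back.)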
+        H5ScalarDS d = null;
+        long sid = -1;
+        int size = 0;
+        int rank = 0;
+        int n = refDatasets.size();
+        for (int i = 0; i < n; i++) {
+            log.trace("updateReferenceDataset(): Update the references in the scalar datasets in the dest file");
+            d = (H5ScalarDS) refDatasets.get(i);
+            byte[] buf = null;
+            long[] refs = null;
+
+            try {
+                did = d.open();
+                if (did >= 0) {
+                    tid = H5.H5Dget_type(did);
+                    sid = H5.H5Dget_space(did);
+                    rank = H5.H5Sget_simple_extent_ndims(sid);
+                    size = 1;
+                    if (rank > 0) {
+                        long[] dims = new long[rank];
+                        H5.H5Sget_simple_extent_dims(sid, dims, null);
+                        log.trace("updateReferenceDataset(): rank={}, dims={}", rank, dims);
+                        for (int j = 0; j < rank; j++) {
+                            size *= (int) dims[j];
+                        }
+                        dims = null;
+                    }
+
+                    buf = new byte[size * 8];
+                    H5.H5Dread(did, tid, HDF5Constants.H5S_ALL, HDF5Constants.H5S_ALL, HDF5Constants.H5P_DEFAULT, buf);
+
+                    // update the ref values
+                    refs = HDFNativeData.byteToLong(buf);
+                    size = refs.length;
+                    for (int j = 0; j < size; j++) {
+                        long[] theOID = oidMap.get(String.valueOf(refs[j]));
+                        if (theOID != null) {
+                            refs[j] = theOID[0];
+                        }
+                    }
+
+                    // write back to file
+                    H5.H5Dwrite(did, tid, HDF5Constants.H5S_ALL, HDF5Constants.H5S_ALL, HDF5Constants.H5P_DEFAULT, refs);
+                }
+                else {
+                    log.debug("updateReferenceDataset(): dest file dataset failed to open");
+                }
+            }
+            catch (Exception ex) {
+                log.debug("updateReferenceDataset(): Reference[{}] failure: ", i, ex);
+                log.trace("updateReferenceDataset(): Reference[{}] continue", i);
+                continue;
+            }
+            finally {
+                try {
+                    H5.H5Tclose(tid);
+                }
+                catch (Exception ex) {
+                    log.debug("updateReferenceDataset(): H5ScalarDS reference[{}] H5Tclose(tid {}) failure: ", i, tid, ex);
+                }
+                try {
+                    H5.H5Sclose(sid);
+                }
+                catch (Exception ex) {
+                    log.debug("updateReferenceDataset(): H5ScalarDS reference[{}] H5Sclose(sid {}) failure: ", i, sid, ex);
+                }
+                try {
+                    H5.H5Dclose(did);
+                }
+                catch (Exception ex) {
+                    log.debug("updateReferenceDataset(): H5ScalarDS reference[{}] H5Dclose(did {}) failure: ", i, did, ex);
+                }
+            }
+
+            refs = null;
+            buf = null;
+        } // (int i=0; i<n; i++)
+    }
+
+    /**
+     * Closes file resources associated with this instance of H5File.
+     *
+     * @see hdf.object.FileFormat#close()
+     *
+     * @throws HDF5Exception
+     *             If there is an error at the HDF5 library level.
+     */
+    @Override
+    public void close() throws HDF5Exception {
+        log.trace("close(): start");
+        if (fid < 0) {
+            log.debug("close(): file {} is not open", fullFileName);
+            log.trace("close(): finish");
+            return;
+        }
+
+        // clean up unused objects
+        if (rootObject != null) {
+            HObject theObj = null;
+            Iterator<HObject> it = getMembersBreadthFirst(rootObject).iterator();
+            while (it.hasNext()) {
+                theObj = it.next();
+
+                if (theObj instanceof Dataset) {
+                    log.trace("close(): clear Dataset {}", ((Dataset) theObj).toString());
+                    ((Dataset) theObj).clear();
+                }
+                else if (theObj instanceof Group) {
+                    log.trace("close(): clear Group {}", ((Group) theObj).toString());
+                    ((Group) theObj).clear();
+                }
+            }
+        }
+
+        // Close all open objects associated with this file.
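+        // (Hedged editor's note on the block below: it queries every identifier
+        // still open on this file via H5Fget_obj_count/H5Fget_obj_ids and closes
+        // each one according to its H5I type before flushing and closing fid.)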
+        try {
+            int type = -1;
+            long[] oids;
+            long n = H5.H5Fget_obj_count(fid, HDF5Constants.H5F_OBJ_ALL);
+            log.trace("close(): open objects={}", n);
+
+            if (n > 0) {
+                if (n < Integer.MIN_VALUE || n > Integer.MAX_VALUE) throw new Exception("Invalid int size");
+
+                oids = new long[(int)n];
+                H5.H5Fget_obj_ids(fid, HDF5Constants.H5F_OBJ_ALL, n, oids);
+
+                for (int i = 0; i < (int)n; i++) {
+                    log.trace("close(): object[{}] id={}", i, oids[i]);
+                    type = H5.H5Iget_type(oids[i]);
+
+                    if (HDF5Constants.H5I_DATASET == type) {
+                        try {
+                            H5.H5Dclose(oids[i]);
+                        }
+                        catch (Exception ex2) {
+                            log.debug("close(): Object[{}] H5Dclose(oids[{}] {}) failure: ", i, i, oids[i], ex2);
+                        }
+                    }
+                    else if (HDF5Constants.H5I_GROUP == type) {
+                        try {
+                            H5.H5Gclose(oids[i]);
+                        }
+                        catch (Exception ex2) {
+                            log.debug("close(): Object[{}] H5Gclose(oids[{}] {}) failure: ", i, i, oids[i], ex2);
+                        }
+                    }
+                    else if (HDF5Constants.H5I_DATATYPE == type) {
+                        try {
+                            H5.H5Tclose(oids[i]);
+                        }
+                        catch (Exception ex2) {
+                            log.debug("close(): Object[{}] H5Tclose(oids[{}] {}) failure: ", i, i, oids[i], ex2);
+                        }
+                    }
+                    else if (HDF5Constants.H5I_ATTR == type) {
+                        try {
+                            H5.H5Aclose(oids[i]);
+                        }
+                        catch (Exception ex2) {
+                            log.debug("close(): Object[{}] H5Aclose(oids[{}] {}) failure: ", i, i, oids[i], ex2);
+                        }
+                    }
+                } // (int i=0; i<n; i++)
+            } // (n>0)
+        }
+        catch (Exception ex) {
+            log.debug("close(): failure: ", ex);
+        }
+
+        try {
+            H5.H5Fflush(fid, HDF5Constants.H5F_SCOPE_GLOBAL);
+        }
+        catch (Exception ex) {
+            log.debug("close(): H5Fflush(fid {}) failure: ", fid, ex);
+        }
+
+        try {
+            H5.H5Fclose(fid);
+        }
+        catch (Exception ex) {
+            log.debug("close(): H5Fclose(fid {}) failure: ", fid, ex);
+        }
+
+        // Set fid to -1 but don't reset rootObject
+        fid = -1;
+        log.trace("close(): finish");
+    }
+
+    /**
+     * Returns the root object of the open HDF5 File.
+     *
+     * @see FileFormat#getRootObject()
+     */
+    @Override
+    public HObject getRootObject() {
+        return rootObject;
+    }
+
+    /*
+     * (non-Javadoc)
+     *
+     * @see hdf.object.FileFormat#get(java.lang.String)
+     */
+    @Override
+    public HObject get(String path) throws Exception {
+        log.trace("get({}): start", path);
+        HObject obj = null;
+
+        if ((path == null) || (path.length() <= 0)) {
+            log.debug("get(): path is null or invalid path length");
+            System.err.println("(path == null) || (path.length() <= 0)");
+            log.trace("get(): finish");
+            return null;
+        }
+
+        // replace the wrong slash and get rid of "//"
+        path = path.replace('\\', '/');
+        path = "/" + path;
+        path = path.replaceAll("//", "/");
+
+        // the whole file tree is loaded.
find the object in the tree + if (rootObject != null) { + obj = findObject(this, path); + } + + // found object in memory + if (obj != null) { + log.trace("get(): Found object in memory"); + log.trace("get(): finish"); + return obj; + } + + // open only the requested object + String name = null; + String pPath = null; + if (path.equals("/")) { + name = "/"; // the root + } + else { + // separate the parent path and the object name + if (path.endsWith("/")) { + path = path.substring(0, path.length() - 1); + } + + int idx = path.lastIndexOf('/'); + name = path.substring(idx + 1); + if (idx == 0) { + pPath = "/"; + } + else { + pPath = path.substring(0, idx); + } + } + + // do not open the full tree structure, only the file handler + long fid_before_open = fid; + fid = open(false); + if (fid < 0) { + log.debug("get(): Invalid FID"); + log.trace("get(): finish"); + System.err.println("Could not open file handler"); + return null; + } + + try { + H5O_info_t info; + int objType; + long oid = H5.H5Oopen(fid, path, HDF5Constants.H5P_DEFAULT); + + if (oid >= 0) { + info = H5.H5Oget_info(oid); + objType = info.type; + if (objType == HDF5Constants.H5O_TYPE_DATASET) { + long did = -1; + try { + did = H5.H5Dopen(fid, path, HDF5Constants.H5P_DEFAULT); + obj = getDataset(did, name, pPath); + } + finally { + try { + H5.H5Dclose(did); + } + catch (Exception ex) { + log.debug("get(): {} H5Dclose(did {}) failure: ", path, did, ex); + } + } + } + else if (objType == HDF5Constants.H5O_TYPE_GROUP) { + long gid = -1; + try { + gid = H5.H5Gopen(fid, path, HDF5Constants.H5P_DEFAULT); + H5Group pGroup = null; + if (pPath != null) { + pGroup = new H5Group(this, null, pPath, null); + obj = getGroup(gid, name, pGroup); + pGroup.addToMemberList(obj); + } + else { + obj = getGroup(gid, name, pGroup); + } + } + finally { + try { + H5.H5Gclose(gid); + } + catch (Exception ex) { + log.debug("get(): {} H5Gclose(gid {}) failure: ", path, gid, ex); + } + } + } + else if (objType == HDF5Constants.H5O_TYPE_NAMED_DATATYPE) { + obj = new H5Datatype(this, name, pPath); + } + } + try { + H5.H5Oclose(oid); + } + catch (Exception ex) { + log.debug("get(): H5Oclose(oid {}) failure: ", oid, ex); + ex.printStackTrace(); + } + } + catch (Exception ex) { + log.debug("get(): Exception finding obj {}", path, ex); + obj = null; + } + finally { + if ((fid_before_open <= 0) && (obj == null)) { + // close the fid that is not attached to any object + try { + H5.H5Fclose(fid); + } + catch (Exception ex) { + log.debug("get(): {} H5Fclose(fid {}) failure: ", path, fid, ex); + } + fid = fid_before_open; + } + } + + return obj; + } + + /* + * (non-Javadoc) + * + * @see hdf.object.FileFormat#createDatatype(int, int, int, int, java.lang.String) + */ + @Override + public Datatype createDatatype(int tclass, int tsize, int torder, int tsign, String name) throws Exception { + return createDatatype(tclass, tsize, torder, tsign, null, name); + } + + /* + * (non-Javadoc) + * + * @see hdf.object.FileFormat#createDatatype(int, int, int, int, Datatype, java.lang.String) + */ + @Override + public Datatype createDatatype(int tclass, int tsize, int torder, int tsign, Datatype tbase, String name) + throws Exception { + log.trace("createDatatype(): start: name={} class={} size={} order={} sign={}", name, tclass, tsize, torder, tsign); + if (tbase != null) + log.trace("createDatatype(): baseType is {}", tbase.getDescription()); + + long tid = -1; + H5Datatype dtype = null; + + try { + H5Datatype t = (H5Datatype) createDatatype(tclass, tsize, torder, tsign, tbase); + 
if ((tid = t.createNative()) < 0) { + log.debug("createDatatype(): createNative() failure"); + log.trace("createDatatype(): finish"); + throw new Exception("createNative() failed"); + } + + H5.H5Tcommit(fid, name, tid, HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT, + HDF5Constants.H5P_DEFAULT); + + byte[] ref_buf = H5.H5Rcreate(fid, name, HDF5Constants.H5R_OBJECT, -1); + long l = HDFNativeData.byteToLong(ref_buf, 0); + + long[] oid = new long[1]; + oid[0] = l; // save the object ID + + dtype = new H5Datatype(this, null, name); + } + finally { + H5.H5Tclose(tid); + } + + log.trace("createDatatype(): finish"); + return dtype; + } + + /*************************************************************************** + * Methods related to Datatypes and HObjects in HDF5 Files. Strictly speaking, these methods aren't related to + * H5File and the actions could be carried out through the H5Group, H5Datatype and H5*DS classes. But, in some cases + * they allow a null input and expect the generated object to be of HDF5 type. So, we put them in the H5File class + * so that we create the proper type of HObject... H5Group for example. + * + * Here again, if there could be Implementation Class methods we'd use those. But, since we can't override class + * methods (they can only be shadowed in Java), these are instance methods. + * + **************************************************************************/ + + /* + * (non-Javadoc) + * + * @see hdf.object.FileFormat#createDatatype(int, int, int, int) + */ + @Override + public Datatype createDatatype(int tclass, int tsize, int torder, int tsign) throws Exception { + log.trace("create datatype"); + return new H5Datatype(tclass, tsize, torder, tsign); + } + + /* + * (non-Javadoc) + * + * @see hdf.object.FileFormat#createDatatype(int, int, int, int, Datatype) + */ + @Override + public Datatype createDatatype(int tclass, int tsize, int torder, int tsign, Datatype tbase) throws Exception { + log.trace("create datatype with base"); + return new H5Datatype(tclass, tsize, torder, tsign, tbase); + } + + /* + * (non-Javadoc) + * + * @see hdf.object.FileFormat#createScalarDS(java.lang.String, hdf.object.Group, hdf.object.Datatype, + * long[], long[], long[], int, java.lang.Object) + */ + @Override + public Dataset createScalarDS(String name, Group pgroup, Datatype type, long[] dims, long[] maxdims, long[] chunks, + int gzip, Object fillValue, Object data) throws Exception { + log.trace("createScalarDS(): name={}", name); + if (pgroup == null) { + // create new dataset at the root group by default + pgroup = (Group) get("/"); + } + + return H5ScalarDS.create(name, pgroup, type, dims, maxdims, chunks, gzip, fillValue, data); + } + + /* + * (non-Javadoc) + * + * @see hdf.object.FileFormat#createCompoundDS(java.lang.String, hdf.object.Group, long[], long[], long[], + * int, java.lang.String[], hdf.object.Datatype[], int[], java.lang.Object) + */ + @Override + public Dataset createCompoundDS(String name, Group pgroup, long[] dims, long[] maxdims, long[] chunks, int gzip, + String[] memberNames, Datatype[] memberDatatypes, int[] memberSizes, Object data) throws Exception { + log.trace("createCompoundDS(): start: name={}", name); + int nMembers = memberNames.length; + int memberRanks[] = new int[nMembers]; + long memberDims[][] = new long[nMembers][1]; + Dataset ds = null; + + for (int i = 0; i < nMembers; i++) { + memberRanks[i] = 1; + if (memberSizes == null) { + memberDims[i][0] = 1; + } + else { + memberDims[i][0] = memberSizes[i]; + } + } + + if (pgroup == null) { 
+ // create new dataset at the root group by default + pgroup = (Group) get("/"); + } + ds = H5CompoundDS.create(name, pgroup, dims, maxdims, chunks, gzip, memberNames, memberDatatypes, memberRanks, + memberDims, data); + + log.trace("createCompoundDS(): finish"); + return ds; + } + + /* + * (non-Javadoc) + * + * @see hdf.object.FileFormat#createImage(java.lang.String, hdf.object.Group, hdf.object.Datatype, + * long[], long[], long[], int, int, int, java.lang.Object) + */ + @Override + public Dataset createImage(String name, Group pgroup, Datatype type, long[] dims, long[] maxdims, long[] chunks, + int gzip, int ncomp, int interlace, Object data) throws Exception { + log.trace("createImage(): start: name={}", name); + if (pgroup == null) { // create at the root group by default + pgroup = (Group) get("/"); + } + + H5ScalarDS dataset = (H5ScalarDS)H5ScalarDS.create(name, pgroup, type, dims, maxdims, chunks, gzip, data); + + try { + H5File.createImageAttributes(dataset, interlace); + dataset.setIsImage(true); + } + catch (Exception ex) { + log.debug("createImage(): {} createImageAttributtes failure: ", name, ex); + } + + log.trace("createImage(): finish"); + return dataset; + } + + /*** + * Creates a new group with specified name in existing group. + * + * @see FileFormat#createGroup(String, Group) + */ + @Override + public Group createGroup(String name, Group pgroup) throws Exception { + return this.createGroup(name, pgroup, HDF5Constants.H5P_DEFAULT); + } + + /*** + * Creates a new group with specified name in existing group and with the group creation properties list, gplist. + * + * @see hdf.object.h5.H5Group#create(String, Group, long...) + * + */ + @Override + public Group createGroup(String name, Group pgroup, long... gplist) throws Exception { + // create new group at the root + if (pgroup == null) { + pgroup = (Group) this.get("/"); + } + + return H5Group.create(name, pgroup, gplist); + } + + /*** + * Creates the group creation property list identifier, gcpl. This identifier is used when creating Groups. + * + * @see FileFormat#createGcpl(int, int, int) + * + */ + @Override + public long createGcpl(int creationorder, int maxcompact, int mindense) throws Exception { + log.trace("createGcpl(): start"); + long gcpl = -1; + try { + gcpl = H5.H5Pcreate(HDF5Constants.H5P_GROUP_CREATE); + if (gcpl >= 0) { + // Set link creation order. + if (creationorder == Group.CRT_ORDER_TRACKED) { + log.trace("createGcpl(): creation order ORDER_TRACKED"); + H5.H5Pset_link_creation_order(gcpl, HDF5Constants.H5P_CRT_ORDER_TRACKED); + } + else if (creationorder == Group.CRT_ORDER_INDEXED) { + log.trace("createGcpl(): creation order ORDER_INDEXED"); + H5.H5Pset_link_creation_order(gcpl, HDF5Constants.H5P_CRT_ORDER_TRACKED + + HDF5Constants.H5P_CRT_ORDER_INDEXED); + } + // Set link storage. 
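+                // (Hedged note: maxcompact is the maximum number of links kept in
+                // compact storage in the group header; once converted to dense
+                // storage, the group reverts to compact only when the link count
+                // drops below mindense.)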
+ H5.H5Pset_link_phase_change(gcpl, maxcompact, mindense); + } + } + catch (Exception ex) { + log.debug("createGcpl(): failure: ", ex); + ex.printStackTrace(); + } + + log.trace("createGcpl(): finish"); + return gcpl; + } + + /* + * (non-Javadoc) + * + * @see hdf.object.FileFormat#createLink(hdf.object.Group, java.lang.String, hdf.object.HObject) + */ + @Override + public HObject createLink(Group parentGroup, String name, Object currentObj) throws Exception { + if (currentObj instanceof HObject) + return this.createLink(parentGroup, name, (HObject) currentObj, Group.LINK_TYPE_HARD); + else if (currentObj instanceof String) + return this.createLink(parentGroup, name, (String) currentObj, Group.LINK_TYPE_HARD); + + return null; + } + + /** + * Creates a link to an object in the open file. + *

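+     * <p>
+     * For example, a hedged caller-side sketch (file, grp, and dset are illustrative variables, not
+     * names from this class):
+     *
+     * <pre>
+     * // create a hard link named "alias" in grp that points at an existing dataset
+     * HObject link = file.createLink(grp, "alias", dset, Group.LINK_TYPE_HARD);
+     * </pre>
+     * <p>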
+ * If parentGroup is null, the new link is created in the root group. + * + * @param parentGroup + * The group where the link is created. + * @param name + * The name of the link. + * @param currentObj + * The existing object the new link will reference. + * @param lType + * The type of link to be created. It can be a hard link, a soft link or an external link. + * + * @return The object pointed to by the new link if successful; otherwise returns null. + * + * @throws Exception + * The exceptions thrown vary depending on the implementing class. + */ + @Override + public HObject createLink(Group parentGroup, String name, HObject currentObj, int lType) throws Exception { + log.trace("createLink(): start: name={}", name); + HObject obj = null; + int type = 0; + String current_full_name = null; + String new_full_name = null; + String parent_path = null; + + if (currentObj == null) { + log.debug("createLink(): Link target is null"); + log.trace("createLink(): finish"); + throw new HDF5Exception("The object pointed to by the link cannot be null."); + } + if ((parentGroup == null) || parentGroup.isRoot()) { + parent_path = HObject.SEPARATOR; + } + else { + parent_path = parentGroup.getPath() + HObject.SEPARATOR + parentGroup.getName() + HObject.SEPARATOR; + } + + new_full_name = parent_path + name; + + if (lType == Group.LINK_TYPE_HARD) { + type = HDF5Constants.H5L_TYPE_HARD; + log.trace("createLink(): type H5L_TYPE_HARD"); + } + else if (lType == Group.LINK_TYPE_SOFT) { + type = HDF5Constants.H5L_TYPE_SOFT; + log.trace("createLink(): type H5L_TYPE_SOFT"); + } + else if (lType == Group.LINK_TYPE_EXTERNAL) { + type = HDF5Constants.H5L_TYPE_EXTERNAL; + log.trace("createLink(): type H5L_TYPE_EXTERNAL"); + } + + if (H5.H5Lexists(fid, new_full_name, HDF5Constants.H5P_DEFAULT)) { + H5.H5Ldelete(fid, new_full_name, HDF5Constants.H5P_DEFAULT); + } + + if (type == HDF5Constants.H5L_TYPE_HARD) { + if ((currentObj instanceof Group) && ((Group) currentObj).isRoot()) { + log.debug("createLink(): cannot create link to root group"); + log.trace("createLink(): finish"); + throw new HDF5Exception("Cannot make a link to the root group."); + } + current_full_name = currentObj.getPath() + HObject.SEPARATOR + currentObj.getName(); + + H5.H5Lcreate_hard(fid, current_full_name, fid, new_full_name, HDF5Constants.H5P_DEFAULT, + HDF5Constants.H5P_DEFAULT); + } + + else if (type == HDF5Constants.H5L_TYPE_SOFT) { + H5.H5Lcreate_soft(currentObj.getFullName(), fid, new_full_name, HDF5Constants.H5P_DEFAULT, + HDF5Constants.H5P_DEFAULT); + } + + else if (type == HDF5Constants.H5L_TYPE_EXTERNAL) { + H5.H5Lcreate_external(currentObj.getFile(), currentObj.getFullName(), fid, new_full_name, + HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT); + } + + if (currentObj instanceof Group) { + log.trace("createLink(): Link target is type H5Group"); + obj = new H5Group(this, name, parent_path, parentGroup); + } + else if (currentObj instanceof H5Datatype) { + log.trace("createLink(): Link target is type H5Datatype"); + obj = new H5Datatype(this, name, parent_path); + } + else if (currentObj instanceof H5CompoundDS) { + log.trace("createLink(): Link target is type H5CompoundDS"); + obj = new H5CompoundDS(this, name, parent_path); + } + else if (currentObj instanceof H5ScalarDS) { + log.trace("createLink(): Link target is type H5ScalarDS"); + obj = new H5ScalarDS(this, name, parent_path); + } + + log.trace("createLink(): finish"); + return obj; + } + + /** + * Creates a soft or external link to object in a file that does not exist at 
the time the link is created. + * + * @param parentGroup + * The group where the link is created. + * @param name + * The name of the link. + * @param currentObj + * The name of the object the new link will reference. The object doesn't have to exist. + * @param lType + * The type of link to be created. + * + * @return The H5Link object pointed to by the new link if successful; otherwise returns null. + * + * @throws Exception + * The exceptions thrown vary depending on the implementing class. + */ + @Override + public HObject createLink(Group parentGroup, String name, String currentObj, int lType) throws Exception { + log.trace("createLink(): start: name={}", name); + HObject obj = null; + int type = 0; + String new_full_name = null; + String parent_path = null; + + if (currentObj == null) { + log.debug("createLink(): Link target is null"); + log.trace("createLink(): finish"); + throw new HDF5Exception("The object pointed to by the link cannot be null."); + } + if ((parentGroup == null) || parentGroup.isRoot()) { + parent_path = HObject.SEPARATOR; + } + else { + parent_path = parentGroup.getPath() + HObject.SEPARATOR + parentGroup.getName() + HObject.SEPARATOR; + } + + new_full_name = parent_path + name; + + if (lType == Group.LINK_TYPE_HARD) { + type = HDF5Constants.H5L_TYPE_HARD; + log.trace("createLink(): type H5L_TYPE_HARD"); + } + else if (lType == Group.LINK_TYPE_SOFT) { + type = HDF5Constants.H5L_TYPE_SOFT; + log.trace("createLink(): type H5L_TYPE_SOFT"); + } + else if (lType == Group.LINK_TYPE_EXTERNAL) { + type = HDF5Constants.H5L_TYPE_EXTERNAL; + log.trace("createLink(): type H5L_TYPE_EXTERNAL"); + } + + if (H5.H5Lexists(fid, new_full_name, HDF5Constants.H5P_DEFAULT)) { + H5.H5Ldelete(fid, new_full_name, HDF5Constants.H5P_DEFAULT); + } + + if (type == HDF5Constants.H5L_TYPE_SOFT) { + H5.H5Lcreate_soft(currentObj, fid, new_full_name, HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT); + } + + else if (type == HDF5Constants.H5L_TYPE_EXTERNAL) { + String fileName = null; + String objectName = null; + + // separate the object name and the file name + fileName = currentObj.substring(0, currentObj.lastIndexOf(FileFormat.FILE_OBJ_SEP)); + objectName = currentObj.substring(currentObj.indexOf(FileFormat.FILE_OBJ_SEP)); + objectName = objectName.substring(3); + + H5.H5Lcreate_external(fileName, objectName, fid, new_full_name, HDF5Constants.H5P_DEFAULT, + HDF5Constants.H5P_DEFAULT); + } + + if (name.startsWith(HObject.SEPARATOR)) { + name = name.substring(1); + } + obj = new H5Link(this, name, parent_path); + + log.trace("createLink(): finish"); + return obj; + } + + /** + * reload the sub-tree structure from file. + *

+     * reloadTree(Group g) is useful when the structure of the group in file is changed while the group structure in
+     * memory is not changed.
+     *
+     * @param g
+     *            the group where the structure is to be reloaded in memory
+     */
+    public void reloadTree(Group g) {
+        if (fid < 0 || rootObject == null || g == null) {
+            log.debug("reloadTree(): Invalid fid or null object");
+            return;
+        }
+
+        depth_first(g, Integer.MIN_VALUE);
+    }
+
+    /*
+     * (non-Javadoc) NOTE: Object references are copied but not updated by this method.
+     *
+     * @see hdf.object.FileFormat#copy(hdf.object.HObject, hdf.object.Group, java.lang.String)
+     */
+    @Override
+    public HObject copy(HObject srcObj, Group dstGroup, String dstName) throws Exception {
+        log.trace("copy(): start: srcObj={} dstGroup={} dstName={}", srcObj, dstGroup, dstName);
+        if ((srcObj == null) || (dstGroup == null)) {
+            log.debug("copy(): srcObj or dstGroup is null");
+            log.trace("copy(): finish");
+            return null;
+        }
+
+        if (dstName == null) {
+            dstName = srcObj.getName();
+        }
+
+        List<HObject> members = dstGroup.getMemberList();
+        int n = members.size();
+        for (int i = 0; i < n; i++) {
+            HObject obj = members.get(i);
+            String name = obj.getName();
+            while (name.equals(dstName))
+                dstName += "~copy";
+        }
+
+        HObject newObj = null;
+        if (srcObj instanceof Dataset) {
+            log.trace("copy(): srcObj instanceof Dataset");
+            newObj = copyDataset((Dataset) srcObj, (H5Group) dstGroup, dstName);
+        }
+        else if (srcObj instanceof H5Group) {
+            log.trace("copy(): srcObj instanceof H5Group");
+            newObj = copyGroup((H5Group) srcObj, (H5Group) dstGroup, dstName);
+        }
+        else if (srcObj instanceof H5Datatype) {
+            log.trace("copy(): srcObj instanceof H5Datatype");
+            newObj = copyDatatype((H5Datatype) srcObj, (H5Group) dstGroup, dstName);
+        }
+
+        log.trace("copy(): finish");
+        return newObj;
+    }
+
+    /*
+     * (non-Javadoc)
+     *
+     * @see hdf.object.FileFormat#delete(hdf.object.HObject)
+     */
+    @Override
+    public void delete(HObject obj) throws Exception {
+        if ((obj == null) || (fid < 0)) {
+            log.debug("delete(): Invalid FID or object is null");
+            return;
+        }
+
+        String name = obj.getPath() + obj.getName();
+
+        H5.H5Ldelete(fid, name, HDF5Constants.H5P_DEFAULT);
+    }
+
+    /*
+     * (non-Javadoc)
+     *
+     * @see hdf.object.FileFormat#writeAttribute(hdf.object.HObject, hdf.object.Attribute, boolean)
+     */
+    @Override
+    public void writeAttribute(HObject obj, Attribute attr, boolean attrExisted) throws HDF5Exception {
+        log.trace("writeAttribute(): start");
+
+        String obj_name = obj.getFullName();
+        String name = attr.getName();
+        long tid = -1;
+        long sid = -1;
+        long aid = -1;
+        log.trace("writeAttribute(): name is {}", name);
+
+        long objID = obj.open();
+        if (objID < 0) {
+            log.debug("writeAttribute(): Invalid Object ID");
+            log.trace("writeAttribute(): finish");
+            return;
+        }
+
+        if ((tid = attr.getDatatype().createNative()) >= 0) {
+            log.trace("writeAttribute(): tid {} from toNative :{}", tid, attr.getDatatype().getDescription());
+            try {
+                if (attr.isScalar())
+                    sid = H5.H5Screate(HDF5Constants.H5S_SCALAR);
+                else
+                    sid = H5.H5Screate_simple(attr.getRank(), attr.getDims(), null);
+
+                if (attrExisted) {
+                    aid = H5.H5Aopen_by_name(objID, obj_name, name, HDF5Constants.H5P_DEFAULT,
+                            HDF5Constants.H5P_DEFAULT);
+                }
+                else {
+                    aid = H5.H5Acreate(objID, name, tid, sid, HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT);
+                }
+                log.trace("writeAttribute(): aid {} opened/created", aid);
+
+                // update value of the attribute
+                Object attrValue;
+                try {
+                    attrValue = attr.getData();
+                }
+                catch (Exception ex) {
+ attrValue = null; + log.trace("writeAttribute(): getData() failure:", ex); + } + + log.trace("writeAttribute(): getValue"); + if (attrValue != null) { + if (attr.getDatatype().isVLEN()) { + log.trace("writeAttribute(): isVLEN"); + try { + /* + * must use native type to write attribute data to file (see bug 1069) + */ + long tmptid = tid; + tid = H5.H5Tget_native_type(tmptid); + try { + H5.H5Tclose(tmptid); + } + catch (Exception ex) { + log.debug("writeAttribute(): H5Tclose(tmptid {}) failure: ", tmptid, ex); + } + log.trace("writeAttribute(): H5.H5AwriteVL, {} : {}", name, attr.getDatatype().getDescription()); + if ((attrValue instanceof String) || (attr.getDims().length == 1)) { + H5.H5AwriteVL(aid, tid, (String[]) attrValue); + } + else { + log.info("writeAttribute(): Datatype is not a string, unable to write {} data", name); + } + } + catch (Exception ex) { + log.debug("writeAttribute(): native type failure: ", name, ex); + } + } + else { + if (attr.getDatatype().isRef() && attrValue instanceof String) { + // reference is a path+name to the object + attrValue = H5.H5Rcreate(getFID(), (String) attrValue, HDF5Constants.H5R_OBJECT, -1); + log.trace("writeAttribute(): Attribute class is CLASS_REFERENCE"); + } + else if (Array.get(attrValue, 0) instanceof String) { + long size = attr.getDatatype().getDatatypeSize(); + int len = ((String[]) attrValue).length; + byte[] bval = Dataset.stringToByte((String[]) attrValue, (int)size); + if (bval != null && bval.length == size * len) { + bval[bval.length - 1] = 0; + attrValue = bval; + } + log.trace("writeAttribute(): String={}: {}", attrValue, name); + } + + try { + /* + * must use native type to write attribute data to file (see bug 1069) + */ + long tmptid = tid; + tid = H5.H5Tget_native_type(tmptid); + try { + H5.H5Tclose(tmptid); + } + catch (Exception ex) { + log.debug("writeAttribute(): H5Tclose(tmptid {}) failure: ", tmptid, ex); + } + log.trace("writeAttribute(): H5.H5Awrite, {} :{}", name, attr.getDatatype().getDescription()); + H5.H5Awrite(aid, tid, attrValue); + } + catch (Exception ex) { + log.debug("writeAttribute(): native type failure: ", ex); + } + } + } // (attrValue != null) + } + finally { + try { + H5.H5Tclose(tid); + } + catch (Exception ex) { + log.debug("writeAttribute(): H5Tclose(tid {}) failure: ", tid, ex); + } + try { + H5.H5Sclose(sid); + } + catch (Exception ex) { + log.debug("writeAttribute(): H5Sclose(sid {}) failure: ", sid, ex); + } + try { + H5.H5Aclose(aid); + } + catch (Exception ex) { + log.debug("writeAttribute(): H5Aclose(aid {}) failure: ", aid, ex); + } + } + } + else { + log.debug("writeAttribute(): toNative failure"); + } + + obj.close(objID); + log.trace("writeAttribute(): finish"); + } + + /*************************************************************************** + * Implementations for methods specific to H5File + **************************************************************************/ + + /** + * Opens a file with specific file access property list. + *

+     * This function does the same as "long open()" except that you can also pass an HDF5 file access property to file
+     * open. For example,
+     *
+     * <pre>
+     * // All open objects remaining in the file are closed then file is closed
+     * long plist = H5.H5Pcreate(HDF5Constants.H5P_FILE_ACCESS);
+     * H5.H5Pset_fclose_degree(plist, HDF5Constants.H5F_CLOSE_STRONG);
+     * long fid = open(plist);
+     * </pre>
+ * + * @param plist + * a file access property list identifier. + * + * @return the file identifier if successful; otherwise returns negative value. + * + * @throws Exception + * If there is a failure. + */ + public long open(long plist) throws Exception { + return open(true, plist); + } + + /*************************************************************************** + * Private methods. + **************************************************************************/ + + /** + * Opens access to this file. + * + * @param loadFullHierarchy + * if true, load the full hierarchy into memory; otherwise just opens the file identifier. + * + * @return the file identifier if successful; otherwise returns negative value. + * + * @throws Exception + * If there is a failure. + */ + private long open(boolean loadFullHierarchy) throws Exception { + long the_fid = -1; + + long plist = HDF5Constants.H5P_DEFAULT; + + // BUG: HDF5Constants.H5F_CLOSE_STRONG does not flush cache + /** + * try { //All open objects remaining in the file are closed // then file is closed plist = + * H5.H5Pcreate (HDF5Constants.H5P_FILE_ACCESS); H5.H5Pset_fclose_degree ( plist, + * HDF5Constants.H5F_CLOSE_STRONG); } catch (Exception ex) {} the_fid = open(loadFullHierarchy, + * plist); try { H5.H5Pclose(plist); } catch (Exception ex) {} + */ + + log.trace("open(): loadFull={}", loadFullHierarchy); + the_fid = open(loadFullHierarchy, plist); + + return the_fid; + } + + /** + * Opens access to this file. + * + * @param loadFullHierarchy + * if true, load the full hierarchy into memory; otherwise just opens the file identifier. + * + * @return the file identifier if successful; otherwise returns negative value. + * + * @throws Exception + * If there is a failure. + */ + private long open(boolean loadFullHierarchy, long plist) throws Exception { + log.trace("open(loadFullHierarchy = {}, plist = {}): start", loadFullHierarchy, plist); + if (fid > 0) { + log.trace("open(): FID already opened"); + log.trace("open(): finish"); + return fid; // file is opened already + } + + // The cwd may be changed at Dataset.read() by System.setProperty("user.dir", newdir) + // to make it work for external datasets. We need to set it back + // before the file is closed/opened. 
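+        // (Hedged note: "hdfview.workdir" lets an embedding application pin the
+        // working directory; when it is unset, user.dir is used as the fallback.)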
+ String rootPath = System.getProperty("hdfview.workdir"); + if (rootPath == null) { + rootPath = System.getProperty("user.dir"); + } + System.setProperty("user.dir", rootPath); + + // check for valid file access permission + if (flag < 0) { + log.debug("open(): Invalid access identifier -- " + flag); + log.trace("open(): finish"); + throw new HDF5Exception("Invalid access identifer -- " + flag); + } + else if (HDF5Constants.H5F_ACC_CREAT == flag) { + // create a new file + log.trace("open(): create file"); + fid = H5.H5Fcreate(fullFileName, HDF5Constants.H5F_ACC_TRUNC, HDF5Constants.H5P_DEFAULT, + HDF5Constants.H5P_DEFAULT); + H5.H5Fflush(fid, HDF5Constants.H5F_SCOPE_LOCAL); + H5.H5Fclose(fid); + flag = HDF5Constants.H5F_ACC_RDWR; + } + else if (!exists()) { + log.debug("open(): File {} does not exist", fullFileName); + log.trace("open(): finish"); + throw new HDF5Exception("File does not exist -- " + fullFileName); + } + else if (((flag == HDF5Constants.H5F_ACC_RDWR) || (flag == HDF5Constants.H5F_ACC_CREAT)) && !canWrite()) { + log.debug("open(): Cannot write file {}", fullFileName); + log.trace("open(): finish"); + throw new HDF5Exception("Cannot write file, try opening as read-only -- " + fullFileName); + } + else if ((flag == HDF5Constants.H5F_ACC_RDONLY) && !canRead()) { + log.debug("open(): Cannot read file {}", fullFileName); + log.trace("open(): finish"); + throw new HDF5Exception("Cannot read file -- " + fullFileName); + } + + try { + log.trace("open(): open file"); + fid = H5.H5Fopen(fullFileName, flag, plist); + } + catch (Exception ex) { + try { + log.debug("open(): open failed, attempting to open file read-only"); + fid = H5.H5Fopen(fullFileName, HDF5Constants.H5F_ACC_RDONLY, HDF5Constants.H5P_DEFAULT); + isReadOnly = true; + } + catch (Exception ex2) { + // Attempt to open the file as a split file or family file + try { + File tmpf = new File(fullFileName); + String tmpname = tmpf.getName(); + int idx = tmpname.lastIndexOf('.'); + + if (tmpname.contains("-m")) { + log.debug("open(): open read-only failed, attempting to open split file"); + + while (idx > 0) { + char c = tmpname.charAt(idx - 1); + if (c != '-') + idx--; + else + break; + } + + if (idx > 0) { + tmpname = tmpname.substring(0, idx - 1); + log.trace("open(): attempting to open split file with name {}", tmpname); + long pid = H5.H5Pcreate(HDF5Constants.H5P_FILE_ACCESS); + H5.H5Pset_fapl_split(pid, "-m.h5", HDF5Constants.H5P_DEFAULT, "-r.h5", HDF5Constants.H5P_DEFAULT); + fid = H5.H5Fopen(tmpf.getParent() + File.separator + tmpname, flag, pid); + H5.H5Pclose(pid); + } + } + else { + log.debug("open(): open read-only failed, checking for file family"); + // try to see if it is a file family, always open a family file + // from the first one since other files will not be recognized + // as an HDF5 file + int cnt = idx; + while (idx > 0) { + char c = tmpname.charAt(idx - 1); + if (Character.isDigit(c)) + idx--; + else + break; + } + + if (idx > 0) { + cnt -= idx; + tmpname = tmpname.substring(0, idx) + "%0" + cnt + "d" + tmpname.substring(tmpname.lastIndexOf('.')); + log.trace("open(): attempting to open file family with name {}", tmpname); + long pid = H5.H5Pcreate(HDF5Constants.H5P_FILE_ACCESS); + H5.H5Pset_fapl_family(pid, 0, HDF5Constants.H5P_DEFAULT); + fid = H5.H5Fopen(tmpf.getParent() + File.separator + tmpname, flag, pid); + H5.H5Pclose(pid); + } + } + } + catch (Exception ex3) { + log.debug("open(): open failed: ", ex3); + } + } + } + + if ((fid >= 0) && loadFullHierarchy) { + // load the hierarchy of the 
file + log.trace("open(loadFullHeirarchy): load the hierarchy"); + loadIntoMemory(); + } + + log.trace("open(loadFullHeirarchy = {}, plist = {}): finish", loadFullHierarchy, plist); + return fid; + } + + /** + * Loads the file structure into memory. + */ + private void loadIntoMemory() { + log.trace("loadIntoMemory(): start"); + if (fid < 0) { + log.debug("loadIntoMemory(): Invalid FID"); + return; + } + + /* + * TODO: Root group's name should be changed to 'this.getName()' and all + * previous accesses of this field should now use getPath() instead of getName() + * to get the root group. The root group actually does have a path of "/". The + * depth_first method will have to be changed to setup other object paths + * appropriately, as it currently assumes the root path to be null. + */ + rootObject = new H5Group(this, "/", null, null); + log.trace("loadIntoMemory(): depth_first on root"); + depth_first(rootObject, 0); + log.trace("loadIntoMemory(): finish"); + } + + /** + * Retrieves the file structure by depth-first order, recursively. The current implementation retrieves groups and + * datasets only. It does not include named datatypes and soft links. + *

+ * It also detects and stops loops. A loop is detected if there exists an object with the same object ID by tracing + * a path back up to the root. + * + * @param parentObject + * the parent object. + */ + @SuppressWarnings("deprecation") + private int depth_first(HObject parentObject, int nTotal) { + log.trace("depth_first({}): start", parentObject); + + int nelems; + String fullPath = null; + String ppath = null; + long gid = -1; + + H5Group pgroup = (H5Group) parentObject; + ppath = pgroup.getPath(); + + if (ppath == null) { + fullPath = HObject.SEPARATOR; + } + else { + fullPath = ppath + pgroup.getName() + HObject.SEPARATOR; + } + + nelems = 0; + try { + gid = pgroup.open(); + H5G_info_t info = H5.H5Gget_info(gid); + nelems = (int) info.nlinks; + } + catch (HDF5Exception ex) { + nelems = -1; + log.debug("depth_first({}): H5Gget_info(gid {}) failure: ", parentObject, gid, ex); + } + + if (nelems <= 0) { + pgroup.close(gid); + log.debug("depth_first({}): nelems <= 0", parentObject); + log.trace("depth_first({}): finish", parentObject); + return nTotal; + } + + // since each call of H5.H5Gget_objname_by_idx() takes about one second. + // 1,000,000 calls take 12 days. Instead of calling it in a loop, + // we use only one call to get all the information, which takes about + // two seconds + int[] objTypes = new int[nelems]; + long[] fNos = new long[nelems]; + long[] objRefs = new long[nelems]; + String[] objNames = new String[nelems]; + + try { + H5.H5Gget_obj_info_full(fid, fullPath, objNames, objTypes, null, fNos, objRefs, indexType, indexOrder); + } + catch (HDF5Exception ex) { + log.debug("depth_first({}): failure: ", parentObject, ex); + log.trace("depth_first({}): finish", parentObject); + ex.printStackTrace(); + return nTotal; + } + + int nStart = getStartMembers(); + int nMax = getMaxMembers(); + + String obj_name; + int obj_type; + + // Iterate through the file to see members of the group + for (int i = 0; i < nelems; i++) { + obj_name = objNames[i]; + obj_type = objTypes[i]; + log.trace("depth_first({}): obj_name={}, obj_type={}", parentObject, obj_name, obj_type); + long oid[] = { objRefs[i], fNos[i] }; + + if (obj_name == null) { + log.trace("depth_first({}): continue after null obj_name", parentObject); + continue; + } + + nTotal++; + + if (nMax > 0) { + if ((nTotal - nStart) >= nMax) + break; // loaded enough objects + } + + boolean skipLoad = false; + if ((nTotal > 0) && (nTotal < nStart)) + skipLoad = true; + + // create a new group + if (obj_type == HDF5Constants.H5O_TYPE_GROUP) { + H5Group g = new H5Group(this, obj_name, fullPath, pgroup); + + pgroup.addToMemberList(g); + + // detect and stop loops + // a loop is detected if there exists object with the same + // object ID by tracing path back up to the root. + boolean hasLoop = false; + H5Group tmpObj = (H5Group) parentObject; + + while (tmpObj != null) { + if (tmpObj.equalsOID(oid) && (tmpObj.getPath() != null)) { + hasLoop = true; + break; + } + else { + tmpObj = (H5Group) tmpObj.getParent(); + } + } + + // recursively go through the next group + // stops if it has loop. 
+ if (!hasLoop) { + nTotal = depth_first(g, nTotal); + } + } + else if (skipLoad) { + continue; + } + else if (obj_type == HDF5Constants.H5O_TYPE_DATASET) { + long did = -1; + long tid = -1; + int tclass = -1; + try { + did = H5.H5Dopen(fid, fullPath + obj_name, HDF5Constants.H5P_DEFAULT); + if (did >= 0) { + tid = H5.H5Dget_type(did); + + tclass = H5.H5Tget_class(tid); + if ((tclass == HDF5Constants.H5T_ARRAY) || (tclass == HDF5Constants.H5T_VLEN)) { + // for ARRAY, the type is determined by the base type + long btid = H5.H5Tget_super(tid); + + tclass = H5.H5Tget_class(btid); + + try { + H5.H5Tclose(btid); + } + catch (Exception ex) { + log.debug("depth_first({})[{}] dataset {} H5Tclose(btid {}) failure: ", parentObject, i, obj_name, btid, ex); + } + } + } + else { + log.debug("depth_first({})[{}] {} dataset open failure", parentObject, i, obj_name); + } + } + catch (Exception ex) { + log.debug("depth_first({})[{}] {} dataset access failure: ", parentObject, i, obj_name, ex); + } + finally { + try { + H5.H5Tclose(tid); + } + catch (Exception ex) { + log.debug("depth_first({})[{}] daatset {} H5Tclose(tid {}) failure: ", parentObject, i, obj_name, tid, ex); + } + try { + H5.H5Dclose(did); + } + catch (Exception ex) { + log.debug("depth_first({})[{}] dataset {} H5Dclose(did {}) failure: ", parentObject, i, obj_name, did, ex); + } + } + Dataset d = null; + if (tclass == HDF5Constants.H5T_COMPOUND) { + // create a new compound dataset + d = new H5CompoundDS(this, obj_name, fullPath, oid); // deprecated! + } + else { + // create a new scalar dataset + d = new H5ScalarDS(this, obj_name, fullPath, oid); // deprecated! + } + + pgroup.addToMemberList(d); + } + else if (obj_type == HDF5Constants.H5O_TYPE_NAMED_DATATYPE) { + Datatype t = new H5Datatype(this, obj_name, fullPath, oid); // deprecated! + + pgroup.addToMemberList(t); + } + else if (obj_type == HDF5Constants.H5O_TYPE_UNKNOWN) { + H5Link link = new H5Link(this, obj_name, fullPath, oid); + + pgroup.addToMemberList(link); + continue; // do the next one, if the object is not identified. + } + } // ( i = 0; i < nelems; i++) + + pgroup.close(gid); + + log.trace("depth_first({}): finish", parentObject); + return nTotal; + } // private depth_first() + + /** + * Returns a list of all the members of this H5File in a + * breadth-first ordering that are rooted at the specified + * object. 
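+     * <p>
+     * For example (an illustrative sketch of the ordering): for a root group whose member list is [g1],
+     * where g1 contains [d1, g2] and g2 contains [d2], the returned list is [root, g1, d1, g2, d2].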
+     */
+    private static List<HObject> getMembersBreadthFirst(HObject obj) {
+        List<HObject> allMembers = new Vector<>();
+        Queue<HObject> queue = new LinkedList<>();
+        HObject currentObject = obj;
+
+        queue.add(currentObject);
+
+        while (!queue.isEmpty()) {
+            currentObject = queue.remove();
+            allMembers.add(currentObject);
+
+            if (currentObject instanceof Group) {
+                queue.addAll(((Group) currentObject).getMemberList());
+            }
+        }
+
+        return allMembers;
+    }
+
+    private HObject copyDataset(Dataset srcDataset, H5Group pgroup, String dstName) throws Exception {
+        log.trace("copyDataset(): start");
+        Dataset dataset = null;
+        long srcdid = -1, dstdid = -1;
+        long ocp_plist_id = -1;
+        String dname = null, path = null;
+
+        if (pgroup.isRoot()) {
+            path = HObject.SEPARATOR;
+        }
+        else {
+            path = pgroup.getPath() + pgroup.getName() + HObject.SEPARATOR;
+        }
+
+        if ((dstName == null) || dstName.equals(HObject.SEPARATOR) || (dstName.length() < 1)) {
+            dstName = srcDataset.getName();
+        }
+        dname = path + dstName;
+
+        try {
+            srcdid = srcDataset.open();
+            dstdid = pgroup.open();
+
+            try {
+                ocp_plist_id = H5.H5Pcreate(HDF5Constants.H5P_OBJECT_COPY);
+                H5.H5Pset_copy_object(ocp_plist_id, HDF5Constants.H5O_COPY_EXPAND_REFERENCE_FLAG);
+                H5.H5Ocopy(srcdid, ".", dstdid, dstName, ocp_plist_id, HDF5Constants.H5P_DEFAULT);
+            }
+            catch (Exception ex) {
+                log.debug("copyDataset(): {} failure: ", dname, ex);
+            }
+            finally {
+                try {
+                    H5.H5Pclose(ocp_plist_id);
+                }
+                catch (Exception ex) {
+                    log.debug("copyDataset(): {} H5Pclose(ocp_plist_id {}) failure: ", dname, ocp_plist_id, ex);
+                }
+            }
+
+            if (srcDataset instanceof H5ScalarDS) {
+                dataset = new H5ScalarDS(pgroup.getFileFormat(), dstName, path);
+            }
+            else {
+                dataset = new H5CompoundDS(pgroup.getFileFormat(), dstName, path);
+            }
+
+            pgroup.addToMemberList(dataset);
+        }
+        finally {
+            try {
+                srcDataset.close(srcdid);
+            }
+            catch (Exception ex) {
+                log.debug("copyDataset(): {} srcDataset.close(srcdid {}) failure: ", dname, srcdid, ex);
+            }
+            try {
+                pgroup.close(dstdid);
+            }
+            catch (Exception ex) {
+                log.debug("copyDataset(): {} pgroup.close(dstdid {}) failure: ", dname, dstdid, ex);
+            }
+        }
+
+        log.trace("copyDataset(): finish");
+        return dataset;
+    }
+
+    /**
+     * Constructs a dataset for specified dataset identifier.
+     *
+     * @param did
+     *            the dataset identifier
+     * @param name
+     *            the name of the dataset
+     * @param path
+     *            the path of the dataset
+     *
+     * @return the dataset if successful; otherwise return null.
+     *
+     * @throws HDF5Exception
+     *             If there is an error at the HDF5 library level.
+ */ + private Dataset getDataset(long did, String name, String path) throws HDF5Exception { + log.trace("getDataset(): start"); + Dataset dataset = null; + if (did >= 0) { + long tid = -1; + int tclass = -1; + try { + tid = H5.H5Dget_type(did); + tclass = H5.H5Tget_class(tid); + if (tclass == HDF5Constants.H5T_ARRAY) { + // for ARRAY, the type is determined by the base type + long btid = H5.H5Tget_super(tid); + tclass = H5.H5Tget_class(btid); + try { + H5.H5Tclose(btid); + } + catch (Exception ex) { + log.debug("getDataset(): {} H5Tclose(btid {}) failure: ", name, btid, ex); + } + } + } + finally { + try { + H5.H5Tclose(tid); + } + catch (Exception ex) { + log.debug("getDataset(): {} H5Tclose(tid {}) failure: ", name, tid, ex); + } + } + + if (tclass == HDF5Constants.H5T_COMPOUND) { + dataset = new H5CompoundDS(this, name, path); + } + else { + dataset = new H5ScalarDS(this, name, path); + } + } + else { + log.debug("getDataset(): id failure"); + } + + log.trace("getDataset(): finish"); + return dataset; + } + + /** + * Copies a named datatype to another location. + * + * @param srcType + * the source datatype + * @param pgroup + * the group which the new datatype is copied to + * @param dstName + * the name of the new dataype + * + * @throws Exception + * If there is a failure. + */ + private HObject copyDatatype(Datatype srcType, H5Group pgroup, String dstName) throws Exception { + log.trace("copyDatatype(): start"); + Datatype datatype = null; + long tid_src = -1; + long gid_dst = -1; + String path = null; + + if (pgroup.isRoot()) { + path = HObject.SEPARATOR; + } + else { + path = pgroup.getPath() + pgroup.getName() + HObject.SEPARATOR; + } + + if ((dstName == null) || dstName.equals(HObject.SEPARATOR) || (dstName.length() < 1)) { + dstName = srcType.getName(); + } + + try { + tid_src = srcType.open(); + gid_dst = pgroup.open(); + + try { + H5.H5Ocopy(tid_src, ".", gid_dst, dstName, HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT); + } + catch (Exception ex) { + log.debug("copyDatatype(): {} H5Ocopy(tid_src {}) failure: ", dstName, tid_src, ex); + } + datatype = new H5Datatype(pgroup.getFileFormat(), dstName, path); + + pgroup.addToMemberList(datatype); + } + finally { + try { + srcType.close(tid_src); + } + catch (Exception ex) { + log.debug("copyDatatype(): {} srcType.close(tid_src {}) failure: ", dstName, tid_src, ex); + } + try { + pgroup.close(gid_dst); + } + catch (Exception ex) { + log.debug("copyDatatype(): {} pgroup.close(gid_dst {}) failure: ", dstName, gid_dst, ex); + } + } + + log.trace("copyDatatype(): finish"); + return datatype; + } + + /** + * Copies a group and its members to a new location. + * + * @param srcGroup + * the source group + * @param dstGroup + * the location where the new group is located + * @param dstName + * the name of the new group + * + * @throws Exception + * If there is a failure. 
+ */ + private HObject copyGroup(H5Group srcGroup, H5Group dstGroup, String dstName) throws Exception { + log.trace("copyGroup(): start"); + H5Group group = null; + long srcgid = -1, dstgid = -1; + String path = null; + + if (dstGroup.isRoot()) { + path = HObject.SEPARATOR; + } + else { + path = dstGroup.getPath() + dstGroup.getName() + HObject.SEPARATOR; + } + + if ((dstName == null) || dstName.equals(HObject.SEPARATOR) || (dstName.length() < 1)) { + dstName = srcGroup.getName(); + } + + try { + srcgid = srcGroup.open(); + dstgid = dstGroup.open(); + try { + H5.H5Ocopy(srcgid, ".", dstgid, dstName, HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT); + } + catch (Exception ex) { + log.debug("copyGroup(): {} H5Ocopy(srcgid {}) failure: ", dstName, srcgid, ex); + } + + group = new H5Group(dstGroup.getFileFormat(), dstName, path, dstGroup); + depth_first(group, Integer.MIN_VALUE); // reload all + dstGroup.addToMemberList(group); + } + + finally { + try { + srcGroup.close(srcgid); + } + catch (Exception ex) { + log.debug("copyGroup(): {} srcGroup.close(srcgid {}) failure: ", dstName, srcgid, ex); + } + try { + dstGroup.close(dstgid); + } + catch (Exception ex) { + log.debug("copyGroup(): {} pgroup.close(dstgid {}) failure: ", dstName, dstgid, ex); + } + } + + log.trace("copyGroup(): finish"); + return group; + } + + /** + * Constructs a group for specified group identifier and retrieves members. + * + * @param gid + * The group identifier. + * @param name + * The group name. + * @param pGroup + * The parent group, or null for the root group. + * + * @return The group if successful; otherwise returns false. + * + * @throws HDF5Exception + * If there is an error at the HDF5 library level. + */ + private H5Group getGroup(long gid, String name, Group pGroup) throws HDF5Exception { + log.trace("getGroup(): start"); + String parentPath = null; + String thisFullName = null; + String memberFullName = null; + + if (pGroup == null) { + thisFullName = name = "/"; + } + else { + parentPath = pGroup.getFullName(); + if ((parentPath == null) || parentPath.equals("/")) { + thisFullName = "/" + name; + } + else { + thisFullName = parentPath + "/" + name; + } + } + + // get rid of any extra "/" + if (parentPath != null) { + parentPath = parentPath.replaceAll("//", "/"); + } + if (thisFullName != null) { + thisFullName = thisFullName.replaceAll("//", "/"); + } + + log.trace("getGroup(): fullName={}", thisFullName); + + H5Group group = new H5Group(this, name, parentPath, pGroup); + + H5G_info_t group_info = null; + H5O_info_t obj_info = null; + long oid = -1; + String link_name = null; + try { + group_info = H5.H5Gget_info(gid); + } + catch (Exception ex) { + log.debug("getGroup(): {} H5Gget_info(gid {}) failure: ", name, gid, ex); + } + try { + oid = H5.H5Oopen(gid, thisFullName, HDF5Constants.H5P_DEFAULT); + } + catch (Exception ex) { + log.debug("getGroup(): {} H5Oopen(gid {}) failure: ", name, gid, ex); + } + + // retrieve only the immediate members of the group, do not follow + // subgroups + for (int i = 0; i < group_info.nlinks; i++) { + try { + link_name = H5.H5Lget_name_by_idx(gid, thisFullName, indexType, indexOrder, i, + HDF5Constants.H5P_DEFAULT); + obj_info = H5 + .H5Oget_info_by_idx(oid, thisFullName, indexType, indexOrder, i, HDF5Constants.H5P_DEFAULT); + } + catch (HDF5Exception ex) { + log.debug("getGroup()[{}]: {} name,info failure: ", i, name, ex); + log.trace("getGroup()[{}]: continue", i); + // do not stop if accessing one member fails + continue; + } + // create a new group + if 
(obj_info.type == HDF5Constants.H5O_TYPE_GROUP) { + H5Group g = new H5Group(this, link_name, thisFullName, group); + group.addToMemberList(g); + } + else if (obj_info.type == HDF5Constants.H5O_TYPE_DATASET) { + long did = -1; + Dataset d = null; + + if ((thisFullName == null) || thisFullName.equals("/")) { + memberFullName = "/" + link_name; + } + else { + memberFullName = thisFullName + "/" + link_name; + } + + try { + did = H5.H5Dopen(fid, memberFullName, HDF5Constants.H5P_DEFAULT); + d = getDataset(did, link_name, thisFullName); + } + finally { + try { + H5.H5Dclose(did); + } + catch (Exception ex) { + log.debug("getGroup()[{}]: {} H5Dclose(did {}) failure: ", i, name, did, ex); + } + } + group.addToMemberList(d); + } + else if (obj_info.type == HDF5Constants.H5O_TYPE_NAMED_DATATYPE) { + Datatype t = new H5Datatype(this, link_name, thisFullName); + group.addToMemberList(t); + } + } // End of for loop. + try { + if (oid >= 0) + H5.H5Oclose(oid); + } + catch (Exception ex) { + log.debug("getGroup(): {} H5Oclose(oid {}) failure: ", name, oid, ex); + } + log.trace("getGroup(): finish"); + return group; + } + + /** + * Retrieves the name of the target object that is being linked to. + * + * @param obj + * The current link object. + * + * @return The name of the target object. + * + * @throws Exception + * If there is an error at the HDF5 library level. + */ + public static String getLinkTargetName(HObject obj) throws Exception { + log.trace("getLinkTargetName(): start"); + String[] link_value = { null, null }; + String targetObjName = null; + + if (obj == null) { + log.debug("getLinkTargetName(): object is null"); + log.trace("getLinkTargetName(): finish"); + return null; + } + + if (obj.getFullName().equals("/")) { + log.debug("getLinkTargetName(): object is root group, links not allowed"); + log.trace("getLinkTargetName(): finish"); + return null; + } + + H5L_info_t link_info = null; + try { + link_info = H5.H5Lget_info(obj.getFID(), obj.getFullName(), HDF5Constants.H5P_DEFAULT); + } + catch (Exception err) { + log.debug("getLinkTargetName(): H5Lget_info {} failure: ", obj.getFullName(), err); + } + if (link_info != null) { + if ((link_info.type == HDF5Constants.H5L_TYPE_SOFT) || (link_info.type == HDF5Constants.H5L_TYPE_EXTERNAL)) { + try { + H5.H5Lget_value(obj.getFID(), obj.getFullName(), link_value, HDF5Constants.H5P_DEFAULT); + } + catch (Exception ex) { + log.debug("getLinkTargetName(): H5Lget_value {} failure: ", obj.getFullName(), ex); + } + if (link_info.type == HDF5Constants.H5L_TYPE_SOFT) + targetObjName = link_value[0]; + else if (link_info.type == HDF5Constants.H5L_TYPE_EXTERNAL) { + targetObjName = link_value[1] + FileFormat.FILE_OBJ_SEP + link_value[0]; + } + } + } + log.trace("getLinkTargetName(): finish"); + return targetObjName; + } + + /** + * Export dataset. + * + * @param file_export_name + * The file name to export data into. + * @param file_name + * The name of the HDF5 file containing the dataset. + * @param object_path + * The full path of the dataset to be exported. + * + * @throws Exception + * If there is a failure. + */ + @Override + public void exportDataset(String file_export_name, String file_name, String object_path, int binary_order) + throws Exception { + H5.H5export_dataset(file_export_name, file_name, object_path, binary_order); + } + + /** + * Renames an attribute. + * + * @param obj + * The object whose attribute is to be renamed. + * @param oldAttrName + * The current name of the attribute. + * @param newAttrName + * The new name of the attribute. 
+ * + * @throws Exception + * If there is an error at the HDF5 library level. + */ + @Override + public void renameAttribute(HObject obj, String oldAttrName, String newAttrName) throws Exception { + log.trace("renameAttribute(): rename {} to {}", oldAttrName, newAttrName); + H5.H5Arename_by_name(obj.getFID(), obj.getFullName(), oldAttrName, newAttrName, HDF5Constants.H5P_DEFAULT); + } + + /** + * Rename the given object + * + * @param obj + * the object to be renamed. + * @param newName + * the new name of the object. + * + * @throws Exception + * If there is a failure. + */ + public static void renameObject(HObject obj, String newName) throws Exception { + String currentFullPath = obj.getPath() + obj.getName(); + String newFullPath = obj.getPath() + newName; + + currentFullPath = currentFullPath.replaceAll("//", "/"); + newFullPath = newFullPath.replaceAll("//", "/"); + + if (currentFullPath.equals("/")) { + throw new HDF5Exception("Can't rename the root group."); + } + + if (currentFullPath.equals(newFullPath)) { + throw new HDF5Exception("The new name is the same as the current name."); + } + + // Call the library to move things in the file + H5.H5Lmove(obj.getFID(), currentFullPath, obj.getFID(), newFullPath, HDF5Constants.H5P_DEFAULT, + HDF5Constants.H5P_DEFAULT); + } + + public static int getIndexTypeValue(String strtype) { + if (strtype.compareTo("H5_INDEX_NAME") == 0) + return HDF5Constants.H5_INDEX_NAME; + if (strtype.compareTo("H5_INDEX_CRT_ORDER") == 0) + return HDF5Constants.H5_INDEX_CRT_ORDER; + if (strtype.compareTo("H5_INDEX_N") == 0) + return HDF5Constants.H5_INDEX_N; + return HDF5Constants.H5_INDEX_UNKNOWN; + } + + public static int getIndexOrderValue(String strorder) { + if (strorder.compareTo("H5_ITER_INC") == 0) + return HDF5Constants.H5_ITER_INC; + if (strorder.compareTo("H5_ITER_DEC") == 0) + return HDF5Constants.H5_ITER_DEC; + if (strorder.compareTo("H5_ITER_NATIVE") == 0) + return HDF5Constants.H5_ITER_NATIVE; + if (strorder.compareTo("H5_ITER_N") == 0) + return HDF5Constants.H5_ITER_N; + return HDF5Constants.H5_ITER_UNKNOWN; + } + + @Override + public int getIndexType(String strtype) { + if (strtype != null) { + if (strtype.compareTo("H5_INDEX_NAME") == 0) + return HDF5Constants.H5_INDEX_NAME; + if (strtype.compareTo("H5_INDEX_CRT_ORDER") == 0) + return HDF5Constants.H5_INDEX_CRT_ORDER; + return HDF5Constants.H5_INDEX_UNKNOWN; + } + return getIndexType(); + } + + public int getIndexType() { + return indexType; + } + + @Override + public void setIndexType(int indexType) { + this.indexType = indexType; + } + + @Override + public int getIndexOrder(String strorder) { + if (strorder != null) { + if (strorder.compareTo("H5_ITER_INC") == 0) + return HDF5Constants.H5_ITER_INC; + if (strorder.compareTo("H5_ITER_DEC") == 0) + return HDF5Constants.H5_ITER_DEC; + if (strorder.compareTo("H5_ITER_NATIVE") == 0) + return HDF5Constants.H5_ITER_NATIVE; + if (strorder.compareTo("H5_ITER_N") == 0) + return HDF5Constants.H5_ITER_N; + return HDF5Constants.H5_ITER_UNKNOWN; + } + return getIndexOrder(); + } + + public int getIndexOrder() { + return indexOrder; + } + + @Override + public void setIndexOrder(int indexOrder) { + this.indexOrder = indexOrder; + } +} diff --git a/src/main/java/hdf/object/h5/H5Group.java b/src/main/java/hdf/object/h5/H5Group.java new file mode 100644 index 0000000..cab18a8 --- /dev/null +++ b/src/main/java/hdf/object/h5/H5Group.java @@ -0,0 +1,506 @@ +/***************************************************************************** + * Copyright by The HDF Group. 
* + * Copyright by the Board of Trustees of the University of Illinois. * + * All rights reserved. * + * * + * This file is part of the HDF Java Products distribution. * + * The full copyright notice, including terms governing use, modification, * + * and redistribution, is contained in the files COPYING and Copyright.html. * + * COPYING can be found at the root of the source code distribution tree. * + * Or, see https://support.hdfgroup.org/products/licenses.html * + * If you do not have access to either file, you may request a copy from * + * help@hdfgroup.org. * + ****************************************************************************/ + +package hdf.object.h5; + +import java.util.List; +import java.util.Vector; + +import hdf.hdf5lib.H5; +import hdf.hdf5lib.HDF5Constants; +import hdf.hdf5lib.HDFNativeData; +import hdf.hdf5lib.exceptions.HDF5Exception; +import hdf.hdf5lib.structs.H5G_info_t; +import hdf.hdf5lib.structs.H5O_info_t; +import hdf.object.Attribute; +import hdf.object.FileFormat; +import hdf.object.Group; +import hdf.object.HObject; + +/** + * An H5Group object represents an existing HDF5 group in file. + *

+ * In HDF5, every object has at least one name. An HDF5 group is used to store a + * set of the names together in one place, i.e. a group. The general structure + * of a group is similar to that of the UNIX file system in that the group may + * contain references to other groups or data objects just as the UNIX directory + * may contain sub-directories or files. + *

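As a usage sketch of this object layer (not part of the patch: the file name is an assumption, and it presumes the vendored FileFormat keeps HDFView's getRootObject() and raw getMemberList()), listing the immediate members of the root group looks roughly like:

    import hdf.object.FileFormat;
    import hdf.object.Group;
    import hdf.object.HObject;

    public class ListGroupSketch {
        public static void main(String[] args) throws Exception {
            FileFormat fmt = FileFormat.getFileFormat(FileFormat.FILE_TYPE_HDF5);
            FileFormat file = fmt.createInstance("sample.h5", FileFormat.READ);
            file.open();
            try {
                Group root = (Group) file.getRootObject();
                for (Object m : root.getMemberList()) { // immediate members only
                    System.out.println(((HObject) m).getFullName());
                }
            }
            finally {
                file.close();
            }
        }
    }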
+ * For more information on HDF5 Groups, + * + * HDF5 User's Guide + * + * @version 1.1 9/4/2007 + * @author Peter X. Cao + */ +public class H5Group extends Group { + + private static final long serialVersionUID = -951164512330444150L; + + private static final org.slf4j.Logger log = org.slf4j.LoggerFactory.getLogger(H5Group.class); + + /** + * The list of attributes of this data object. Members of the list are + * instance of Attribute. + */ + @SuppressWarnings("rawtypes") + protected List attributeList; + + private int nAttributes = -1; + + private H5O_info_t obj_info; + + /** + * Constructs an HDF5 group with specific name, path, and parent. + * + * @param theFile + * the file which containing the group. + * @param name + * the name of this group, e.g. "grp01". + * @param path + * the full path of this group, e.g. "/groups/". + * @param parent + * the parent of this group. + */ + public H5Group(FileFormat theFile, String name, String path, Group parent) { + this(theFile, name, path, parent, null); + } + + /** + * @deprecated Not for public use in the future.
+ * Using {@link #H5Group(FileFormat, String, String, Group)} + * + * @param theFile + * the file which containing the group. + * @param name + * the name of this group, e.g. "grp01". + * @param path + * the full path of this group, e.g. "/groups/". + * @param parent + * the parent of this group. + * @param oid + * the oid of this group. + */ + @Deprecated + public H5Group(FileFormat theFile, String name, String path, Group parent, long[] oid) { + super(theFile, name, path, parent, oid); + nMembersInFile = -1; + obj_info = new H5O_info_t(-1L, -1L, 0, 0, -1L, 0L, 0L, 0L, 0L, null, null, null); + + if ((oid == null) && (theFile != null)) { + // retrieve the object ID + try { + byte[] ref_buf = H5.H5Rcreate(theFile.getFID(), this.getFullName(), HDF5Constants.H5R_OBJECT, -1); + this.oid = new long[1]; + this.oid[0] = HDFNativeData.byteToLong(ref_buf, 0); + } + catch (Exception ex) { + this.oid = new long[1]; + this.oid[0] = 0; + } + } + } + + /* + * (non-Javadoc) + * + * @see hdf.object.DataFormat#hasAttribute() + */ + @Override + public boolean hasAttribute() { + obj_info.num_attrs = nAttributes; + + if (obj_info.num_attrs < 0) { + long gid = open(); + if (gid > 0) { + try { + obj_info = H5.H5Oget_info(gid); + + } + catch (Exception ex) { + obj_info.num_attrs = 0; + } + close(gid); + } + } + + log.trace("hasAttribute(): nAttributes={}", obj_info.num_attrs); + + return (obj_info.num_attrs > 0); + } + + /* + * (non-Javadoc) + * + * @see hdf.object.Group#getNumberOfMembersInFile() + */ + @Override + public int getNumberOfMembersInFile() { + if (nMembersInFile < 0) { + long gid = open(); + if (gid > 0) { + try { + H5G_info_t group_info = null; + group_info = H5.H5Gget_info(gid); + nMembersInFile = (int) group_info.nlinks; + } + catch (Exception ex) { + nMembersInFile = 0; + } + close(gid); + } + } + return nMembersInFile; + } + + /* + * (non-Javadoc) + * + * @see hdf.object.Group#clear() + */ + @SuppressWarnings("rawtypes") + @Override + public void clear() { + super.clear(); + + if (attributeList != null) { + ((Vector) attributeList).setSize(0); + } + } + + /* + * (non-Javadoc) + * + * @see hdf.object.DataFormat#getMetadata() + */ + @Override + @SuppressWarnings("rawtypes") + public List getMetadata() throws HDF5Exception { + return this.getMetadata(fileFormat.getIndexType(null), fileFormat.getIndexOrder(null)); + } + + /* + * (non-Javadoc) + * + * @see hdf.object.DataFormat#getMetadata(int...) + */ + @SuppressWarnings("rawtypes") + public List getMetadata(int... 
attrPropList) throws HDF5Exception { + log.trace("getMetadata(): start"); + if (attributeList == null) { + log.trace("getMetadata(): get attributeList"); + + int indxType = fileFormat.getIndexType(null); + int order = fileFormat.getIndexOrder(null); + + if (attrPropList.length > 0) { + indxType = attrPropList[0]; + if (attrPropList.length > 1) { + order = attrPropList[1]; + } + } + try { + attributeList = H5File.getAttribute(this, indxType, order); + } + catch (Exception ex) { + log.debug("getMetadata(): H5File.getAttribute failure: ", ex); + } + } + + try { + if (!this.isRoot()) this.linkTargetObjName = H5File.getLinkTargetName(this); + } + catch (Exception ex) { + log.debug("getMetadata(): getLinkTargetName failure: ", ex); + } + + log.trace("getMetadata(): finish"); + return attributeList; + } + + /* + * (non-Javadoc) + * + * @see hdf.object.DataFormat#writeMetadata(java.lang.Object) + */ + @Override + @SuppressWarnings("unchecked") + public void writeMetadata(Object info) throws Exception { + log.trace("writeMetadata(): start"); + // only attribute metadata is supported. + if (!(info instanceof Attribute)) { + log.debug("writeMetadata(): Object not an Attribute"); + log.trace("writeMetadata(): finish"); + return; + } + + boolean attrExisted = false; + Attribute attr = (Attribute) info; + log.trace("writeMetadata(): {}", attr.getName()); + + if (attributeList == null) { + this.getMetadata(); + } + + if (attributeList != null) attrExisted = attributeList.contains(attr); + + getFileFormat().writeAttribute(this, attr, attrExisted); + // add the new attribute into attribute list + if (!attrExisted) { + attributeList.add(attr); + nAttributes = attributeList.size(); + } + log.trace("writeMetadata(): finish"); + } + + /* + * (non-Javadoc) + * + * @see hdf.object.DataFormat#removeMetadata(java.lang.Object) + */ + @Override + @SuppressWarnings("rawtypes") + public void removeMetadata(Object info) throws HDF5Exception { + log.trace("removeMetadata(): start"); + // only attribute metadata is supported. + if (!(info instanceof Attribute)) { + log.debug("removeMetadata(): Object not an Attribute"); + log.trace("removeMetadata(): finish"); + return; + } + + Attribute attr = (Attribute) info; + log.trace("removeMetadata(): {}", attr.getName()); + long gid = open(); + if(gid >= 0) { + try { + H5.H5Adelete(gid, attr.getName()); + List attrList = getMetadata(); + attrList.remove(attr); + nAttributes = attributeList.size(); + } + finally { + close(gid); + } + } + else { + log.debug("removeMetadata(): failed to open group"); + } + + log.trace("removeMetadata(): finish"); + } + + /* + * (non-Javadoc) + * + * @see hdf.object.DataFormat#updateMetadata(java.lang.Object) + */ + @Override + public void updateMetadata(Object info) throws HDF5Exception { + log.trace("updateMetadata(): start"); + // only attribute metadata is supported. 
+ if (!(info instanceof Attribute)) { + log.debug("updateMetadata(): Object not an Attribute"); + log.trace("updateMetadata(): finish"); + return; + } + + nAttributes = -1; + log.trace("updateMetadata(): finish"); + } + + /* + * (non-Javadoc) + * + * @see hdf.object.HObject#open() + */ + @Override + public long open() { + log.trace("open(): start"); + long gid = -1; + + try { + if (isRoot()) { + gid = H5.H5Gopen(getFID(), SEPARATOR, HDF5Constants.H5P_DEFAULT); + } + else { + gid = H5.H5Gopen(getFID(), getPath() + getName(), HDF5Constants.H5P_DEFAULT); + } + + } + catch (HDF5Exception ex) { + gid = -1; + } + + log.trace("open(): finish"); + return gid; + } + + /* + * (non-Javadoc) + * + * @see hdf.object.HObject#close(int) + */ + @Override + public void close(long gid) { + try { + H5.H5Gclose(gid); + } + catch (HDF5Exception ex) { + log.debug("close(): H5Gclose(gid {}): ", gid, ex); + } + } + + /** + * Creates a new group with a name in a group and with the group creation + * properties specified in gplist. + *

+ * The gplist contains a sequence of group creation property list
+ * identifiers: lcpl, gcpl, gapl. It allows the user to create a group with
+ * group creation properties. It will close the group creation properties
+ * specified in gplist.
+ *
+ * @see H5#H5Gcreate(long, String, long, long, long) for the
+ *      order of property list identifiers.
+ *
+ * @param name
+ *            The name of a new group.
+ * @param pgroup
+ *            The parent group object.
+ * @param gplist
+ *            The group creation properties, in which the order of the
+ *            properties conforms to the HDF5 library API, H5Gcreate(), i.e.
+ *            lcpl, gcpl and gapl, where
+ *            <ul>
+ *            <li>lcpl : Property list for link creation</li>
+ *            <li>gcpl : Property list for group creation</li>
+ *            <li>gapl : Property list for group access</li>
+ *            </ul>
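A hedged usage sketch of these property lists (not from the patch; the group name and helper are made up): an lcpl passed as gplist[0] must be closed by the caller, since create() only closes a supplied gcpl.

    import hdf.hdf5lib.H5;
    import hdf.hdf5lib.HDF5Constants;
    import hdf.object.Group;
    import hdf.object.h5.H5Group;

    public class CreateGroupSketch {
        // creates a child group whose link name is stored as UTF-8
        static H5Group createUtf8Child(Group parent) throws Exception {
            long lcpl = H5.H5Pcreate(HDF5Constants.H5P_LINK_CREATE);
            try {
                H5.H5Pset_char_encoding(lcpl, HDF5Constants.H5T_CSET_UTF8);
                // gplist order: lcpl, gcpl, gapl
                return H5Group.create("r\u00e9sultats", parent, lcpl);
            }
            finally {
                H5.H5Pclose(lcpl); // create() closes the gcpl it is given, not the lcpl
            }
        }
    }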
+ * + * @return The new group if successful; otherwise returns null. + * + * @throws Exception if there is a failure. + */ + public static H5Group create(String name, Group pgroup, long... gplist) throws Exception { + log.trace("create(): start"); + H5Group group = null; + String fullPath = null; + long lcpl = HDF5Constants.H5P_DEFAULT; + long gcpl = HDF5Constants.H5P_DEFAULT; + long gapl = HDF5Constants.H5P_DEFAULT; + + if (gplist.length > 0) { + lcpl = gplist[0]; + if (gplist.length > 1) { + gcpl = gplist[1]; + if (gplist.length > 2) gapl = gplist[2]; + } + } + + if ((name == null) || (pgroup == null)) { + log.debug("create(): one or more parameters are null"); + log.trace("create(): finish"); + System.err.println("(name == null) || (pgroup == null)"); + return null; + } + + H5File file = (H5File) pgroup.getFileFormat(); + + if (file == null) { + log.debug("create(): Parent Group FileFormat is null"); + log.trace("create(): finish"); + System.err.println("Could not get file that contains object"); + return null; + } + + String path = HObject.SEPARATOR; + if (!pgroup.isRoot()) { + path = pgroup.getPath() + pgroup.getName() + HObject.SEPARATOR; + if (name.endsWith("/")) { + name = name.substring(0, name.length() - 1); + } + int idx = name.lastIndexOf('/'); + if (idx >= 0) { + name = name.substring(idx + 1); + } + } + + fullPath = path + name; + + // create a new group and add it to the parent node + long gid = H5.H5Gcreate(file.open(), fullPath, lcpl, gcpl, gapl); + try { + H5.H5Gclose(gid); + } + catch (Exception ex) { + log.debug("create(): H5Gcreate {} H5Gclose(gid {}) failure: ", fullPath, gid, ex); + } + + byte[] ref_buf = H5.H5Rcreate(file.open(), fullPath, HDF5Constants.H5R_OBJECT, -1); + long l = HDFNativeData.byteToLong(ref_buf, 0); + long[] oid = { l }; + + group = new H5Group(file, name, path, pgroup, oid); + + if (group != null) { + pgroup.addToMemberList(group); + } + + if (gcpl > 0) { + try { + H5.H5Pclose(gcpl); + } + catch (final Exception ex) { + log.debug("create(): create prop H5Pclose(gcpl {}) failure: ", gcpl, ex); + } + } + + log.trace("create(): finish"); + return group; + } + + /* + * (non-Javadoc) + * + * @see hdf.object.HObject#setName(java.lang.String) + */ + @Override + public void setName(String newName) throws Exception { + H5File.renameObject(this, newName); + super.setName(newName); + } + + /* + * (non-Javadoc) + * + * @see hdf.object.HObject#setPath(java.lang.String) + */ + @SuppressWarnings("rawtypes") + @Override + public void setPath(String newPath) throws Exception { + super.setPath(newPath); + + List members = this.getMemberList(); + if (members == null) { + return; + } + + int n = members.size(); + HObject obj = null; + for (int i = 0; i < n; i++) { + obj = (HObject) members.get(i); + obj.setPath(getPath() + getName() + HObject.SEPARATOR); + } + } +} diff --git a/src/main/java/hdf/object/h5/H5Link.java b/src/main/java/hdf/object/h5/H5Link.java new file mode 100644 index 0000000..88efd56 --- /dev/null +++ b/src/main/java/hdf/object/h5/H5Link.java @@ -0,0 +1,112 @@ +/***************************************************************************** + * Copyright by The HDF Group. * + * Copyright by the Board of Trustees of the University of Illinois. * + * All rights reserved. * + * * + * This file is part of the HDF Java Products distribution. * + * The full copyright notice, including terms governing use, modification, * + * and redistribution, is contained in the files COPYING and Copyright.html. 
* + * COPYING can be found at the root of the source code distribution tree. * + * Or, see https://support.hdfgroup.org/products/licenses.html * + * If you do not have access to either file, you may request a copy from * + * help@hdfgroup.org. * + ****************************************************************************/ + +package hdf.object.h5; + +import java.util.List; + +import hdf.hdf5lib.structs.H5O_info_t; +import hdf.object.FileFormat; +import hdf.object.HObject; +import hdf.object.MetaDataContainer; + +/** + * An H5Link object represents an existing HDF5 object in file. + *

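Ahead of the details below, a small sketch of how such a link is typically inspected (the path is hypothetical; getLinkTargetName() is the static helper this patch adds to H5File):

    // inside some method that may throw Exception; "file" is an open H5File
    HObject link = file.get("/links/broken"); // a soft or external link
    // soft links yield the target path; external links yield
    // targetFile + FileFormat.FILE_OBJ_SEP + targetPath
    System.out.println(link.getFullName() + " -> " + H5File.getLinkTargetName(link));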
+ * H5Link object is an HDF5 object that is either a soft or an external link to + * an object in a file that does not exist. The type of the object is unknown. + * Once the object being linked to is created, and the type is known, then + * H5link object will change its type. + * + * @version 2.7.2 7/6/2010 + * @author Nidhi Gupta + */ + +public class H5Link extends HObject implements MetaDataContainer { + private static final long serialVersionUID = -8137277460521594367L; + + @SuppressWarnings("unused") + private H5O_info_t obj_info; + + /** + * Constructs an HDF5 link with specific name, path, and parent. + * + * @param theFile + * the file which containing the link. + * @param name + * the name of this link, e.g. "link1". + * @param path + * the full path of this link, e.g. "/groups/". + */ + public H5Link(FileFormat theFile, String name, String path) { + this (theFile, name, path, null); + } + + @SuppressWarnings("deprecation") + public H5Link(FileFormat theFile, String theName, String thePath, + long[] oid) { + super(theFile, theName, thePath, oid); + + obj_info = new H5O_info_t(-1L, -1L, -1, 0, -1L, 0L, 0L, 0L, 0L, null,null,null); + } + + @Override + public void close(long id) { + } + + @Override + public long open() { + return 0; + } + + @SuppressWarnings("rawtypes") + public List getMetadata() throws Exception { + + try{ + this.linkTargetObjName= H5File.getLinkTargetName(this); + }catch(Exception ex){ + } + + return null; + } + + public boolean hasAttribute() { + return false; + } + + public void removeMetadata(Object info) throws Exception { + } + + public void writeMetadata(Object info) throws Exception { + } + + public void updateMetadata(Object info) throws Exception { + } + + @SuppressWarnings("rawtypes") + public List getMetadata(int... attrPropList) throws Exception { + return null; + } + + /* + * (non-Javadoc) + * + * @see hdf.object.HObject#setName(java.lang.String) + */ + @Override + public void setName(String newName) throws Exception { + H5File.renameObject(this, newName); + super.setName(newName); + } +} diff --git a/src/main/java/hdf/object/h5/H5ScalarDS.java b/src/main/java/hdf/object/h5/H5ScalarDS.java new file mode 100644 index 0000000..f236a83 --- /dev/null +++ b/src/main/java/hdf/object/h5/H5ScalarDS.java @@ -0,0 +1,2491 @@ +/***************************************************************************** + * Copyright by The HDF Group. * + * Copyright by the Board of Trustees of the University of Illinois. * + * All rights reserved. * + * * + * This file is part of the HDF Java Products distribution. * + * The full copyright notice, including terms governing use, modification, * + * and redistribution, is contained in the files COPYING and Copyright.html. * + * COPYING can be found at the root of the source code distribution tree. * + * Or, see https://support.hdfgroup.org/products/licenses.html * + * If you do not have access to either file, you may request a copy from * + * help@hdfgroup.org. 
* + ****************************************************************************/ + +package hdf.object.h5; + +import java.lang.reflect.Array; +import java.text.DecimalFormat; +import java.util.List; +import java.util.Vector; + +import hdf.hdf5lib.H5; +import hdf.hdf5lib.HDF5Constants; +import hdf.hdf5lib.HDFNativeData; +import hdf.hdf5lib.exceptions.HDF5DataFiltersException; +import hdf.hdf5lib.exceptions.HDF5Exception; +import hdf.hdf5lib.exceptions.HDF5LibraryException; +import hdf.hdf5lib.structs.H5O_info_t; +import hdf.object.Attribute; +import hdf.object.Dataset; +import hdf.object.Datatype; +import hdf.object.FileFormat; +import hdf.object.Group; +import hdf.object.HObject; +import hdf.object.ScalarDS; + +/** + * H5ScalarDS describes a multi-dimension array of HDF5 scalar or atomic data types, such as byte, int, short, long, + * float, double and string, and operations performed on the scalar dataset. + *

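A minimal read sketch (the dataset path and its int element type are assumptions; "file" is an open H5File):

    H5ScalarDS dset = (H5ScalarDS) file.get("/arrays/ints2D");
    dset.init();                         // loads rank/dims, sets the default selection
    int[] flat = (int[]) dset.getData(); // one flat, row-major Java array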
+ * The library predefines a modest number of datatypes. For details, + * read HDF5 Datatypes + * + * @version 1.1 9/4/2007 + * @author Peter X. Cao + */ +public class H5ScalarDS extends ScalarDS { + private static final long serialVersionUID = 2887517608230611642L; + + private static final org.slf4j.Logger log = org.slf4j.LoggerFactory.getLogger(H5ScalarDS.class); + + /** + * The list of attributes of this data object. Members of the list are instance of Attribute. + */ + private List attributeList; + + private int nAttributes = -1; + + private H5O_info_t objInfo; + + /** + * The byte array containing references of palettes. Each reference requires eight bytes storage. Therefore, the + * array length is 8*numberOfPalettes. + */ + private byte[] paletteRefs; + + /** flag to indicate if the dataset is an external dataset */ + private boolean isExternal = false; + + /** flag to indicate is the dataset is a virtual dataset */ + private boolean isVirtual = false; + private List virtualNameList; + + /** + * flag to indicate if the datatype in file is the same as dataype in memory + */ + private boolean isNativeDatatype = false; + + /* + * Enum to indicate the type of I/O to perform inside of the common I/O + * function. + */ + protected enum IO_TYPE { + READ, WRITE + }; + + /** + * Constructs an instance of a H5 scalar dataset with given file, dataset name and path. + *

+ * For example, in H5ScalarDS(h5file, "dset", "/arrays/"), "dset" is the name of the dataset, "/arrays" is the group + * path of the dataset. + * + * @param theFile + * the file that contains the data object. + * @param theName + * the name of the data object, e.g. "dset". + * @param thePath + * the full path of the data object, e.g. "/arrays/". + */ + public H5ScalarDS(FileFormat theFile, String theName, String thePath) { + this(theFile, theName, thePath, null); + } + + /** + * @deprecated Not for public use in the future.
+ * Using {@link #H5ScalarDS(FileFormat, String, String)} + * + * @param theFile + * the file that contains the data object. + * @param theName + * the name of the data object, e.g. "dset". + * @param thePath + * the full path of the data object, e.g. "/arrays/". + * @param oid + * the oid of the data object. + */ + @Deprecated + public H5ScalarDS(FileFormat theFile, String theName, String thePath, long[] oid) { + super(theFile, theName, thePath, oid); + unsignedConverted = false; + paletteRefs = null; + objInfo = new H5O_info_t(-1L, -1L, 0, 0, -1L, 0L, 0L, 0L, 0L, null, null, null); + + if ((oid == null) && (theFile != null)) { + // retrieve the object ID + try { + byte[] refbuf = H5.H5Rcreate(theFile.getFID(), this.getFullName(), HDF5Constants.H5R_OBJECT, -1); + this.oid = new long[1]; + this.oid[0] = HDFNativeData.byteToLong(refbuf, 0); + } + catch (Exception ex) { + log.debug("constructor ID {} for {} failed H5Rcreate", theFile.getFID(), this.getFullName()); + } + } + } + + /* + * (non-Javadoc) + * + * @see hdf.object.HObject#open() + */ + @Override + public long open() { + log.trace("open(): start"); + long did = -1; + + try { + did = H5.H5Dopen(getFID(), getPath() + getName(), HDF5Constants.H5P_DEFAULT); + log.trace("open(): did={}", did); + } + catch (HDF5Exception ex) { + log.debug("open(): Failed to open dataset {}", getPath() + getName(), ex); + did = -1; + } + + log.trace("open(): finish"); + return did; + } + + /* + * (non-Javadoc) + * + * @see hdf.object.HObject#close(int) + */ + @Override + public void close(long did) { + log.trace("close(): start"); + + if (did >= 0) { + try { + H5.H5Fflush(did, HDF5Constants.H5F_SCOPE_LOCAL); + } + catch (Exception ex) { + log.debug("close(): H5Fflush(did {}) failure: ", did, ex); + } + try { + H5.H5Dclose(did); + } + catch (HDF5Exception ex) { + log.debug("close(): H5Dclose(did {}) failure: ", did, ex); + } + } + + log.trace("close(): finish"); + } + + /** + * Retrieves datatype and dataspace information from file and sets the dataset + * in memory. + *

+ * The init() is designed to support lazy operation in a dataset object. When a + * data object is retrieved from file, the datatype, dataspace and raw data are + * not loaded into memory. When it is asked to read the raw data from file, + * init() is first called to get the datatype and dataspace information, then + * load the raw data from file. + *

+ * init() is also used to reset the selection of a dataset (start, stride and
+ * count) to the default, which is the entire dataset for 1D or 2D datasets. In
+ * the following example, init() at step 1) retrieves datatype and dataspace
+ * information from file. getData() at step 3) reads only one data point. init()
+ * at step 4) resets the selection to the whole dataset. getData() at step 6)
+ * reads the values of the whole dataset into memory.
+ *
+ * <pre>
+     * dset = (Dataset) file.get(NAME_DATASET);
+     *
+     * // 1) get datatype and dataspace information from file
+     * dset.init();
+     * rank = dset.getRank(); // rank = 2, a 2D dataset
+     * count = dset.getSelectedDims();
+     * start = dset.getStartDims();
+     * dims = dset.getDims();
+     *
+     * // 2) select only one data point
+     * for (int i = 0; i < rank; i++) {
+     *     start[i] = 0;
+     *     count[i] = 1;
+     * }
+     *
+     * // 3) read one data point
+     * data = dset.getData();
+     *
+     * // 4) reset selection to the whole dataset
+     * dset.init();
+     *
+     * // 5) clean the memory data buffer
+     * dset.clearData();
+     *
+     * // 6) read the whole dataset
+     * data = dset.getData();
+     * </pre>
+ */ + @Override + public void init() { + log.trace("init(): start"); + + if (inited) { + resetSelection(); + log.trace("init(): Dataset already intialized"); + log.trace("init(): exit"); + return; // already called. Initialize only once + } + + long did = -1; + long tid = -1; + long sid = -1; + long nativeTID = -1; + + did = open(); + if (did >= 0) { + // check if it is an external or virtual dataset + long pid = -1; + try { + pid = H5.H5Dget_create_plist(did); + try { + int nfiles = H5.H5Pget_external_count(pid); + isExternal = (nfiles > 0); + int layoutType = H5.H5Pget_layout(pid); + if (isVirtual = (layoutType == HDF5Constants.H5D_VIRTUAL)) { + try { + long vmaps = H5.H5Pget_virtual_count(pid); + if (vmaps > 0) { + virtualNameList = new Vector<>(); + for (long next = 0; next < vmaps; next++) { + try { + String fname = H5.H5Pget_virtual_filename(pid, next); + virtualNameList.add(fname); + log.trace("init(): virtualNameList[{}]={}", next, fname); + } + catch (Exception err) { + log.trace("init(): vds[{}] continue", next); + continue; + } + } + } + } + catch (Exception err) { + log.debug("init(): vds count error: ", err); + } + } + log.trace("init(): pid={} nfiles={} isExternal={} isVirtual={}", pid, nfiles, isExternal, isVirtual); + } + catch (Exception ex) { + log.debug("init(): check if it is an external or virtual dataset: ", ex); + } + } + catch (Exception ex) { + log.debug("init(): H5Dget_create_plist: ", ex); + } + finally { + try { + H5.H5Pclose(pid); + } + catch (Exception ex) { + log.debug("init(): H5Pclose(pid {}) failure: ", pid, ex); + } + } + + paletteRefs = getPaletteRefs(did); + + try { + sid = H5.H5Dget_space(did); + rank = H5.H5Sget_simple_extent_ndims(sid); + tid = H5.H5Dget_type(did); + + log.trace("init(): tid={} sid={} rank={}", tid, sid, rank); + + try { + datatype = new H5Datatype(tid); + + log.trace("init(): tid={} is tclass={} has isText={} : isVLEN={} : isEnum={} : isUnsigned={} : isRegRef={}", + tid, datatype.getDatatypeClass(), ((H5Datatype) datatype).isText(), datatype.isVLEN(), + datatype.isEnum(), datatype.isUnsigned(), ((H5Datatype) datatype).isRegRef()); + } + catch (Exception ex) { + log.debug("init(): failed to create datatype for dataset: ", ex); + datatype = null; + } + + // Check if the datatype in the file is the native datatype + try { + nativeTID = H5.H5Tget_native_type(tid); + isNativeDatatype = H5.H5Tequal(tid, nativeTID); + log.trace("init(): isNativeDatatype={}", isNativeDatatype); + } + catch (Exception ex) { + log.debug("init(): check if native type failure: ", ex); + } + + try { + pid = H5.H5Dget_create_plist(did); + int[] fillStatus = { 0 }; + if (H5.H5Pfill_value_defined(pid, fillStatus) >= 0) { + // Check if fill value is user-defined before retrieving it. 
+ if (fillStatus[0] == HDF5Constants.H5D_FILL_VALUE_USER_DEFINED) { + try { + fillValue = H5Datatype.allocateArray((H5Datatype) datatype, 1); + } + catch (OutOfMemoryError e) { + log.debug("init(): out of memory: ", e); + fillValue = null; + } + catch (Exception ex) { + log.debug("init(): allocate fill value buffer failed: ", ex); + fillValue = null; + } + + log.trace("init(): fillValue={}", fillValue); + try { + H5.H5Pget_fill_value(pid, nativeTID, fillValue); + log.trace("init(): H5Pget_fill_value={}", fillValue); + if (fillValue != null) { + if (datatype.isUnsigned() && !isFillValueConverted) { + fillValue = ScalarDS.convertFromUnsignedC(fillValue, null); + isFillValueConverted = true; + } + + int n = Array.getLength(fillValue); + for (int i = 0; i < n; i++) + addFilteredImageValue((Number) Array.get(fillValue, i)); + } + } + catch (Exception ex2) { + log.debug("init(): fill value was defined: ", ex2); + fillValue = null; + } + } + } + } + catch (HDF5Exception ex) { + log.debug("init(): check if fill value is defined failure: ", ex); + } + finally { + try { + H5.H5Pclose(pid); + } + catch (Exception ex) { + log.debug("init(): H5Pclose(pid {}) failure: ", pid, ex); + } + } + + if (rank == 0) { + // a scalar data point + rank = 1; + dims = new long[1]; + dims[0] = 1; + log.trace("init(): rank is a scalar data point"); + } + else { + dims = new long[rank]; + maxDims = new long[rank]; + H5.H5Sget_simple_extent_dims(sid, dims, maxDims); + log.trace("init(): rank={}, dims={}, maxDims={}", rank, dims, maxDims); + } + + inited = true; + } + catch (HDF5Exception ex) { + log.debug("init(): ", ex); + } + finally { + try { + H5.H5Tclose(nativeTID); + } + catch (Exception ex2) { + log.debug("init(): H5Tclose(nativeTID {}) failure: ", nativeTID, ex2); + } + try { + H5.H5Tclose(tid); + } + catch (HDF5Exception ex2) { + log.debug("init(): H5Tclose(tid {}) failure: ", tid, ex2); + } + try { + H5.H5Sclose(sid); + } + catch (HDF5Exception ex2) { + log.debug("init(): H5Sclose(sid {}) failure: ", sid, ex2); + } + } + + // check for the type of image and interlace mode + // it is a true color image at one of three cases: + // 1) IMAGE_SUBCLASS = IMAGE_TRUECOLOR, + // 2) INTERLACE_MODE = INTERLACE_PIXEL, + // 3) INTERLACE_MODE = INTERLACE_PLANE + if ((rank >= 3) && isImage) { + interlace = -1; + isTrueColor = isStringAttributeOf(did, "IMAGE_SUBCLASS", "IMAGE_TRUECOLOR"); + + if (isTrueColor) { + interlace = INTERLACE_PIXEL; + if (isStringAttributeOf(did, "INTERLACE_MODE", "INTERLACE_PLANE")) { + interlace = INTERLACE_PLANE; + } + } + } + + log.trace("init(): close dataset"); + close(did); + + startDims = new long[rank]; + selectedDims = new long[rank]; + + resetSelection(); + } + else { + log.debug("init(): failed to open dataset"); + } + log.trace("init(): rank={}, startDims={}, selectedDims={}", rank, startDims, selectedDims); + log.trace("init(): finish"); + } + + /* + * (non-Javadoc) + * + * @see hdf.object.DataFormat#hasAttribute() + */ + @Override + public boolean hasAttribute() { + objInfo.num_attrs = nAttributes; + + if (objInfo.num_attrs < 0) { + long did = open(); + if (did >= 0) { + long tid = -1; + objInfo.num_attrs = 0; + + try { + objInfo = H5.H5Oget_info(did); + nAttributes = (int) objInfo.num_attrs; + + tid = H5.H5Dget_type(did); + H5Datatype dsDatatype = new H5Datatype(tid); + + log.trace("hasAttribute(): dataclass type: isText={},isVLEN={},isEnum={}", dsDatatype.isText(), dsDatatype.isVLEN(), dsDatatype.isEnum()); + } + catch (Exception ex) { + objInfo.num_attrs = 0; + 
log.debug("hasAttribute(): get object info: ", ex); + } + finally { + try { + H5.H5Tclose(tid); + } + catch (HDF5Exception ex) { + log.debug("hasAttribute(): H5Tclose(tid {}) failure: ", tid, ex); + } + } + + if(nAttributes > 0) { + // test if it is an image + // check image + Object avalue = getAttrValue(did, "CLASS"); + if (avalue != null) { + try { + isImageDisplay = isImage = "IMAGE".equalsIgnoreCase(new String((byte[]) avalue).trim()); + log.trace("hasAttribute(): isImageDisplay dataset: {} with value = {}", isImageDisplay, avalue); + } + catch (Exception err) { + log.debug("hasAttribute(): check image: ", err); + } + } + + // retrieve the IMAGE_MINMAXRANGE + avalue = getAttrValue(did, "IMAGE_MINMAXRANGE"); + if (avalue != null) { + double x0 = 0; + double x1 = 0; + try { + x0 = Double.parseDouble(Array.get(avalue, 0).toString()); + x1 = Double.parseDouble(Array.get(avalue, 1).toString()); + } + catch (Exception ex2) { + x0 = x1 = 0; + } + if (x1 > x0) { + imageDataRange = new double[2]; + imageDataRange[0] = x0; + imageDataRange[1] = x1; + } + } + + try { + checkCFconvention(did); + } + catch (Exception ex) { + log.debug("hasAttribute(): checkCFconvention(did {}):", did, ex); + } + } + close(did); + } + else { + log.debug("hasAttribute(): could not open dataset"); + } + } + + log.trace("hasAttribute(): nAttributes={}", objInfo.num_attrs); + return (objInfo.num_attrs > 0); + } + + /* + * (non-Javadoc) + * + * @see hdf.object.Dataset#getDatatype() + */ + @Override + public Datatype getDatatype() { + log.trace("getDatatype(): start"); + + if (!inited) + init(); + + if (datatype == null) { + long did = -1; + long tid = -1; + + log.trace("getDatatype(): datatype == null"); + + did = open(); + if (did >= 0) { + try { + tid = H5.H5Dget_type(did); + log.trace("getDatatype(): isNativeDatatype={}", isNativeDatatype); + if (!isNativeDatatype) { + long tmptid = -1; + try { + tmptid = H5Datatype.toNative(tid); + if (tmptid >= 0) { + try { + H5.H5Tclose(tid); + } + catch (Exception ex2) { + log.debug("getDatatype(): H5Tclose(tid {}) failure: ", tid, ex2); + } + tid = tmptid; + } + } + catch (Exception ex) { + log.debug("getDatatype(): toNative: ", ex); + } + } + datatype = new H5Datatype(tid); + } + catch (Exception ex) { + log.debug("getDatatype(): get datatype failure: ", ex); + } + finally { + try { + H5.H5Tclose(tid); + } + catch (HDF5Exception ex) { + log.debug("getDatatype(): H5Tclose(tid {}) failure: ", tid, ex); + } + try { + H5.H5Dclose(did); + } + catch (HDF5Exception ex) { + log.debug("getDatatype(): H5Dclose(did {}) failure: ", did, ex); + } + } + } + } + + if (isExternal) { + String pdir = this.getFileFormat().getAbsoluteFile().getParent(); + + if (pdir == null) { + pdir = "."; + } + System.setProperty("user.dir", pdir); + log.trace("getDatatype(): External dataset: user.dir={}", pdir); + } + + log.trace("getDatatype(): finish"); + return datatype; + } + + /* + * (non-Javadoc) + * + * @see hdf.object.Dataset#clear() + */ + @Override + public void clear() { + super.clear(); + + if (attributeList != null) { + ((Vector) attributeList).setSize(0); + } + } + + /* + * (non-Javadoc) + * + * @see hdf.object.Dataset#readBytes() + */ + @Override + public byte[] readBytes() throws HDF5Exception { + log.trace("readBytes(): start"); + + byte[] theData = null; + + if (!isInited()) + init(); + + long did = open(); + if (did >= 0) { + long fspace = -1; + long mspace = -1; + long tid = -1; + + try { + long[] lsize = { 1 }; + for (int j = 0; j < selectedDims.length; j++) { + lsize[0] *= 
selectedDims[j]; + } + + fspace = H5.H5Dget_space(did); + mspace = H5.H5Screate_simple(rank, selectedDims, null); + + // set the rectangle selection + // HDF5 bug: for scalar dataset, H5Sselect_hyperslab gives core dump + if (rank * dims[0] > 1) { + H5.H5Sselect_hyperslab(fspace, HDF5Constants.H5S_SELECT_SET, startDims, selectedStride, + selectedDims, null); // set block to 1 + } + + tid = H5.H5Dget_type(did); + long size = H5.H5Tget_size(tid) * lsize[0]; + log.trace("readBytes(): size = {}", size); + + if (size < Integer.MIN_VALUE || size > Integer.MAX_VALUE) throw new Exception("Invalid int size"); + + theData = new byte[(int)size]; + + log.trace("readBytes(): H5Dread: did={} tid={} fspace={} mspace={}", did, tid, fspace, mspace); + H5.H5Dread(did, tid, mspace, fspace, HDF5Constants.H5P_DEFAULT, theData); + } + catch (Exception ex) { + log.debug("readBytes(): failed to read data: ", ex); + } + finally { + try { + H5.H5Sclose(fspace); + } + catch (Exception ex2) { + log.debug("readBytes(): H5Sclose(fspace {}) failure: ", fspace, ex2); + } + try { + H5.H5Sclose(mspace); + } + catch (Exception ex2) { + log.debug("readBytes(): H5Sclose(mspace {}) failure: ", mspace, ex2); + } + try { + H5.H5Tclose(tid); + } + catch (HDF5Exception ex2) { + log.debug("readBytes(): H5Tclose(tid {}) failure: ", tid, ex2); + } + close(did); + } + } + + log.trace("readBytes(): finish"); + return theData; + } + + /** + * Reads the data from file. + *

+ * read() reads the data from file to a memory buffer and returns the memory + * buffer. The dataset object does not hold the memory buffer. To store the + * memory buffer in the dataset object, one must call getData(). + *

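In other words (a sketch; dset is an initialized H5ScalarDS as above):

    Object fresh  = dset.read();    // reads from file; the buffer is not retained
    Object cached = dset.getData(); // reads once, then returns the cached buffer
    dset.clearData();               // drops the cache so the next getData() re-reads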
+ * By default, the whole dataset is read into memory. Users can also select + * a subset to read. Subsetting is done in an implicit way. + *

+ * How to Select a Subset + *

+ * A selection is specified by three arrays: start, stride and count.
+ * <ol>
+ * <li>start: offset of a selection</li>
+ * <li>stride: determines how many elements to move in each dimension</li>
+ * <li>count: number of elements to select in each dimension</li>
+ * </ol>
+ * getStartDims(), getStride() and getSelectedDims() return the start,
+ * stride and count arrays, respectively. Applications can make a selection
+ * by changing the values of the arrays.
+ *

+ * The following example shows how to make a subset. In the example, the + * dataset is a 4-dimensional array of [200][100][50][10], i.e. dims[0]=200; + * dims[1]=100; dims[2]=50; dims[3]=10;
+ * We want to select every other data point in dims[1] and dims[2].
+ *
+ * <pre>
+     * int rank = dataset.getRank(); // number of dimensions of the dataset
+     * long[] dims = dataset.getDims(); // the dimension sizes of the dataset
+     * long[] selected = dataset.getSelectedDims(); // the selected size of the
+     *                                              // dataset
+     * long[] start = dataset.getStartDims(); // the offset of the selection
+     * long[] stride = dataset.getStride(); // the stride of the dataset
+     * int[] selectedIndex = dataset.getSelectedIndex(); // the selected
+     *                                                   // dimensions for
+     *                                                   // display
+     *
+     * // select dim1 and dim2 as 2D data for display, and slice through dim0
+     * selectedIndex[0] = 1;
+     * selectedIndex[1] = 2;
+     * selectedIndex[2] = 0;
+     *
+     * // reset the selection arrays
+     * for (int i = 0; i < rank; i++) {
+     *     start[i] = 0;
+     *     selected[i] = 1;
+     *     stride[i] = 1;
+     * }
+     *
+     * // set stride to 2 on dim1 and dim2 so that every other data point is
+     * // selected.
+     * stride[1] = 2;
+     * stride[2] = 2;
+     *
+     * // set the selection size of dim1 and dim2
+     * selected[1] = dims[1] / stride[1];
+     * selected[2] = dims[2] / stride[2];
+     *
+     * // when dataset.getData() is called, the selection above will be used,
+     * // since the dimension arrays are passed by reference. Changes of these
+     * // arrays outside the dataset object directly change the values of the
+     * // arrays in the dataset object.
+     * </pre>
+ *

+ * For ScalarDS, the memory data buffer is a one-dimensional array of byte, + * short, int, float, double or String type based on the datatype of the + * dataset. + *

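Because that buffer is flat, a 2D element has to be indexed by hand; a sketch, with a hypothetical int dataset and element position:

    int row = 2, col = 3;                     // element to fetch
    long[] selected = dset.getSelectedDims(); // current selection sizes
    int[] buf = (int[]) dset.getData();       // flat, row-major
    int value = buf[(int) (row * selected[1] + col)];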
+ * For CompoundDS, the memory data object is a java.util.List object. Each
+ * element of the list is a data array that corresponds to a compound field.
+ *

+ * For example, if compound dataset "comp" has the following nested
+ * structure, and member datatypes
+ *
+ * <pre>
+     * comp --> m01 (int)
+     * comp --> m02 (float)
+     * comp --> nest1 --> m11 (char)
+     * comp --> nest1 --> m12 (String)
+     * comp --> nest1 --> nest2 --> m21 (long)
+     * comp --> nest1 --> nest2 --> m22 (double)
+     * </pre>
+ * + * getData() returns a list of six arrays: {int[], float[], char[], + * String[], long[] and double[]}. + * + * @return the data read from file. + * + * @see #getData() + * @see hdf.object.DataFormat#read() + * + * @throws Exception + * if object can not be read + */ + @Override + public Object read() throws Exception { + log.trace("read(): start"); + + Object readData = null; + + if (!isInited()) + init(); + + try { + readData = scalarDatasetCommonIO(IO_TYPE.READ, null); + } + catch (Exception ex) { + log.debug("read(): failed to read scalar dataset: ", ex); + throw new Exception("Failed to read scalar dataset: " + ex.getMessage(), ex); + } + + log.trace("read(): finish"); + + return readData; + } + + /** + * Writes the given data buffer into this dataset in a file. + * + * @param buf + * The buffer that contains the data values. + * + * @throws Exception + * If there is an error at the HDF5 library level. + */ + @Override + public void write(Object buf) throws Exception { + log.trace("write(): start"); + + if (this.getFileFormat().isReadOnly()) + throw new Exception("cannot write to scalar dataset in file opened as read-only"); + + if (!isInited()) + init(); + + try { + scalarDatasetCommonIO(IO_TYPE.WRITE, buf); + } + catch (Exception ex) { + log.debug("write(): failed to write to scalar dataset: ", ex); + throw new Exception("Failed to write to scalar dataset: " + ex.getMessage(), ex); + } + + log.trace("write(): finish"); + } + + private Object scalarDatasetCommonIO(IO_TYPE ioType, Object writeBuf) throws Exception { + log.trace("scalarDatasetCommonIO(): start"); + + H5Datatype dsDatatype = (H5Datatype) getDatatype(); + Object theData = null; + + /* + * I/O type-specific pre-initialization. + */ + if (ioType == IO_TYPE.WRITE) { + if (writeBuf == null) { + log.debug("scalarDatasetCommonIO(): writeBuf is null"); + log.trace("scalarDatasetCommonIO(): exit"); + throw new Exception("write buffer is null"); + } + + /* + * Check for any unsupported datatypes and fail early before + * attempting to write to the dataset. + */ + if (dsDatatype.isVLEN() && !dsDatatype.isText()) { + log.debug("scalarDatasetCommonIO(): Cannot write non-string variable-length data"); + log.trace("scalarDatasetCommonIO(): exit"); + throw new HDF5Exception("Writing non-string variable-length data is not supported"); + } + + if (dsDatatype.isRegRef()) { + log.debug("scalarDatasetCommonIO(): Cannot write region reference data"); + log.trace("scalarDatasetCommonIO(): exit"); + throw new HDF5Exception("Writing region reference data is not supported"); + } + } + + log.trace("scalarDatasetCommonIO(): open dataset"); + + long did = open(); + if (did >= 0) { + long[] spaceIDs = { -1, -1 }; // spaceIDs[0]=mspace, spaceIDs[1]=fspace + + try { + /* + * NOTE: this call sets up a hyperslab selection in the file according to the + * current selection in the dataset object. 
+ */ + long totalSelectedSpacePoints = H5Utils.getTotalSelectedSpacePoints(did, dims, startDims, + selectedStride, selectedDims, spaceIDs); + + if (ioType == IO_TYPE.READ) { + if ((originalBuf == null) || dsDatatype.isEnum() || dsDatatype.isText() || dsDatatype.isRefObj() + || ((originalBuf != null) && (totalSelectedSpacePoints != nPoints))) { + try { + theData = H5Datatype.allocateArray(dsDatatype, (int) totalSelectedSpacePoints); + } + catch (OutOfMemoryError err) { + log.debug("scalarDatasetCommonIO(): Out of memory"); + log.trace("scalarDatasetCommonIO(): exit"); + throw new HDF5Exception("Out Of Memory"); + } + } + else { + // reuse the buffer if the size is the same + theData = originalBuf; + } + + if (theData != null) { + /* + * Actually read the data now that everything has been setup. + */ + long tid = -1; + try { + tid = dsDatatype.createNative(); + + if (dsDatatype.isVLEN() || (dsDatatype.isArray() && dsDatatype.getDatatypeBase().isVLEN())) { + log.trace("scalarDatasetCommonIO(): H5DreadVL did={} tid={} spaceIDs[0]={} spaceIDs[1]={}", + did, tid, (spaceIDs[0] == HDF5Constants.H5P_DEFAULT) ? "H5P_DEFAULT" : spaceIDs[0], + (spaceIDs[1] == HDF5Constants.H5P_DEFAULT) ? "H5P_DEFAULT" : spaceIDs[1]); + + H5.H5DreadVL(did, tid, spaceIDs[0], spaceIDs[1], HDF5Constants.H5P_DEFAULT, + (Object[]) theData); + } + else { + log.trace("scalarDatasetCommonIO(): H5Dread did={} tid={} spaceIDs[0]={} spaceIDs[1]={}", + did, tid, (spaceIDs[0] == HDF5Constants.H5P_DEFAULT) ? "H5P_DEFAULT" : spaceIDs[0], + (spaceIDs[1] == HDF5Constants.H5P_DEFAULT) ? "H5P_DEFAULT" : spaceIDs[1]); + + H5.H5Dread(did, tid, spaceIDs[0], spaceIDs[1], HDF5Constants.H5P_DEFAULT, theData); + } + } + catch (HDF5DataFiltersException exfltr) { + log.debug("scalarDatasetCommonIO(): read failure: ", exfltr); + log.trace("scalarDatasetCommonIO(): exit"); + throw new Exception("Filter not available exception: " + exfltr.getMessage(), exfltr); + } + catch (Exception ex) { + log.debug("scalarDatasetCommonIO(): read failure: ", ex); + log.trace("scalarDatasetCommonIO(): exit"); + throw new Exception(ex.getMessage(), ex); + } + finally { + dsDatatype.close(tid); + } + + /* + * Perform any necessary data conversions. + */ + if (dsDatatype.isText() && convertByteToString && (theData instanceof byte[])) { + log.trace("scalarDatasetCommonIO(): isText: converting byte array to string array"); + theData = byteToString((byte[]) theData, (int) dsDatatype.getDatatypeSize()); + } + else if (dsDatatype.isRefObj()) { + log.trace("scalarDatasetCommonIO(): isREF: converting byte array to long array"); + theData = HDFNativeData.byteToLong((byte[]) theData); + } + } + } // IO_TYPE.READ + else { + /* + * Perform any necessary data conversions before writing the data. + * + * Note that v-len strings do not get converted, regardless of + * conversion request type. 
+ */ + Object tmpData = writeBuf; + try { + // Check if we need to convert integer data + int tsize = (int) dsDatatype.getDatatypeSize(); + String cname = writeBuf.getClass().getName(); + char dname = cname.charAt(cname.lastIndexOf("[") + 1); + boolean doIntConversion = (((tsize == 1) && (dname == 'S')) || ((tsize == 2) && (dname == 'I')) + || ((tsize == 4) && (dname == 'J')) || (dsDatatype.isUnsigned() && unsignedConverted)); + + if (doIntConversion) { + log.trace("scalarDatasetCommonIO(): converting integer data to unsigned C-type integers"); + tmpData = convertToUnsignedC(writeBuf, null); + } + else if (dsDatatype.isText() && !dsDatatype.isVarStr() && convertByteToString) { + log.trace("scalarDatasetCommonIO(): converting string array to byte array"); + tmpData = stringToByte((String[]) writeBuf, tsize); + } + else if (dsDatatype.isEnum() && (Array.get(writeBuf, 0) instanceof String)) { + log.trace("scalarDatasetCommonIO(): converting enum names to values"); + tmpData = dsDatatype.convertEnumNameToValue((String[]) writeBuf); + } + } + catch (Exception ex) { + log.debug("scalarDatasetCommonIO(): data conversion failure: ", ex); + log.trace("scalarDatasetCommonIO(): exit"); + throw new Exception("data conversion failure: " + ex.getMessage()); + } + + /* + * Actually write the data now that everything has been setup. + */ + long tid = -1; + try { + tid = dsDatatype.createNative(); + + if (dsDatatype.isVLEN() || (dsDatatype.isArray() && dsDatatype.getDatatypeBase().isVLEN())) { + log.trace("scalarDatasetCommonIO(): H5DwriteVL did={} tid={} spaceIDs[0]={} spaceIDs[1]={}", + did, tid, (spaceIDs[0] == HDF5Constants.H5P_DEFAULT) ? "H5P_DEFAULT" : spaceIDs[0], + (spaceIDs[1] == HDF5Constants.H5P_DEFAULT) ? "H5P_DEFAULT" : spaceIDs[1]); + + H5.H5DwriteVL(did, tid, spaceIDs[0], spaceIDs[1], HDF5Constants.H5P_DEFAULT, (Object[]) tmpData); + } + else { + log.trace("scalarDatasetCommonIO(): H5Dwrite did={} tid={} spaceIDs[0]={} spaceIDs[1]={}", + did, tid, (spaceIDs[0] == HDF5Constants.H5P_DEFAULT) ? "H5P_DEFAULT" : spaceIDs[0], + (spaceIDs[1] == HDF5Constants.H5P_DEFAULT) ? "H5P_DEFAULT" : spaceIDs[1]); + + H5.H5Dwrite(did, tid, spaceIDs[0], spaceIDs[1], HDF5Constants.H5P_DEFAULT, tmpData); + } + } + catch (Exception ex) { + log.debug("scalarDatasetCommonIO(): write failure: ", ex); + log.trace("scalarDatasetCommonIO(): exit"); + throw new Exception(ex.getMessage()); + } + finally { + dsDatatype.close(tid); + } + } // IO_TYPE.WRITE + } + finally { + if (HDF5Constants.H5S_ALL != spaceIDs[0]) { + try { + H5.H5Sclose(spaceIDs[0]); + } + catch (Exception ex) { + log.debug("scalarDatasetCommonIO(): H5Sclose(spaceIDs[0] {}) failure: ", spaceIDs[0], ex); + } + } + + if (HDF5Constants.H5S_ALL != spaceIDs[1]) { + try { + H5.H5Sclose(spaceIDs[1]); + } + catch (Exception ex) { + log.debug("scalarDatasetCommonIO(): H5Sclose(spaceIDs[1] {}) failure: ", spaceIDs[1], ex); + } + } + + close(did); + } + } + else + log.debug("scalarDatasetCommonIO(): failed to open dataset"); + + log.trace("scalarDatasetCommonIO(): finish"); + + return theData; + } + + /* + * (non-Javadoc) + * + * @see hdf.object.DataFormat#getMetadata() + */ + @Override + public List getMetadata() throws HDF5Exception { + return this.getMetadata(fileFormat.getIndexType(null), fileFormat.getIndexOrder(null)); + } + + /* + * (non-Javadoc) + * + * @see hdf.object.DataFormat#getMetadata(int...) + */ + public List getMetadata(int... 
attrPropList) throws HDF5Exception { + log.trace("getMetadata(): start"); + + if (!isInited()) { + init(); + log.trace("getMetadata(): inited"); + } + + try { + this.linkTargetObjName = H5File.getLinkTargetName(this); + } + catch (Exception ex) { + log.debug("getMetadata(): getLinkTargetName failed: ", ex); + } + + if (attributeList != null) { + log.trace("getMetadata(): attributeList != null"); + log.trace("getMetadata(): finish"); + return attributeList; + } + + long did = -1; + long pcid = -1; + long paid = -1; + int indxType = fileFormat.getIndexType(null); + int order = fileFormat.getIndexOrder(null); + + // load attributes first + if (attrPropList.length > 0) { + indxType = attrPropList[0]; + if (attrPropList.length > 1) { + order = attrPropList[1]; + } + } + + attributeList = H5File.getAttribute(this, indxType, order); + log.trace("getMetadata(): attributeList loaded"); + + log.trace("getMetadata(): open dataset"); + did = open(); + if (did >= 0) { + log.trace("getMetadata(): dataset opened"); + try { + // get the compression and chunk information + pcid = H5.H5Dget_create_plist(did); + paid = H5.H5Dget_access_plist(did); + long storageSize = H5.H5Dget_storage_size(did); + int nfilt = H5.H5Pget_nfilters(pcid); + int layoutType = H5.H5Pget_layout(pcid); + + storageLayout.setLength(0); + compression.setLength(0); + + if (layoutType == HDF5Constants.H5D_CHUNKED) { + chunkSize = new long[rank]; + H5.H5Pget_chunk(pcid, rank, chunkSize); + int n = chunkSize.length; + storageLayout.append("CHUNKED: ").append(chunkSize[0]); + for (int i = 1; i < n; i++) { + storageLayout.append(" X ").append(chunkSize[i]); + } + + if (nfilt > 0) { + long nelmts = 1; + long uncompSize; + long datumSize = getDatatype().getDatatypeSize(); + + if (datumSize < 0) { + long tmptid = -1; + try { + tmptid = H5.H5Dget_type(did); + datumSize = H5.H5Tget_size(tmptid); + } + finally { + try { + H5.H5Tclose(tmptid); + } + catch (Exception ex2) { + log.debug("getMetadata(): H5Tclose(tmptid {}) failure: ", tmptid, ex2); + } + } + } + + for (int i = 0; i < rank; i++) { + nelmts *= dims[i]; + } + uncompSize = nelmts * datumSize; + + /* compression ratio = uncompressed size / compressed size */ + + if (storageSize != 0) { + double ratio = (double) uncompSize / (double) storageSize; + DecimalFormat df = new DecimalFormat(); + df.setMinimumFractionDigits(3); + df.setMaximumFractionDigits(3); + compression.append(df.format(ratio)).append(":1"); + } + } + } + else if (layoutType == HDF5Constants.H5D_COMPACT) { + storageLayout.append("COMPACT"); + } + else if (layoutType == HDF5Constants.H5D_CONTIGUOUS) { + storageLayout.append("CONTIGUOUS"); + if (H5.H5Pget_external_count(pcid) > 0) + storageLayout.append(" - EXTERNAL "); + } + else if (layoutType == HDF5Constants.H5D_VIRTUAL) { + storageLayout.append("VIRTUAL - "); + try { + long vmaps = H5.H5Pget_virtual_count(pcid); + try { + int virtView = H5.H5Pget_virtual_view(paid); + long virtGap = H5.H5Pget_virtual_printf_gap(paid); + if (virtView == HDF5Constants.H5D_VDS_FIRST_MISSING) + storageLayout.append("First Missing"); + else + storageLayout.append("Last Available"); + storageLayout.append("\nGAP : " + virtGap); + } + catch (Exception err) { + log.debug("getMetadata(): vds error: ", err); + storageLayout.append("ERROR"); + } + + storageLayout.append("\nMAPS : " + vmaps); + if (vmaps > 0) { + for (long next = 0; next < vmaps; next++) { + try { + H5.H5Pget_virtual_vspace(pcid, next); + H5.H5Pget_virtual_srcspace(pcid, next); + String fname = H5.H5Pget_virtual_filename(pcid, next); 
+ String dsetname = H5.H5Pget_virtual_dsetname(pcid, next); + storageLayout.append("\n").append(fname).append(" : ").append(dsetname); + } + catch (Exception err) { + log.debug("getMetadata(): vds space[{}] error: ", next, err); + log.trace("getMetadata(): vds[{}] continue", next); + storageLayout.append("ERROR"); + } + } + } + } + catch (Exception err) { + log.debug("getMetadata(): vds count error: ", err); + storageLayout.append("ERROR"); + } + } + else { + chunkSize = null; + storageLayout.append("NONE"); + } + + int[] flags = { 0, 0 }; + long[] cdNelmts = { 20 }; + int[] cdValues = new int[(int) cdNelmts[0]]; + String[] cdName = { "", "" }; + log.trace("getMetadata(): {} filters in pipeline", nfilt); + int filter = -1; + int[] filterConfig = { 1 }; + + filters.setLength(0); + + if (nfilt == 0) { + filters.append("NONE"); + } + else { + for (int i = 0, k = 0; i < nfilt; i++) { + log.trace("getMetadata(): filter[{}]", i); + if (i > 0) { + filters.append(", "); + } + if (k > 0) { + compression.append(", "); + } + + try { + cdNelmts[0] = 20; + cdValues = new int[(int) cdNelmts[0]]; + cdValues = new int[(int) cdNelmts[0]]; + filter = H5.H5Pget_filter(pcid, i, flags, cdNelmts, cdValues, 120, cdName, filterConfig); + log.trace("getMetadata(): filter[{}] is {} has {} elements ", i, cdName[0], cdNelmts[0]); + for (int j = 0; j < cdNelmts[0]; j++) { + log.trace("getMetadata(): filter[{}] element {} = {}", i, j, cdValues[j]); + } + } + catch (Exception err) { + log.debug("getMetadata(): filter[{}] error: ", i, err); + log.trace("getMetadata(): filter[{}] continue", i); + filters.append("ERROR"); + continue; + } + + if (filter == HDF5Constants.H5Z_FILTER_NONE) { + filters.append("NONE"); + } + else if (filter == HDF5Constants.H5Z_FILTER_DEFLATE) { + filters.append("GZIP"); + compression.append(COMPRESSION_GZIP_TXT + cdValues[0]); + k++; + } + else if (filter == HDF5Constants.H5Z_FILTER_FLETCHER32) { + filters.append("Error detection filter"); + } + else if (filter == HDF5Constants.H5Z_FILTER_SHUFFLE) { + filters.append("SHUFFLE: Nbytes = ").append(cdValues[0]); + } + else if (filter == HDF5Constants.H5Z_FILTER_NBIT) { + filters.append("NBIT"); + } + else if (filter == HDF5Constants.H5Z_FILTER_SCALEOFFSET) { + filters.append("SCALEOFFSET: MIN BITS = ").append(cdValues[0]); + } + else if (filter == HDF5Constants.H5Z_FILTER_SZIP) { + filters.append("SZIP"); + compression.append("SZIP: Pixels per block = ").append(cdValues[1]); + k++; + int flag = -1; + try { + flag = H5.H5Zget_filter_info(filter); + } + catch (Exception ex) { + log.debug("getMetadata(): H5Zget_filter_info failure: ", ex); + flag = -1; + } + if (flag == HDF5Constants.H5Z_FILTER_CONFIG_DECODE_ENABLED) { + compression.append(": H5Z_FILTER_CONFIG_DECODE_ENABLED"); + } + else if ((flag == HDF5Constants.H5Z_FILTER_CONFIG_ENCODE_ENABLED) + || (flag >= (HDF5Constants.H5Z_FILTER_CONFIG_ENCODE_ENABLED + HDF5Constants.H5Z_FILTER_CONFIG_DECODE_ENABLED))) { + compression.append(": H5Z_FILTER_CONFIG_ENCODE_ENABLED"); + } + } + else { + filters.append("USERDEFINED ").append(cdName[0]).append("(").append(filter).append("): "); + for (int j = 0; j < cdNelmts[0]; j++) { + if (j > 0) + filters.append(", "); + filters.append(cdValues[j]); + } + log.debug("getMetadata(): filter[{}] is user defined compression", i); + } + } // (int i=0; i= 0) { + try { + H5.H5Adelete(did, attr.getName()); + List attrList = getMetadata(); + attrList.remove(attr); + nAttributes = attrList.size(); + } + finally { + close(did); + } + } + log.trace("removeMetadata(): finish"); 
+
+    /*
+     * (non-Javadoc)
+     *
+     * @see hdf.object.DataFormat#updateMetadata(java.lang.Object)
+     */
+    @Override
+    public void updateMetadata(Object info) throws HDF5Exception {
+        log.trace("updateMetadata(): start");
+        // only attribute metadata is supported.
+        if (!(info instanceof Attribute)) {
+            log.debug("updateMetadata(): Object not an Attribute");
+            log.trace("updateMetadata(): finish");
+            return;
+        }
+
+        nAttributes = -1;
+        log.trace("updateMetadata(): finish");
+    }
+
+    /*
+     * (non-Javadoc)
+     *
+     * @see hdf.object.HObject#setName(java.lang.String)
+     */
+    @Override
+    public void setName(String newName) throws Exception {
+        H5File.renameObject(this, newName);
+        super.setName(newName);
+    }
+
+    /**
+     * Resets the selection of the dataspace.
+     */
+    private void resetSelection() {
+        log.trace("resetSelection(): start");
+
+        for (int i = 0; i < rank; i++) {
+            startDims[i] = 0;
+            selectedDims[i] = 1;
+            if (selectedStride != null) {
+                selectedStride[i] = 1;
+            }
+        }
+
+        if (interlace == INTERLACE_PIXEL) {
+            // 24-bit TRUE color image
+            // [height][width][pixel components]
+            selectedDims[2] = 3;
+            selectedDims[0] = dims[0];
+            selectedDims[1] = dims[1];
+            selectedIndex[0] = 0; // index for height
+            selectedIndex[1] = 1; // index for width
+            selectedIndex[2] = 2; // index for depth
+        }
+        else if (interlace == INTERLACE_PLANE) {
+            // 24-bit TRUE color image
+            // [pixel components][height][width]
+            selectedDims[0] = 3;
+            selectedDims[1] = dims[1];
+            selectedDims[2] = dims[2];
+            selectedIndex[0] = 1; // index for height
+            selectedIndex[1] = 2; // index for width
+            selectedIndex[2] = 0; // index for depth
+        }
+        else if (rank == 1) {
+            selectedIndex[0] = 0;
+            selectedDims[0] = dims[0];
+        }
+        else if (rank == 2) {
+            selectedIndex[0] = 0;
+            selectedIndex[1] = 1;
+            selectedDims[0] = dims[0];
+            selectedDims[1] = dims[1];
+        }
+        else if (rank > 2) {
+            // hdf-java 2.5 version: 3D dataset is arranged in the order of
+            // [frame][height][width] by default
+            // selectedIndex[1] = rank-1; // width, the fastest dimension
+            // selectedIndex[0] = rank-2; // height
+            // selectedIndex[2] = rank-3; // frames
+
+            // (5/4/09) Modified the default dimension order. See bug#1379
+            // We change the default order to the following. In most situations,
+            // users want to use the natural order of
+            // selectedIndex[0] = 0
+            // selectedIndex[1] = 1
+            // selectedIndex[2] = 2
+            // Most NPOESS data is in the order above.
+
+            if (isImage) {
+                // 3D dataset is arranged in the order of [frame][height][width]
+                selectedIndex[1] = rank - 1; // width, the fastest dimension
+                selectedIndex[0] = rank - 2; // height
+                selectedIndex[2] = rank - 3; // frames
+            }
+            else {
+                selectedIndex[0] = 0; // width, the fastest dimension
+                selectedIndex[1] = 1; // height
+                selectedIndex[2] = 2; // frames
+            }
+
+            selectedDims[selectedIndex[0]] = dims[selectedIndex[0]];
+            selectedDims[selectedIndex[1]] = dims[selectedIndex[1]];
+            selectedDims[selectedIndex[2]] = dims[selectedIndex[2]];
+        }
+
+        isDataLoaded = false;
+
+        if ((rank > 1) && (selectedIndex[0] > selectedIndex[1]))
+            isDefaultImageOrder = false;
+        else
+            isDefaultImageOrder = true;
+
+        log.trace("resetSelection(): finish");
+    }
+
+    public static Dataset create(String name, Group pgroup, Datatype type, long[] dims, long[] maxdims,
+            long[] chunks, int gzip, Object data) throws Exception {
+        return create(name, pgroup, type, dims, maxdims, chunks, gzip, null, data);
+    }
+
+    /**
+     * Creates a scalar dataset in a file with/without chunking and compression.
+     * <p>
+     * The following example shows how to create a string dataset using this function.
+     *
+     * <pre>
+     * H5File file = new H5File("test.h5", H5File.CREATE);
+     * int max_str_len = 120;
+     * Datatype strType = new H5Datatype(Datatype.CLASS_STRING, max_str_len, Datatype.NATIVE, Datatype.NATIVE);
+     * int size = 10000;
+     * long dims[] = { size };
+     * long chunks[] = { 1000 };
+     * int gzip = 9;
+     * String strs[] = new String[size];
+     *
+     * for (int i = 0; i < size; i++)
+     *     strs[i] = String.valueOf(i);
+     *
+     * file.open();
+     * file.createScalarDS("/1D scalar strings", null, strType, dims, null, chunks, gzip, strs);
+     *
+     * try {
+     *     file.close();
+     * }
+     * catch (Exception ex) {
+     * }
+     * </pre>
+ * + * @param name + * the name of the dataset to create. + * @param pgroup + * parent group where the new dataset is created. + * @param type + * the datatype of the dataset. + * @param dims + * the dimension size of the dataset. + * @param maxdims + * the max dimension size of the dataset. maxdims is set to dims if maxdims = null. + * @param chunks + * the chunk size of the dataset. No chunking if chunk = null. + * @param gzip + * GZIP compression level (1 to 9). No compression if gzip<=0. + * @param fillValue + * the default data value. + * @param data + * the array of data values. + * + * @return the new scalar dataset if successful; otherwise returns null. + * + * @throws Exception if there is a failure. + */ + public static Dataset create(String name, Group pgroup, Datatype type, long[] dims, long[] maxdims, + long[] chunks, int gzip, Object fillValue, Object data) throws Exception { + log.trace("create(): start"); + + H5ScalarDS dataset = null; + String fullPath = null; + long did = -1; + long plist = -1; + long sid = -1; + long tid = -1; + + if ((pgroup == null) || (name == null) || (dims == null) || ((gzip > 0) && (chunks == null))) { + log.debug("create(): one or more parameters are null"); + log.trace("create(): exit"); + return null; + } + + H5File file = (H5File) pgroup.getFileFormat(); + if (file == null) { + log.debug("create(): parent group FileFormat is null"); + log.trace("create(): exit"); + return null; + } + + String path = HObject.SEPARATOR; + if (!pgroup.isRoot()) { + path = pgroup.getPath() + pgroup.getName() + HObject.SEPARATOR; + if (name.endsWith("/")) { + name = name.substring(0, name.length() - 1); + } + int idx = name.lastIndexOf('/'); + if (idx >= 0) { + name = name.substring(idx + 1); + } + } + + fullPath = path + name; + log.trace("create(): fullPath={}", fullPath); + + // setup chunking and compression + boolean isExtentable = false; + if (maxdims != null) { + for (int i = 0; i < maxdims.length; i++) { + if (maxdims[i] == 0) { + maxdims[i] = dims[i]; + } + else if (maxdims[i] < 0) { + maxdims[i] = HDF5Constants.H5S_UNLIMITED; + } + + if (maxdims[i] != dims[i]) { + isExtentable = true; + } + } + } + + // HDF5 requires you to use chunking in order to define extendible + // datasets. Chunking makes it possible to extend datasets efficiently, + // without having to reorganize storage excessively. 
Using default size + // of 64x...which has good performance + if ((chunks == null) && isExtentable) { + chunks = new long[dims.length]; + for (int i = 0; i < dims.length; i++) + chunks[i] = Math.min(dims[i], 64); + } + + // prepare the dataspace and datatype + int rank = dims.length; + log.trace("create(): rank={}", rank); + + if ((tid = type.createNative()) >= 0) { + log.trace("create(): createNative={}", tid); + try { + sid = H5.H5Screate_simple(rank, dims, maxdims); + log.trace("create(): H5Screate_simple={}", sid); + + // figure out creation properties + plist = HDF5Constants.H5P_DEFAULT; + + byte[] valFill = null; + try { + valFill = parseFillValue(type, fillValue); + } + catch (Exception ex) { + log.debug("create(): parse fill value: ", ex); + } + log.trace("create(): parseFillValue={}", valFill); + + if (chunks != null || valFill != null) { + plist = H5.H5Pcreate(HDF5Constants.H5P_DATASET_CREATE); + + if (chunks != null) { + H5.H5Pset_layout(plist, HDF5Constants.H5D_CHUNKED); + H5.H5Pset_chunk(plist, rank, chunks); + + // compression requires chunking + if (gzip > 0) { + H5.H5Pset_deflate(plist, gzip); + } + } + + if (valFill != null) { + H5.H5Pset_fill_value(plist, tid, valFill); + } + } + + long fid = file.getFID(); + + log.trace("create(): create dataset fid={}", fid); + did = H5.H5Dcreate(fid, fullPath, tid, sid, HDF5Constants.H5P_DEFAULT, plist, HDF5Constants.H5P_DEFAULT); + log.trace("create(): create dataset did={}", did); + dataset = new H5ScalarDS(file, name, path); + } + finally { + try { + H5.H5Pclose(plist); + } + catch (HDF5Exception ex) { + log.debug("create(): H5Pclose(plist {}) failure: ", plist, ex); + } + try { + H5.H5Sclose(sid); + } + catch (HDF5Exception ex) { + log.debug("create(): H5Sclose(sid {}) failure: ", sid, ex); + } + try { + H5.H5Tclose(tid); + } + catch (HDF5Exception ex) { + log.debug("create(): H5Tclose(tid {}) failure: ", tid, ex); + } + try { + H5.H5Dclose(did); + } + catch (HDF5Exception ex) { + log.debug("create(): H5Dclose(did {}) failure: ", did, ex); + } + } + } + + log.trace("create(): dataset created"); + + if (dataset != null) { + pgroup.addToMemberList(dataset); + if (data != null) { + dataset.init(); + long[] selected = dataset.getSelectedDims(); + for (int i = 0; i < rank; i++) { + selected[i] = dims[i]; + } + dataset.write(data); + } + } + log.trace("create(): finish"); + return dataset; + } + + // check _FillValue, valid_min, valid_max, and valid_range + private void checkCFconvention(long oid) throws Exception { + log.trace("checkCFconvention(): start"); + + Object avalue = getAttrValue(oid, "_FillValue"); + + if (avalue != null) { + int n = Array.getLength(avalue); + for (int i = 0; i < n; i++) + addFilteredImageValue((Number) Array.get(avalue, i)); + } + + if (imageDataRange == null || imageDataRange[1] <= imageDataRange[0]) { + double x0 = 0; + double x1 = 0; + avalue = getAttrValue(oid, "valid_range"); + if (avalue != null) { + try { + x0 = Double.parseDouble(Array.get(avalue, 0).toString()); + x1 = Double.parseDouble(Array.get(avalue, 1).toString()); + imageDataRange = new double[2]; + imageDataRange[0] = x0; + imageDataRange[1] = x1; + return; + } + catch (Exception ex) { + log.debug("checkCFconvention(): valid_range: ", ex); + } + } + + avalue = getAttrValue(oid, "valid_min"); + if (avalue != null) { + try { + x0 = Double.parseDouble(Array.get(avalue, 0).toString()); + } + catch (Exception ex) { + log.debug("checkCFconvention(): valid_min: ", ex); + } + avalue = getAttrValue(oid, "valid_max"); + if (avalue != null) { + try { + 
x1 = Double.parseDouble(Array.get(avalue, 0).toString()); + imageDataRange = new double[2]; + imageDataRange[0] = x0; + imageDataRange[1] = x1; + } + catch (Exception ex) { + log.debug("checkCFconvention(): valid_max:", ex); + } + } + } + } // (imageDataRange==null || imageDataRange[1]<=imageDataRange[0]) + log.trace("checkCFconvention(): finish"); + } + + private Object getAttrValue(long oid, String aname) { + log.trace("getAttrValue(): start: name={}", aname); + + long aid = -1; + long atid = -1; + long asid = -1; + Object avalue = null; + + try { + // try to find attribute name + if(H5.H5Aexists_by_name(oid, ".", aname, HDF5Constants.H5P_DEFAULT)) + aid = H5.H5Aopen_by_name(oid, ".", aname, HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT); + } + catch (HDF5LibraryException ex5) { + log.debug("getAttrValue(): Failed to find attribute {} : Expected", aname); + } + catch (Exception ex) { + log.debug("getAttrValue(): try to find attribute {}:", aname, ex); + } + if (aid > 0) { + try { + atid = H5.H5Aget_type(aid); + long tmptid = atid; + atid = H5.H5Tget_native_type(tmptid); + try { + H5.H5Tclose(tmptid); + } + catch (Exception ex) { + log.debug("getAttrValue(): H5Tclose(tmptid {}) failure: ", tmptid, ex); + } + H5Datatype dsDatatype = new H5Datatype(atid); + + asid = H5.H5Aget_space(aid); + long adims[] = null; + + int arank = H5.H5Sget_simple_extent_ndims(asid); + if (arank > 0) { + adims = new long[arank]; + H5.H5Sget_simple_extent_dims(asid, adims, null); + } + log.trace("getAttrValue(): adims={}", adims); + + // retrieve the attribute value + long lsize = 1; + if (adims != null) { + for (int j = 0; j < adims.length; j++) { + lsize *= adims[j]; + } + } + log.trace("getAttrValue(): lsize={}", lsize); + + if (lsize < Integer.MIN_VALUE || lsize > Integer.MAX_VALUE) throw new Exception("Invalid int size"); + + try { + avalue = H5Datatype.allocateArray(dsDatatype, (int) lsize); + } + catch (OutOfMemoryError e) { + log.debug("getAttrValue(): out of memory: ", e); + avalue = null; + } + + if (avalue != null) { + log.trace("getAttrValue(): read attribute id {} of size={}", atid, lsize); + H5.H5Aread(aid, atid, avalue); + + if (dsDatatype.isUnsigned()) { + log.trace("getAttrValue(): id {} is unsigned", atid); + avalue = convertFromUnsignedC(avalue, null); + } + } + } + catch (Exception ex) { + log.debug("getAttrValue(): try to get value for attribute {}: ", aname, ex); + } + finally { + try { + H5.H5Tclose(atid); + } + catch (HDF5Exception ex) { + log.debug("getAttrValue(): H5Tclose(atid {}) failure: ", atid, ex); + } + try { + H5.H5Sclose(asid); + } + catch (HDF5Exception ex) { + log.debug("getAttrValue(): H5Sclose(asid {}) failure: ", asid, ex); + } + try { + H5.H5Aclose(aid); + } + catch (HDF5Exception ex) { + log.debug("getAttrValue(): H5Aclose(aid {}) failure: ", aid, ex); + } + } + } // (aid > 0) + + log.trace("getAttrValue(): finish"); + return avalue; + } + + private boolean isStringAttributeOf(long objID, String name, String value) { + boolean retValue = false; + long aid = -1; + long atid = -1; + + try { + if (H5.H5Aexists_by_name(objID, ".", name, HDF5Constants.H5P_DEFAULT)) { + aid = H5.H5Aopen_by_name(objID, ".", name, HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT); + atid = H5.H5Aget_type(aid); + int size = (int)H5.H5Tget_size(atid); + byte[] attrValue = new byte[size]; + H5.H5Aread(aid, atid, attrValue); + String strValue = new String(attrValue).trim(); + retValue = strValue.equalsIgnoreCase(value); + } + } + catch (Exception ex) { + log.debug("isStringAttributeOf(): 
try to find out interlace mode:", ex); + } + finally { + try { + H5.H5Tclose(atid); + } + catch (HDF5Exception ex) { + log.debug("isStringAttributeOf(): H5Tclose(atid {}) failure: ", atid, ex); + } + try { + H5.H5Aclose(aid); + } + catch (HDF5Exception ex) { + log.debug("isStringAttributeOf(): H5Aclose(aid {}) failure: ", aid, ex); + } + } + + return retValue; + } + + /* + * (non-Javadoc) + * + * @see hdf.object.Dataset#copy(hdf.object.Group, java.lang.String, long[], java.lang.Object) + */ + @Override + public Dataset copy(Group pgroup, String dstName, long[] dims, Object buff) throws Exception { + log.trace("copy(): start"); + // must give a location to copy + if (pgroup == null) { + log.debug("copy(): Parent group is null"); + log.trace("copy(): finish"); + return null; + } + + Dataset dataset = null; + long srcdid = -1; + long dstdid = -1; + long plist = -1; + long tid = -1; + long sid = -1; + String dname = null; + String path = null; + + if (pgroup.isRoot()) { + path = HObject.SEPARATOR; + } + else { + path = pgroup.getPath() + pgroup.getName() + HObject.SEPARATOR; + } + dname = path + dstName; + + srcdid = open(); + if (srcdid >= 0) { + try { + tid = H5.H5Dget_type(srcdid); + sid = H5.H5Screate_simple(dims.length, dims, null); + plist = H5.H5Dget_create_plist(srcdid); + + long[] chunks = new long[dims.length]; + boolean setChunkFlag = false; + try { + H5.H5Pget_chunk(plist, dims.length, chunks); + for (int i = 0; i < dims.length; i++) { + if (dims[i] < chunks[i]) { + setChunkFlag = true; + if (dims[i] == 1) + chunks[i] = 1; + else + chunks[i] = dims[i] / 2; + } + } + } + catch (Exception ex) { + log.debug("copy(): chunk: ", ex); + } + + if (setChunkFlag) + H5.H5Pset_chunk(plist, dims.length, chunks); + + try { + dstdid = H5.H5Dcreate(pgroup.getFID(), dname, tid, sid, HDF5Constants.H5P_DEFAULT, plist, + HDF5Constants.H5P_DEFAULT); + } + catch (Exception e) { + log.debug("copy(): H5Dcreate: ", e); + } + finally { + try { + H5.H5Dclose(dstdid); + } + catch (Exception ex2) { + log.debug("copy(): H5Dclose(dstdid {}) failure: ", dstdid, ex2); + } + } + + dataset = new H5ScalarDS(pgroup.getFileFormat(), dstName, path); + if (buff != null) { + dataset.init(); + dataset.write(buff); + } + + dstdid = dataset.open(); + if (dstdid >= 0) { + try { + H5File.copyAttributes(srcdid, dstdid); + } + finally { + try { + H5.H5Dclose(dstdid); + } + catch (Exception ex) { + log.debug("copy(): H5Dclose(dstdid {}) failure: ", dstdid, ex); + } + } + } + } + finally { + try { + H5.H5Pclose(plist); + } + catch (Exception ex) { + log.debug("copy(): H5Pclose(plist {}) failure: ", plist, ex); + } + try { + H5.H5Sclose(sid); + } + catch (Exception ex) { + log.debug("copy(): H5Sclose(sid {}) failure: ", sid, ex); + } + try { + H5.H5Tclose(tid); + } + catch (Exception ex) { + log.debug("copy(): H5Tclose(tid {}) failure: ", tid, ex); + } + try { + H5.H5Dclose(srcdid); + } + catch (Exception ex) { + log.debug("copy(): H5Dclose(srcdid {}) failure: ", srcdid, ex); + } + } + } + + pgroup.addToMemberList(dataset); + + if (dataset != null) + ((ScalarDS) dataset).setIsImage(isImage); + + log.trace("copy(): finish"); + return dataset; + } + + /* + * (non-Javadoc) + * + * @see hdf.object.ScalarDS#getPalette() + */ + @Override + public byte[][] getPalette() { + if (palette == null) { + palette = readPalette(0); + } + + return palette; + } + + /* + * (non-Javadoc) + * + * @see hdf.object.ScalarDS#getPaletteName(int) + */ + @Override + public String getPaletteName(int idx) { + log.trace("getPaletteName(): start"); + + byte[] 
refs = getPaletteRefs(); + long did = -1; + long palID = -1; + String paletteName = null; + + if (refs == null) { + log.debug("getPaletteName(): refs is null"); + log.trace("getPaletteName(): exit"); + return null; + } + + byte[] refBuf = new byte[8]; + + try { + System.arraycopy(refs, idx * 8, refBuf, 0, 8); + } + catch (Exception err) { + log.debug("getPaletteName(): arraycopy failure: ", err); + log.trace("getPaletteName(): finish"); + return null; + } + + did = open(); + if (did >= 0) { + try { + palID = H5.H5Rdereference(getFID(), HDF5Constants.H5P_DEFAULT, HDF5Constants.H5R_OBJECT, refBuf); + paletteName = H5.H5Iget_name(palID); + } + catch (Exception ex) { + ex.printStackTrace(); + } + finally { + close(palID); + close(did); + } + } + + log.trace("getPaletteName(): finish"); + return paletteName; + } + + /* + * (non-Javadoc) + * + * @see hdf.object.ScalarDS#readPalette(int) + */ + @Override + public byte[][] readPalette(int idx) { + log.trace("readPalette(): start"); + + byte[][] thePalette = null; + byte[] refs = getPaletteRefs(); + long did = -1; + long palID = -1; + long tid = -1; + + if (refs == null) { + log.debug("readPalette(): refs is null"); + log.trace("readPalette(): finish"); + return null; + } + + byte[] p = null; + byte[] refBuf = new byte[8]; + + try { + System.arraycopy(refs, idx * 8, refBuf, 0, 8); + } + catch (Exception err) { + log.debug("readPalette(): arraycopy failure: ", err); + log.trace("readPalette(): failure"); + return null; + } + + did = open(); + if (did >= 0) { + try { + palID = H5.H5Rdereference(getFID(), HDF5Constants.H5P_DEFAULT, HDF5Constants.H5R_OBJECT, refBuf); + log.trace("readPalette(): H5Rdereference: {}", palID); + tid = H5.H5Dget_type(palID); + + // support only 3*256 byte palette data + if (H5.H5Dget_storage_size(palID) <= 768) { + p = new byte[3 * 256]; + H5.H5Dread(palID, tid, HDF5Constants.H5S_ALL, HDF5Constants.H5S_ALL, HDF5Constants.H5P_DEFAULT, p); + } + } + catch (HDF5Exception ex) { + log.debug("readPalette(): failure: ", ex); + p = null; + } + finally { + try { + H5.H5Tclose(tid); + } + catch (HDF5Exception ex2) { + log.debug("readPalette(): H5Tclose(tid {}) failure: ", tid, ex2); + } + close(palID); + close(did); + } + } + + if (p != null) { + thePalette = new byte[3][256]; + for (int i = 0; i < 256; i++) { + thePalette[0][i] = p[i * 3]; + thePalette[1][i] = p[i * 3 + 1]; + thePalette[2][i] = p[i * 3 + 2]; + } + } + + log.trace("readPalette(): finish"); + return thePalette; + } + + private static byte[] parseFillValue(Datatype type, Object fillValue) throws Exception { + log.trace("parseFillValue(): start"); + + byte[] data = null; + + if (type == null || fillValue == null) { + log.debug("parseFillValue(): datatype or fill value is null"); + log.trace("parseFillValue(): exit"); + return null; + } + + int datatypeClass = type.getDatatypeClass(); + int datatypeSize = (int)type.getDatatypeSize(); + + double valDbl = 0; + String valStr = null; + + if (fillValue instanceof String) { + valStr = (String) fillValue; + } + else if (fillValue.getClass().isArray()) { + valStr = Array.get(fillValue, 0).toString(); + } + + if (!type.isString()) { + try { + valDbl = Double.parseDouble(valStr); + } + catch (NumberFormatException ex) { + log.debug("parseFillValue(): parse error: ", ex); + log.trace("parseFillValue(): finish"); + return null; + } + } + + try { + switch (datatypeClass) { + case Datatype.CLASS_INTEGER: + case Datatype.CLASS_ENUM: + case Datatype.CLASS_CHAR: + log.trace("parseFillValue(): class CLASS_INT-ENUM-CHAR"); + if 
(datatypeSize == 1) {
+                data = new byte[] { (byte) valDbl };
+            }
+            else if (datatypeSize == 2) {
+                data = HDFNativeData.shortToByte((short) valDbl);
+            }
+            else if (datatypeSize == 8) {
+                data = HDFNativeData.longToByte((long) valDbl);
+            }
+            else {
+                data = HDFNativeData.intToByte((int) valDbl);
+            }
+            break;
+        case Datatype.CLASS_FLOAT:
+            log.trace("parseFillValue(): class CLASS_FLOAT");
+            if (datatypeSize == 8) {
+                data = HDFNativeData.doubleToByte(valDbl);
+            }
+            else {
+                data = HDFNativeData.floatToByte((float) valDbl);
+            }
+            break;
+        case Datatype.CLASS_STRING:
+            log.trace("parseFillValue(): class CLASS_STRING");
+            if (valStr != null)
+                data = valStr.getBytes();
+            break;
+        case Datatype.CLASS_REFERENCE:
+            log.trace("parseFillValue(): class CLASS_REFERENCE");
+            data = HDFNativeData.longToByte((long) valDbl);
+            break;
+        default:
+            log.debug("parseFillValue(): datatypeClass unknown");
+            break;
+        } // (datatypeClass)
+        }
+        catch (Exception ex) {
+            log.debug("parseFillValue(): failure: ", ex);
+            data = null;
+        }
+
+        log.trace("parseFillValue(): finish");
+        return data;
+    }
+
+    /*
+     * (non-Javadoc)
+     *
+     * @see hdf.object.ScalarDS#getPaletteRefs()
+     */
+    @Override
+    public byte[] getPaletteRefs() {
+        if (!isInited())
+            init(); // init will be called to get refs
+
+        return paletteRefs;
+    }
+
+    /**
+     * Reads references of palettes into a byte array. Each reference requires eight bytes of storage; therefore,
+     * the array length is 8*numberOfPalettes.
+     */
+    private byte[] getPaletteRefs(long did) {
+        log.trace("getPaletteRefs(): start");
+
+        long aid = -1;
+        long sid = -1;
+        long atype = -1;
+        int size = 0;
+        int rank = 0;
+        byte[] refbuf = null;
+
+        try {
+            if (H5.H5Aexists_by_name(did, ".", "PALETTE", HDF5Constants.H5P_DEFAULT)) {
+                aid = H5.H5Aopen_by_name(did, ".", "PALETTE", HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT);
+                sid = H5.H5Aget_space(aid);
+                rank = H5.H5Sget_simple_extent_ndims(sid);
+                size = 1;
+                if (rank > 0) {
+                    long[] dims = new long[rank];
+                    H5.H5Sget_simple_extent_dims(sid, dims, null);
+                    log.trace("getPaletteRefs(): rank={}, dims={}", rank, dims);
+                    for (int i = 0; i < rank; i++) {
+                        size *= (int) dims[i];
+                    }
+                }
+
+                if ((size * 8) < Integer.MIN_VALUE || (size * 8) > Integer.MAX_VALUE) throw new HDF5Exception("Invalid int size");
+
+                refbuf = new byte[size * 8];
+                atype = H5.H5Aget_type(aid);
+
+                H5.H5Aread(aid, atype, refbuf);
+            }
+        }
+        catch (HDF5Exception ex) {
+            log.debug("getPaletteRefs(): Palette attribute search failed: Expected", ex);
+            refbuf = null;
+        }
+        finally {
+            try {
+                H5.H5Tclose(atype);
+            }
+            catch (HDF5Exception ex2) {
+                log.debug("getPaletteRefs(): H5Tclose(atype {}) failure: ", atype, ex2);
+            }
+            try {
+                H5.H5Sclose(sid);
+            }
+            catch (HDF5Exception ex2) {
+                log.debug("getPaletteRefs(): H5Sclose(sid {}) failure: ", sid, ex2);
+            }
+            try {
+                H5.H5Aclose(aid);
+            }
+            catch (HDF5Exception ex2) {
+                log.debug("getPaletteRefs(): H5Aclose(aid {}) failure: ", aid, ex2);
+            }
+        }
+
+        log.trace("getPaletteRefs(): finish");
+        return refbuf;
+    }
+
+    /**
+     * H5Dset_extent verifies that the dataset is at least of size newDims, extending it if necessary. The
+     * dimensionality of newDims is the same as that of the dataspace of the dataset being changed.
+ * + * This function can be applied to the following datasets: 1) Any dataset with unlimited dimensions 2) A dataset + * with fixed dimensions if the current dimension sizes are less than the maximum sizes set with maxdims (see + * H5Screate_simple) + * + * @param newDims the dimension target size + * + * @throws HDF5Exception + * If there is an error at the HDF5 library level. + */ + public void extend(long[] newDims) throws HDF5Exception { + long did = -1; + long sid = -1; + + did = open(); + if (did >= 0) { + try { + H5.H5Dset_extent(did, newDims); + H5.H5Fflush(did, HDF5Constants.H5F_SCOPE_GLOBAL); + sid = H5.H5Dget_space(did); + long[] checkDims = new long[rank]; + H5.H5Sget_simple_extent_dims(sid, checkDims, null); + log.trace("extend(): rank={}, checkDims={}", rank, checkDims); + for (int i = 0; i < rank; i++) { + if (checkDims[i] != newDims[i]) { + log.debug("extend(): error extending dataset"); + throw new HDF5Exception("error extending dataset " + getName()); + } + } + dims = checkDims; + } + catch (Exception e) { + log.debug("extend(): failure: ", e); + throw new HDF5Exception(e.getMessage()); + } + finally { + if (sid > 0) + H5.H5Sclose(sid); + + close(did); + } + } + } + + /* + * (non-Javadoc) + * + * @see hdf.object.Dataset#isVirtual() + */ + @Override + public boolean isVirtual() { + return isVirtual; + } + + /* + * (non-Javadoc) + * + * @see hdf.object.Dataset#getVirtualFilename(int) + */ + @Override + public String getVirtualFilename(int index) { + if(isVirtual) + return virtualNameList.get(index); + else + return null; + } + + /* + * (non-Javadoc) + * + * @see hdf.object.Dataset#getVirtualMaps() + */ + @Override + public int getVirtualMaps() { + if(isVirtual) + return virtualNameList.size(); + else + return -1; + } +} diff --git a/src/main/java/hdf/object/h5/H5Utils.java b/src/main/java/hdf/object/h5/H5Utils.java new file mode 100644 index 0000000..65e3ff5 --- /dev/null +++ b/src/main/java/hdf/object/h5/H5Utils.java @@ -0,0 +1,135 @@ +/***************************************************************************** + * Copyright by The HDF Group. * + * Copyright by the Board of Trustees of the University of Illinois. * + * All rights reserved. * + * * + * This file is part of the HDF Java Products distribution. * + * The full copyright notice, including terms governing use, modification, * + * and redistribution, is contained in the files COPYING and Copyright.html. * + * COPYING can be found at the root of the source code distribution tree. * + * Or, see https://support.hdfgroup.org/products/licenses.html * + * If you do not have access to either file, you may request a copy from * + * help@hdfgroup.org. * + ****************************************************************************/ + +package hdf.object.h5; + +import hdf.hdf5lib.H5; +import hdf.hdf5lib.HDF5Constants; +import hdf.hdf5lib.exceptions.HDF5Exception; + +public final class H5Utils { + + private static final org.slf4j.Logger log = org.slf4j.LoggerFactory.getLogger(H5Utils.class); + + /** + * Set up a hyperslab selection within a dataset. + * + * @param did + * IN dataset ID + * @param dsetDims + * IN dimensions + * @param startDims + * IN start dimensions + * @param selectedStride + * IN selected stride values + * @param selectedDims + * IN selected dimensions + * @param spaceIDs + * IN/OUT memory and file space IDs -- spaceIDs[0]=mspace, spaceIDs[1]=fspace + * + * @return total number of data points selected + * + * @throws HDF5Exception + * If there is an error at the HDF5 library level. 
+     */
+    public static final long selectHyperslab(long did, long[] dsetDims, long[] startDims, long[] selectedStride,
+            long[] selectedDims, long[] spaceIDs) throws HDF5Exception {
+        log.trace("selectHyperslab(): start");
+
+        if (dsetDims == null) {
+            log.debug("selectHyperslab(): dsetDims is null");
+            return -1;
+        }
+
+        int rank = dsetDims.length;
+        if ((startDims != null) && (startDims.length != rank)) {
+            log.debug("selectHyperslab(): startDims rank didn't match dsetDims rank");
+            return -1;
+        }
+        if ((selectedStride != null) && (selectedStride.length != rank)) {
+            log.debug("selectHyperslab(): selectedStride rank didn't match startDims rank");
+            return -1;
+        }
+        if ((selectedDims != null) && (selectedDims.length != rank)) {
+            log.debug("selectHyperslab(): selectedDims rank didn't match startDims rank");
+            return -1;
+        }
+
+        long lsize = 1;
+
+        boolean isAllSelected = true;
+        for (int i = 0; i < rank; i++) {
+            if (selectedDims != null) {
+                lsize *= selectedDims[i];
+                if (selectedDims[i] < dsetDims[i]) {
+                    isAllSelected = false;
+                }
+            }
+        }
+
+        log.trace("selectHyperslab(): isAllSelected={}", isAllSelected);
+
+        if (isAllSelected) {
+            spaceIDs[0] = HDF5Constants.H5S_ALL;
+            spaceIDs[1] = HDF5Constants.H5S_ALL;
+        }
+        else {
+            spaceIDs[1] = H5.H5Dget_space(did);
+
+            // When a 1D dataspace is used for a chunked dataset, reading is very slow.
+            //
+            // It is a known problem within the HDF5 library.
+            // mspace = H5.H5Screate_simple(1, lsize, null);
+            spaceIDs[0] = H5.H5Screate_simple(rank, selectedDims, null);
+            H5.H5Sselect_hyperslab(spaceIDs[1], HDF5Constants.H5S_SELECT_SET, startDims, selectedStride, selectedDims, null);
+        }
+
+        log.trace("selectHyperslab(): finish");
+
+        return lsize;
+    }
+
+    public static final long getTotalSelectedSpacePoints(long did, long[] dsetDims, long[] startDims,
+            long[] selectedStride, long[] selectedDims, long[] spaceIDs) throws HDF5Exception {
+        long totalSelectedSpacePoints = selectHyperslab(did, dsetDims, startDims, selectedStride, selectedDims, spaceIDs);
+
+        log.trace("getTotalSelectedSpacePoints(): selected {} points in dataset's dataspace", totalSelectedSpacePoints);
+
+        if (totalSelectedSpacePoints == 0) {
+            log.debug("getTotalSelectedSpacePoints(): No data to read. Dataset or selected subset is empty.");
+            log.trace("getTotalSelectedSpacePoints(): finish");
+            throw new HDF5Exception("No data to read.\nEither the dataset or the selected subset is empty.");
+        }
+
+        if (totalSelectedSpacePoints < Integer.MIN_VALUE || totalSelectedSpacePoints > Integer.MAX_VALUE) {
+            log.debug("getTotalSelectedSpacePoints(): totalSelectedSpacePoints outside valid Java int range; unsafe cast");
+            log.trace("getTotalSelectedSpacePoints(): finish");
+            throw new HDF5Exception("Invalid int size");
+        }
+
+        if (log.isDebugEnabled()) {
+            // check if storage space is allocated
+            try {
+                long ssize = H5.H5Dget_storage_size(did);
+                log.trace("getTotalSelectedSpacePoints(): Storage space allocated = {} bytes", ssize);
+            }
+            catch (Exception ex) {
+                log.debug("getTotalSelectedSpacePoints(): check if storage space is allocated:", ex);
+            }
+        }
+
+        return totalSelectedSpacePoints;
+    }
+
+}
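
To close, a short caller-side sketch for the two helpers above, not part of the patch (did and tid are assumed to be an already-open dataset identifier and its memory datatype identifier, e.g. from H5.H5Dopen and H5.H5Dget_type; the dimensions are made up):

    long[] dsetDims = { 1024, 1024 };   // full dataset extent
    long[] start    = { 0, 0 };         // slab origin
    long[] stride   = null;             // null selects contiguously
    long[] count    = { 256, 256 };     // slab extent
    long[] spaceIDs = { -1, -1 };       // filled in: [0] = memory space, [1] = file space
    long npoints = H5Utils.getTotalSelectedSpacePoints(did, dsetDims, start, stride, count, spaceIDs);
    int[] buf = new int[(int) npoints]; // 256 * 256 = 65536 elements
    H5.H5Dread(did, tid, spaceIDs[0], spaceIDs[1], HDF5Constants.H5P_DEFAULT, buf);

Because the 256x256 selection is smaller than the 1024x1024 extent, selectHyperslab creates real memory and file dataspaces that the caller should eventually close with H5.H5Sclose; a full-extent selection would instead return H5S_ALL in both slots, which needs no explicit close.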