rework of the plugin to work with the "latest" hdf5 version - it's quite a hack

This commit is contained in:
ebner 2020-10-12 15:09:17 +02:00
parent 58407a97af
commit 8082cdb737
37 changed files with 19172 additions and 261 deletions

View File

@ -1,28 +0,0 @@
<?xml version="1.0" encoding="UTF-8"?>
<classpath>
<classpathentry kind="src" output="target/classes" path="src/main/java">
<attributes>
<attribute name="optional" value="true"/>
<attribute name="maven.pomderived" value="true"/>
</attributes>
</classpathentry>
<classpathentry kind="src" output="target/test-classes" path="src/test/java">
<attributes>
<attribute name="optional" value="true"/>
<attribute name="maven.pomderived" value="true"/>
</attributes>
</classpathentry>
<classpathentry including="**/*.java" kind="src" path="src/main/resources"/>
<classpathentry including="**/*.java" kind="src" path="src/test/resources"/>
<classpathentry kind="con" path="org.eclipse.m2e.MAVEN2_CLASSPATH_CONTAINER">
<attributes>
<attribute name="maven.pomderived" value="true"/>
</attributes>
</classpathentry>
<classpathentry kind="con" path="org.eclipse.jdt.launching.JRE_CONTAINER/org.eclipse.jdt.internal.debug.ui.launcher.StandardVMType/JavaSE-1.7">
<attributes>
<attribute name="maven.pomderived" value="true"/>
</attributes>
</classpathentry>
<classpathentry kind="output" path="target/classes"/>
</classpath>

1
.gitignore vendored
View File

@ -4,3 +4,4 @@ imagej.hdf5.iml
/target
.gradle
build
out/

View File

@ -1,29 +0,0 @@
<?xml version="1.0" encoding="UTF-8"?>
<projectDescription>
<name>ch.psi.imagej.hdf5</name>
<comment></comment>
<projects>
</projects>
<buildSpec>
<buildCommand>
<name>org.eclipse.jdt.core.javabuilder</name>
<arguments>
</arguments>
</buildCommand>
<buildCommand>
<name>org.eclipse.m2e.core.maven2Builder</name>
<arguments>
</arguments>
</buildCommand>
<buildCommand>
<name>ch.acanda.eclipse.pmd.builder.PMDBuilder</name>
<arguments>
</arguments>
</buildCommand>
</buildSpec>
<natures>
<nature>org.eclipse.jdt.core.javanature</nature>
<nature>org.eclipse.m2e.core.maven2Nature</nature>
<nature>ch.acanda.eclipse.pmd.builder.PMDNature</nature>
</natures>
</projectDescription>

View File

@ -1,3 +0,0 @@
eclipse.preferences.version=1
encoding//src/main/java=UTF-8
encoding//src/test/java=UTF-8

View File

@ -1,5 +0,0 @@
eclipse.preferences.version=1
org.eclipse.jdt.core.compiler.codegen.targetPlatform=1.7
org.eclipse.jdt.core.compiler.compliance=1.7
org.eclipse.jdt.core.compiler.problem.forbiddenReference=warning
org.eclipse.jdt.core.compiler.source=1.7

View File

@ -1,4 +0,0 @@
activeProfiles=
eclipse.preferences.version=1
resolveWorkspaceProjects=true
version=1

View File

@ -2,7 +2,7 @@ apply plugin: 'java'
//apply plugin: 'maven'
group = 'ch.psi'
version = '0.12.0'
version = '0.13.0'
description = """"""
@ -10,10 +10,13 @@ sourceCompatibility = 1.8
targetCompatibility = 1.8
repositories {
mavenCentral()
maven { url "http://artifacts.psi.ch/artifactory/libs-releases" }
mavenCentral()
maven { url "http://artifacts.psi.ch/artifactory/libs-releases" }
maven { url "http://maven.imagej.net/content/repositories/public/" }
flatDir {
dirs 'lib'
}
}
// define a provided scope
@ -24,14 +27,20 @@ configurations {
dependencies {
compile group: 'hdf5', name: 'hdf', version:'2.10.0'
compile group: 'hdf5', name: 'hdfobj', version:'2.10.0'
compile group: 'hdf5', name: 'hdf5', version:'2.10.0'
compile group: 'hdf5', name: 'hdf5obj', version:'2.10.0'
compile group: 'org.slf4j', name: 'slf4j-api', version:'1.7.6'
testCompile group: 'junit', name: 'junit', version:'4.11'
compile name: 'sis-jhdf5-19.04.0'
compile name: 'sis-base-18.09.0'
compile name: 'commons-io-2.6'
// compile name: 'sis-base-18.09.0'
// compile group: 'cisd', name: 'jhdf5', version: '14.12.6'
// compile group: 'hdf5', name: 'hdf', version:'2.10.0'
// compile group: 'hdf5', name: 'hdfobj', version:'2.10.0'
// compile group: 'hdf5', name: 'hdf5', version:'2.10.0'
// compile group: 'hdf5', name: 'hdf5obj', version:'2.10.0'
compile group: 'org.slf4j', name: 'slf4j-api', version: '1.7.6'
testCompile group: 'junit', name: 'junit', version: '4.11'
provided group: 'net.imagej', name: 'ij', version:'1.53e'
provided group: 'net.imagej', name: 'ij', version: '1.53e'
}
task fatJar(type: Jar) {
@ -41,12 +50,11 @@ task fatJar(type: Jar) {
// 'Main-Class': 'ch.psi.caview.CaView'
// }
archiveBaseName = 'HDF5_Viewer'
from { (configurations.compile- configurations.provided).collect { it.isDirectory() ? it : zipTree(it) } }
from { (configurations.compile - configurations.provided).collect { it.isDirectory() ? it : zipTree(it) } }
with jar
// { exclude group: "net.imagej", name:'ij'}
}
//
//task distributionZip(type: Zip, dependsOn: [jar]) {
//task distributionZip(type: Zip, dependsOn: [fatJar]) {
// baseName "${project.group}-jhdf5"
//
// from('targets/dist') {

BIN
lib/commons-io-2.6.jar Normal file

Binary file not shown.

BIN
lib/sis-base-18.09.0.jar Normal file

Binary file not shown.

BIN
lib/sis-jhdf5-19.04.0.jar Normal file

Binary file not shown.

77
pom.xml
View File

@ -1,77 +0,0 @@
<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
<modelVersion>4.0.0</modelVersion>
<groupId>ch.psi</groupId>
<artifactId>imagej.hdf5</artifactId>
<version>0.12.0</version>
<dependencies>
<dependency>
<groupId>gov.nih.imagej</groupId>
<artifactId>imagej</artifactId>
<version>1.46</version>
<scope>provided</scope>
</dependency>
<dependency>
<groupId>hdf5</groupId>
<artifactId>hdf</artifactId>
<version>2.10.0</version>
</dependency>
<dependency>
<groupId>hdf5</groupId>
<artifactId>hdfobj</artifactId>
<version>2.10.0</version>
</dependency>
<dependency>
<groupId>hdf5</groupId>
<artifactId>hdf5</artifactId>
<version>2.10.0</version>
</dependency>
<dependency>
<groupId>hdf5</groupId>
<artifactId>hdf5obj</artifactId>
<version>2.10.0</version>
</dependency>
<!-- The HDF5 libraries -->
<dependency>
<groupId>org.slf4j</groupId>
<artifactId>slf4j-api</artifactId>
<version>1.7.6</version>
</dependency>
<dependency>
<groupId>junit</groupId>
<artifactId>junit</artifactId>
<version>4.11</version>
<scope>test</scope>
</dependency>
</dependencies>
<build>
<plugins>
<plugin>
<artifactId>maven-compiler-plugin</artifactId>
<version>2.3.2</version>
<configuration>
<encoding>UTF-8</encoding>
<source>1.7</source>
<target>1.7</target>
</configuration>
</plugin>
<plugin>
<artifactId>maven-assembly-plugin</artifactId>
<version>2.4</version>
<configuration>
<finalName>HDF5_Viewer-${pom.version}</finalName>
<appendAssemblyId>false</appendAssemblyId>
<archive />
<descriptors>
<descriptor>src/main/assembly/assembly_jar.xml</descriptor>
<descriptor>src/main/assembly/assembly.xml</descriptor>
</descriptors>
</configuration>
</plugin>
</plugins>
</build>
</project>

View File

@ -1,44 +0,0 @@
<?xml version="1.0" encoding="UTF-8"?>
<settings xsi:schemaLocation="http://maven.apache.org/SETTINGS/1.0.0 http://maven.apache.org/xsd/settings-1.0.0.xsd" xmlns="http://maven.apache.org/SETTINGS/1.0.0"
xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance">
<profiles>
<profile>
<repositories>
<repository>
<snapshots>
<enabled>false</enabled>
</snapshots>
<id>central</id>
<name>libs-releases</name>
<url>http://artifacts.psi.ch/artifactory/libs-releases</url>
</repository>
<repository>
<snapshots />
<id>snapshots</id>
<name>libs-snapshots</name>
<url>http://artifacts.psi.ch/artifactory/libs-snapshots</url>
</repository>
</repositories>
<pluginRepositories>
<pluginRepository>
<snapshots>
<enabled>false</enabled>
</snapshots>
<id>central</id>
<name>libs-releases</name>
<url>http://artifacts.psi.ch/artifactory/libs-releases</url>
</pluginRepository>
<pluginRepository>
<snapshots />
<id>snapshots</id>
<name>libs-releases</name>
<url>http://artifacts.psi.ch/artifactory/libs-releases</url>
</pluginRepository>
</pluginRepositories>
<id>artifactory</id>
</profile>
</profiles>
<activeProfiles>
<activeProfile>artifactory</activeProfile>
</activeProfiles>
</settings>

View File

@ -3,7 +3,7 @@ package ch.psi.imagej.hdf5;
import java.util.ArrayList;
import java.util.List;
import ncsa.hdf.object.Dataset;
import hdf.object.Dataset;
public class DatasetSelection {

View File

@ -1,5 +1,9 @@
package ch.psi.imagej.hdf5;
import hdf.hdf5lib.exceptions.HDF5Exception;
import hdf.object.Dataset;
import hdf.object.Datatype;
import hdf.object.h5.H5File;
import ij.IJ;
import ij.ImagePlus;
import ij.CompositeImage;
@ -14,10 +18,6 @@ import java.util.*;
import java.util.logging.Level;
import java.util.logging.Logger;
import ncsa.hdf.object.*;
import ncsa.hdf.object.h5.*;
import ncsa.hdf.hdf5lib.exceptions.HDF5Exception;
public class HDF5Reader implements PlugIn {
private static final Logger logger = Logger.getLogger(HDF5Reader.class.getName());
@ -139,7 +139,7 @@ public class HDF5Reader implements PlugIn {
int numberOfDimensions = var.getRank();
long[] dimensions= var.getDims();
logger.info("Reading dataset: " + datasetName + " Dimensions: " + numberOfDimensions + " Type: " + datatype.getDatatypeDescription());
logger.info("Reading dataset: " + datasetName + " Dimensions: " + numberOfDimensions + " Type: " + datatype.getDescription());
// Read dataset

View File

@ -11,40 +11,40 @@ import java.util.regex.Pattern;
import javax.swing.tree.DefaultMutableTreeNode;
import ncsa.hdf.object.Attribute;
import ncsa.hdf.object.Dataset;
import ncsa.hdf.object.FileFormat;
import ncsa.hdf.object.Group;
import ncsa.hdf.object.HObject;
import ncsa.hdf.object.h5.H5File;
import hdf.object.Attribute;
import hdf.object.Dataset;
import hdf.object.FileFormat;
import hdf.object.Group;
import hdf.object.HObject;
import hdf.object.h5.H5File;
public class HDF5Utilities {
private static final Logger logger = Logger.getLogger(HDF5Utilities.class.getName());
/**
* Get attributes from object
* @param object Object to retrieve the attributes from
* @return Map of attributes or null if an error occurred while retrieving the attributes or the passed object is null
*/
public static Map<String,Attribute> getAttributes(HObject object) {
Objects.requireNonNull(object);
Map<String, Attribute> attributes = new HashMap<>();
try{
for(Object m: object.getMetadata()){
if(m instanceof Attribute){
attributes.put(((Attribute) m).getName(), (Attribute) m);
}
}
}
catch(Exception e){
logger.warning("Unable to retrieve metadata from object");
return null;
}
return attributes;
}
// /**
// * Get attributes from object
// * @param object Object to retrieve the attributes from
// * @return Map of attributes or null if an error occurred while retrieving the attributes or the passed object is null
// */
// public static Map<String,Attribute> getAttributes(HObject object) {
// Objects.requireNonNull(object);
//
// Map<String, Attribute> attributes = new HashMap<>();
// try{
// for(Object m: object.getMetadata()){
// if(m instanceof Attribute){
// attributes.put(((Attribute) m).getName(), (Attribute) m);
// }
// }
// }
// catch(Exception e){
// logger.warning("Unable to retrieve metadata from object");
// return null;
// }
//
// return attributes;
// }
/**
@ -85,7 +85,7 @@ public class HDF5Utilities {
* @return
*/
public static Group createGroup( FileFormat file, String groupName) {
return createGroup(file, (Group) ((DefaultMutableTreeNode) file.getRootNode()).getUserObject(), groupName);
return createGroup(file, (Group) file.getRootObject(), groupName);
}
/**
@ -100,7 +100,7 @@ public class HDF5Utilities {
Objects.requireNonNull(groupName);
if (group == null){
group = (Group) ((DefaultMutableTreeNode) file.getRootNode()).getUserObject();
group = (Group) file.getRootObject();
}
Group ngroup = group;
@ -130,7 +130,7 @@ public class HDF5Utilities {
* @return
*/
public static List<Dataset> getDatasets(H5File file) {
Group rootNode = (Group) ((javax.swing.tree.DefaultMutableTreeNode) file.getRootNode()).getUserObject();
Group rootNode = (Group) file.getRootObject();
List<Dataset> datasets = getDatasets(rootNode);
return datasets;
}

View File

@ -1,6 +1,12 @@
package ch.psi.imagej.hdf5;
import hdf.object.Dataset;
import hdf.object.Datatype;
import hdf.object.FileFormat;
import hdf.object.Group;
import hdf.object.h5.H5Datatype;
import hdf.object.h5.H5File;
import ij.*;
import ij.io.*;
import ij.plugin.filter.PlugInFilter;
@ -10,9 +16,9 @@ import ij.gui.*;
import java.util.logging.Level;
import java.util.logging.Logger;
import ncsa.hdf.object.*; // the common object package
import ncsa.hdf.object.h5.*; // the HDF5 implementation
import ncsa.hdf.hdf5lib.exceptions.HDF5Exception;
//import hdf.object.*; // the common object package
//import hdf.object.h5.*; // the HDF5 implementation
import hdf.hdf5lib.exceptions.HDF5Exception;
public class HDF5Writer implements PlugInFilter {
@ -60,18 +66,22 @@ public class HDF5Writer implements PlugInFilter {
int imgColorType = imp.getType();
Datatype type = null;
if (imgColorType == ImagePlus.GRAY8) {
logger.info(" bit depth: " + imgColorDepth + ", type: GRAY8");
type = new H5Datatype(Datatype.CLASS_CHAR, Datatype.NATIVE, Datatype.NATIVE, Datatype.SIGN_NONE);
} else if (imgColorType == ImagePlus.GRAY16) {
logger.info(" bit depth: " + imgColorDepth + ", type: GRAY16");
type = new H5Datatype(Datatype.CLASS_INTEGER, 2, Datatype.NATIVE, Datatype.SIGN_NONE);
} else if (imgColorType == ImagePlus.GRAY32) {
logger.info(" bit depth: " + imgColorDepth + ", type: GRAY32");
type = new H5Datatype(Datatype.CLASS_FLOAT, Datatype.NATIVE, Datatype.NATIVE, -1);
} else if (imgColorType == ImagePlus.COLOR_RGB) {
logger.info(" bit depth: " + imgColorDepth + ", type: COLOR_RGB");
type = new H5Datatype(Datatype.CLASS_CHAR, Datatype.NATIVE, Datatype.NATIVE, Datatype.SIGN_NONE);
try {
if (imgColorType == ImagePlus.GRAY8) {
logger.info(" bit depth: " + imgColorDepth + ", type: GRAY8");
type = new H5Datatype(Datatype.CLASS_CHAR, Datatype.NATIVE, Datatype.NATIVE, Datatype.SIGN_NONE);
} else if (imgColorType == ImagePlus.GRAY16) {
logger.info(" bit depth: " + imgColorDepth + ", type: GRAY16");
type = new H5Datatype(Datatype.CLASS_INTEGER, 2, Datatype.NATIVE, Datatype.SIGN_NONE);
} else if (imgColorType == ImagePlus.GRAY32) {
logger.info(" bit depth: " + imgColorDepth + ", type: GRAY32");
type = new H5Datatype(Datatype.CLASS_FLOAT, Datatype.NATIVE, Datatype.NATIVE, -1);
} else if (imgColorType == ImagePlus.COLOR_RGB) {
logger.info(" bit depth: " + imgColorDepth + ", type: COLOR_RGB");
type = new H5Datatype(Datatype.CLASS_CHAR, Datatype.NATIVE, Datatype.NATIVE, Datatype.SIGN_NONE);
}
} catch (Exception e){
throw new RuntimeException("Unable to create dataset", e);
}
if (imp.getOpenAsHyperStack() || imp.isHyperStack()) {

View File

@ -14,7 +14,7 @@ import javax.swing.JPanel;
import javax.swing.JScrollPane;
import javax.swing.ScrollPaneConstants;
import ncsa.hdf.object.Dataset;
import hdf.object.Dataset;
import javax.swing.JTextField;
import java.awt.FlowLayout;

View File

@ -3,8 +3,8 @@ package ch.psi.imagej.hdf5;
import java.util.logging.Level;
import java.util.logging.Logger;
import ncsa.hdf.object.Dataset;
import ncsa.hdf.object.h5.H5File;
import hdf.object.Dataset;
import hdf.object.h5.H5File;
import ij.ImageStack;
import ij.process.ByteProcessor;
import ij.process.ColorProcessor;

File diff suppressed because it is too large Load Diff

View File

@ -0,0 +1,445 @@
/*****************************************************************************
* Copyright by The HDF Group. *
* Copyright by the Board of Trustees of the University of Illinois. *
* All rights reserved. *
* *
* This file is part of the HDF Java Products distribution. *
* The full copyright notice, including terms governing use, modification, *
* and redistribution, is contained in the files COPYING and Copyright.html. *
* COPYING can be found at the root of the source code distribution tree. *
* Or, see https://support.hdfgroup.org/products/licenses.html *
* If you do not have access to either file, you may request a copy from *
* help@hdfgroup.org. *
****************************************************************************/
package hdf.object;
/**
* A CompoundDS is a dataset with compound datatype.
* <p>
* A compound datatype is an aggregation of one or more datatypes. Each member
* of a compound type has a name which is unique within that type, and a
* datatype of that member in a compound datum. Compound datatypes can be nested,
* i.e. members of a compound datatype can be some other compound datatype.
* <p>
* For more details on compound datatypes,
* see <b> <a href="https://support.hdfgroup.org/HDF5/doc/UG/HDF5_Users_Guide-Responsive%20HTML5/index.html">HDF5 User's Guide</a> </b>
* <p>
* Since Java cannot handle C-structured compound data, data in a compound dataset
* is loaded in to an Java List. Each element of the list is a data array that
* corresponds to a compound field. The data is read/written by compound field.
* <p>
* For example, if compound dataset "comp" has the following nested structure,
* and member datatypes
*
* <pre>
* comp --&gt; m01 (int)
* comp --&gt; m02 (float)
* comp --&gt; nest1 --&gt; m11 (char)
* comp --&gt; nest1 --&gt; m12 (String)
* comp --&gt; nest1 --&gt; nest2 --&gt; m21 (long)
* comp --&gt; nest1 --&gt; nest2 --&gt; m22 (double)
* </pre>
*
* The data object is a Java list of six arrays: {int[], float[], char[],
* Stirng[], long[] and double[]}.
*
*
* @version 1.1 9/4/2007
* @author Peter X. Cao
*/
public abstract class CompoundDS extends Dataset implements CompoundDataFormat {
private static final long serialVersionUID = -4880399929644095662L;
private static final org.slf4j.Logger log = org.slf4j.LoggerFactory.getLogger(CompoundDS.class);
/**
* A single character to separate the names of nested compound fields. An
* extended ASCII character, 0x95, is used to avoid common characters in
* compound names.
*/
public static final String SEPARATOR = "\u0095";
/**
* The number of members of the compound dataset.
*/
protected int numberOfMembers;
/**
* The names of members of the compound dataset.
*/
protected String[] memberNames;
/**
* Returns array containing the total number of elements of the members of
* this compound dataset.
* <p>
* For example, a compound dataset COMP has members of A, B and C as
*
* <pre>
* COMP {
* int A;
* float B[5];
* double C[2][3];
* }
* </pre>
*
* memberOrders is an integer array of {1, 5, 6} to indicate that member A
* has one element, member B has 5 elements, and member C has 6 elements.
*/
protected int[] memberOrders;
/**
* The dimension sizes of each member.
* <p>
* The i-th element of the Object[] is an integer array (int[]) that
* contains the dimension sizes of the i-th member.
*/
protected transient Object[] memberDims;
/**
* The datatypes of compound members.
*/
protected Datatype[] memberTypes;
/**
* The array to store flags to indicate if a member of this compound
* dataset is selected for read/write.
* <p>
* If a member is selected, the read/write will perform on the member.
* Applications such as HDFView will only display the selected members of
* the compound dataset.
*
* <pre>
* For example, if a compound dataset has four members
* String[] memberNames = {"X", "Y", "Z", "TIME"};
* and
* boolean[] isMemberSelected = {true, false, false, true};
* members "X" and "TIME" are selected for read and write.
* </pre>
*/
protected boolean[] isMemberSelected;
/**
* Constructs a CompoundDS object with the given file, dataset name and path.
* <p>
* The dataset object represents an existing dataset in the file. For
* example, new H5CompoundDS(file, "dset1", "/g0/") constructs a dataset
* object that corresponds to the dataset, "dset1", at group "/g0/".
* <p>
* This object is usually constructed at FileFormat.open(), which loads the
* file structure and object information into memory. It is rarely used
* elsewhere.
*
* @param theFile
* the file that contains the dataset.
* @param dsName
* the name of the CompoundDS, e.g. "compDS".
* @param dsPath
* the full path of the CompoundDS, e.g. "/g1".
*/
public CompoundDS(FileFormat theFile, String dsName, String dsPath) {
this(theFile, dsName, dsPath, null);
}
/**
* @deprecated Not for public use in the future.<br>
* Using {@link #CompoundDS(FileFormat, String, String)}
*
* @param theFile
* the file that contains the dataset.
* @param dsName
* the name of the CompoundDS, e.g. "compDS".
* @param dsPath
* the full path of the CompoundDS, e.g. "/g1".
* @param oid
* the oid of the CompoundDS.
*/
@Deprecated
public CompoundDS(FileFormat theFile, String dsName, String dsPath, long[] oid) {
super(theFile, dsName, dsPath, oid);
numberOfMembers = 0;
memberNames = null;
isMemberSelected = null;
memberTypes = null;
}
/**
* Returns the number of members of the compound dataset.
*
* @return the number of members of the compound dataset.
*/
@Override
public final int getMemberCount() {
return numberOfMembers;
}
/**
* Returns the number of selected members of the compound dataset.
*
* Selected members are the compound fields which are selected for
* read/write.
* <p>
* For example, in a compound datatype of {int A, float B, char[] C},
* users can choose to retrieve only {A, C} from the dataset. In this
* case, getSelectedMemberCount() returns two.
*
* @return the number of selected members.
*/
@Override
public final int getSelectedMemberCount() {
int count = 0;
if (isMemberSelected != null) {
for (int i = 0; i < isMemberSelected.length; i++) {
if (isMemberSelected[i]) {
count++;
}
}
}
log.trace("count of selected members={}", count);
return count;
}
/**
* Returns the names of the members of the compound dataset. The names of
* compound members are stored in an array of Strings.
* <p>
* For example, for a compound datatype of {int A, float B, char[] C}
* getMemberNames() returns ["A", "B", "C"}.
*
* @return the names of compound members.
*/
@Override
public final String[] getMemberNames() {
return memberNames;
}
/**
* Returns an array of the names of the selected members of the compound dataset.
*
* @return an array of the names of the selected members of the compound dataset.
*/
public final String[] getSelectedMemberNames() {
if (isMemberSelected == null) {
log.debug("getSelectedMemberNames(): isMemberSelected array is null");
log.trace("getSelectedMemberNames(): finish");
return memberNames;
}
int idx = 0;
String[] names = new String[getSelectedMemberCount()];
for (int i = 0; i < isMemberSelected.length; i++) {
if (isMemberSelected[i]) {
names[idx++] = memberNames[i];
}
}
return names;
}
/**
* Checks if a member of the compound dataset is selected for read/write.
*
* @param idx
* the index of compound member.
*
* @return true if the i-th memeber is selected; otherwise returns false.
*/
@Override
public final boolean isMemberSelected(int idx) {
if ((isMemberSelected != null) && (isMemberSelected.length > idx)) {
return isMemberSelected[idx];
}
else {
return false;
}
}
/**
* Selects the i-th member for read/write.
*
* @param idx
* the index of compound member.
*/
@Override
public final void selectMember(int idx) {
if ((isMemberSelected != null) && (isMemberSelected.length > idx)) {
isMemberSelected[idx] = true;
}
}
/**
* Selects/deselects all members.
*
* @param selectAll
* The indicator to select or deselect all members. If true, all
* members are selected for read/write. If false, no member is
* selected for read/write.
*/
@Override
public final void setAllMemberSelection(boolean selectAll) {
if (isMemberSelected == null) {
return;
}
for (int i = 0; i < isMemberSelected.length; i++) {
isMemberSelected[i] = selectAll;
}
}
/**
* Returns array containing the total number of elements of the members of
* the compound dataset.
* <p>
* For example, a compound dataset COMP has members of A, B and C as
*
* <pre>
* COMP {
* int A;
* float B[5];
* double C[2][3];
* }
* </pre>
*
* getMemberOrders() will return an integer array of {1, 5, 6} to indicate
* that member A has one element, member B has 5 elements, and member C has
* 6 elements.
*
* @return the array containing the total number of elements of the members
* of compound.
*/
@Override
public final int[] getMemberOrders() {
return memberOrders;
}
/**
* Returns array containing the total number of elements of the selected
* members of the compound dataset.
*
* <p>
* For example, a compound dataset COMP has members of A, B and C as
*
* <pre>
* COMP {
* int A;
* float B[5];
* double C[2][3];
* }
* </pre>
*
* If A and B are selected, getSelectedMemberOrders() returns an array of
* {1, 5}
*
* @return array containing the total number of elements of the selected
* members of compound.
*/
@Override
public final int[] getSelectedMemberOrders() {
log.trace("getSelectedMemberOrders(): start");
if (isMemberSelected == null) {
log.debug("getSelectedMemberOrders(): isMemberSelected array is null");
log.trace("getSelectedMemberOrders(): finish");
return memberOrders;
}
int idx = 0;
int[] orders = new int[getSelectedMemberCount()];
for (int i = 0; i < isMemberSelected.length; i++) {
if (isMemberSelected[i]) {
orders[idx++] = memberOrders[i];
}
}
log.trace("getSelectedMemberOrders(): finish");
return orders;
}
/**
* Returns the dimension sizes of the i-th member.
* <p>
* For example, a compound dataset COMP has members of A, B and C as
*
* <pre>
* COMP {
* int A;
* float B[5];
* double C[2][3];
* }
* </pre>
*
* getMemberDims(2) returns an array of {2, 3}, while getMemberDims(1)
* returns an array of {5}, and getMemberDims(0) returns null.
*
* @param i the i-th member
*
* @return the dimension sizes of the i-th member, null if the compound
* member is not an array.
*/
@Override
public final int[] getMemberDims(int i) {
if (memberDims == null) {
return null;
}
return (int[]) memberDims[i];
}
/**
* Returns an array of datatype objects of compound members.
* <p>
* Each member of a compound dataset has its own datatype. The datatype of a
* member can be atomic or other compound datatype (nested compound).
* Sub-classes set up the datatype objects at init().
* <p>
*
* @return the array of datatype objects of the compound members.
*/
@Override
public final Datatype[] getMemberTypes() {
return memberTypes;
}
/**
* Returns an array of datatype objects of selected compound members.
*
* @return an array of datatype objects of selected compound members.
*/
@Override
public final Datatype[] getSelectedMemberTypes() {
log.trace("getSelectedMemberTypes(): start");
if (isMemberSelected == null) {
log.debug("getSelectedMemberTypes(): isMemberSelected array is null");
log.trace("getSelectedMemberTypes(): finish");
return memberTypes;
}
int idx = 0;
Datatype[] types = new Datatype[getSelectedMemberCount()];
for (int i = 0; i < isMemberSelected.length; i++) {
if (isMemberSelected[i]) {
types[idx++] = memberTypes[i];
}
}
log.trace("getSelectedMemberTypes(): finish");
return types;
}
/**
* @deprecated Not implemented for compound dataset.
*/
@Deprecated
@Override
public Dataset copy(Group pgroup, String name, long[] dims, Object data)
throws Exception {
throw new UnsupportedOperationException(
"Writing a subset of a compound dataset to a new dataset is not implemented.");
}
}

View File

@ -0,0 +1,184 @@
/*****************************************************************************
* Copyright by The HDF Group. *
* Copyright by the Board of Trustees of the University of Illinois. *
* All rights reserved. *
* *
* This file is part of the HDF Java Products distribution. *
* The full copyright notice, including terms governing use, modification, *
* and redistribution, is contained in the files COPYING and Copyright.html. *
* COPYING can be found at the root of the source code distribution tree. *
* Or, see https://support.hdfgroup.org/products/licenses.html *
* If you do not have access to either file, you may request a copy from *
* help@hdfgroup.org. *
****************************************************************************/
package hdf.object;
/**
* An interface that provides general operations for data with a Compound
* datatype. For example, getting the names, dataspaces or datatypes of the
* members of the Compound datatype.
* <p>
*
* @see hdf.object.HObject
*
* @version 1.0 5/3/2018
* @author Jordan T. Henderson
*/
public interface CompoundDataFormat extends DataFormat {
/**
* Returns the number of members of the compound data object.
*
* @return the number of members of the compound data object.
*/
public abstract int getMemberCount();
/**
* Returns the number of selected members of the compound data object.
*
* Selected members are the compound fields which are selected for read/write.
* <p>
* For example, in a compound datatype of {int A, float B, char[] C}, users can
* choose to retrieve only {A, C} from the data object. In this case,
* getSelectedMemberCount() returns two.
*
* @return the number of selected members.
*/
public abstract int getSelectedMemberCount();
/**
* Returns the names of the members of the compound data object. The names of
* compound members are stored in an array of Strings.
* <p>
* For example, for a compound datatype of {int A, float B, char[] C}
* getMemberNames() returns ["A", "B", "C"}.
*
* @return the names of compound members.
*/
public abstract String[] getMemberNames();
/**
* Returns an array of the names of the selected compound members.
*
* @return an array of the names of the selected compound members.
*/
public abstract String[] getSelectedMemberNames();
/**
* Checks if a member of the compound data object is selected for read/write.
*
* @param idx
* the index of compound member.
*
* @return true if the i-th memeber is selected; otherwise returns false.
*/
public abstract boolean isMemberSelected(int idx);
/**
* Selects the i-th member for read/write.
*
* @param idx
* the index of compound member.
*/
public abstract void selectMember(int idx);
/**
* Selects/deselects all members.
*
* @param selectAll
* The indicator to select or deselect all members. If true, all
* members are selected for read/write. If false, no member is
* selected for read/write.
*/
public abstract void setAllMemberSelection(boolean selectAll);
/**
* Returns array containing the total number of elements of the members of the
* compound data object.
* <p>
* For example, a compound dataset COMP has members of A, B and C as
*
* <pre>
* COMP {
* int A;
* float B[5];
* double C[2][3];
* }
* </pre>
*
* getMemberOrders() will return an integer array of {1, 5, 6} to indicate that
* member A has one element, member B has 5 elements, and member C has 6
* elements.
*
* @return the array containing the total number of elements of the members of
* the compound data object.
*/
public abstract int[] getMemberOrders();
/**
* Returns array containing the total number of elements of the selected members
* of the compound data object.
*
* <p>
* For example, a compound dataset COMP has members of A, B and C as
*
* <pre>
* COMP {
* int A;
* float B[5];
* double C[2][3];
* }
* </pre>
*
* If A and B are selected, getSelectedMemberOrders() returns an array of {1, 5}
*
* @return array containing the total number of elements of the selected members
* of the compound data object.
*/
public abstract int[] getSelectedMemberOrders();
/**
* Returns the dimension sizes of the i-th member.
* <p>
* For example, a compound dataset COMP has members of A, B and C as
*
* <pre>
* COMP {
* int A;
* float B[5];
* double C[2][3];
* }
* </pre>
*
* getMemberDims(2) returns an array of {2, 3}, while getMemberDims(1) returns
* an array of {5}, and getMemberDims(0) returns null.
*
* @param i
* the i-th member
*
* @return the dimension sizes of the i-th member, null if the compound member
* is not an array.
*/
public abstract int[] getMemberDims(int i);
    /**
     * Returns an array of datatype objects, one per compound member.
     * <p>
     * Each member of a compound data object has its own datatype. The datatype of a
     * member can be atomic or another compound datatype (nested compound). The
     * datatype objects are set up at init().
     *
     * @return the array of datatype objects of the compound members.
     */
    public abstract Datatype[] getMemberTypes();
    /**
     * Returns an array of datatype objects of only the <em>selected</em> compound
     * members.
     *
     * @return an array of datatype objects of the selected compound members.
     */
    public abstract Datatype[] getSelectedMemberTypes();
}

View File

@ -0,0 +1,366 @@
/*****************************************************************************
* Copyright by The HDF Group. *
* Copyright by the Board of Trustees of the University of Illinois. *
* All rights reserved. *
* *
* This file is part of the HDF Java Products distribution. *
* The full copyright notice, including terms governing use, modification, *
* and redistribution, is contained in the files COPYING and Copyright.html. *
* COPYING can be found at the root of the source code distribution tree. *
* Or, see https://support.hdfgroup.org/products/licenses.html *
* If you do not have access to either file, you may request a copy from *
* help@hdfgroup.org. *
****************************************************************************/
package hdf.object;
/**
 * An interface that provides general I/O operations for object data. For
 * example, reading data content from the file into memory or writing data
 * content from memory into the file.
 * <p>
 *
 * @see hdf.object.HObject
 *
 * @version 1.0 4/2/2018
 * @author Jordan T. Henderson
 */
public interface DataFormat {
    /**
     * Checks whether this data object has been initialized (init() completed).
     *
     * @return true if the object has been initialized.
     */
    public abstract boolean isInited();

    /**
     * Initializes the data object, e.g. retrieving rank, dimension and selection
     * information from the file.
     */
    public abstract void init();

    /**
     * Retrieves the object's data from the file.
     *
     * @return the object's data.
     *
     * @throws Exception
     *             if the data can not be retrieved
     */
    public abstract Object getData() throws Exception, OutOfMemoryError;

    /**
     * Sets the memory buffer that holds this object's data. A subsequent write()
     * will write this buffer to the file.
     *
     * @param data
     *            the data to write.
     */
    public abstract void setData(Object data);

    /**
     * Clears the current data buffer in memory and forces the next read() to load
     * the data from file.
     * <p>
     * The function read() loads data from file into memory only if the data is not
     * read. If data is already in memory, read() just returns the memory buffer.
     * Sometimes we want to force read() to re-read data from file. For example,
     * when the selection is changed, we need to re-read the data.
     *
     * @see #getData()
     * @see #read()
     */
    public abstract void clearData();

    /**
     * Reads the data from file.
     * <p>
     * read() reads the data from file to a memory buffer and returns the memory
     * buffer. The dataset object does not hold the memory buffer. To store the
     * memory buffer in the dataset object, one must call getData().
     * <p>
     * By default, the whole dataset is read into memory. Users can also select
     * a subset to read. Subsetting is done in an implicit way.
     *
     * @return the data read from file.
     *
     * @see #getData()
     *
     * @throws Exception
     *             if object can not be read
     * @throws OutOfMemoryError
     *             if memory is exhausted
     */
    public abstract Object read() throws Exception, OutOfMemoryError;

    /**
     * Writes a memory buffer to the object in the file.
     *
     * @param buf
     *            the data to write
     *
     * @throws Exception
     *             if data can not be written
     */
    public abstract void write(Object buf) throws Exception;

    /**
     * Writes the current memory buffer to the object in the file.
     *
     * @throws Exception
     *             if data can not be written
     */
    public abstract void write() throws Exception;

    /**
     * Converts the data values of this data object to appropriate Java integers if
     * they are unsigned integers.
     *
     * @see hdf.object.Dataset#convertToUnsignedC(Object)
     * @see hdf.object.Dataset#convertFromUnsignedC(Object, Object)
     *
     * @return the converted data buffer.
     */
    public Object convertFromUnsignedC();

    /**
     * Converts Java integer data values of this data object back to unsigned C-type
     * integer data if they are unsigned integers.
     *
     * @see hdf.object.Dataset#convertToUnsignedC(Object)
     * @see hdf.object.Dataset#convertFromUnsignedC(Object, Object)
     *
     * @return the converted data buffer.
     */
    public Object convertToUnsignedC();

    /**
     * Returns the fill values for the data object.
     *
     * @return the fill values for the data object.
     */
    public abstract Object getFillValue();

    /**
     * Returns the datatype of the data object.
     *
     * @return the datatype of the data object.
     */
    public abstract Datatype getDatatype();

    /**
     * Returns the rank (number of dimensions) of the data object. It returns a
     * negative number if it failed to retrieve the dimension information from
     * the file.
     *
     * @return the number of dimensions of the data object.
     */
    public abstract int getRank();

    /**
     * Returns the array that contains the dimension sizes of the data value of
     * the data object. It returns null if it failed to retrieve the dimension
     * information from the file.
     *
     * @return the dimension sizes of the data object.
     */
    public abstract long[] getDims();

    /****************************************************************
     * The following four definitions are used for data subsetting. *
     ****************************************************************/

    /**
     * Returns the dimension sizes of the selected subset.
     * <p>
     * The SelectedDims is the number of data points of the selected subset.
     * Applications can use this array to change the size of selected subset.
     *
     * The selected size must be less than or equal to the current dimension size.
     * Combined with the starting position, selected sizes and stride, the subset of
     * a rectangle selection is fully defined.
     * <p>
     * For example, if a 4 X 5 dataset is as follows:
     *
     * <pre>
     *     0,  1,  2,  3,  4
     *    10, 11, 12, 13, 14
     *    20, 21, 22, 23, 24
     *    30, 31, 32, 33, 34
     * long[] dims = {4, 5};
     * long[] startDims = {1, 2};
     * long[] selectedDims = {3, 3};
     * long[] selectedStride = {1, 1};
     * then the following subset is selected by the startDims and selectedDims
     *     12, 13, 14
     *     22, 23, 24
     *     32, 33, 34
     * </pre>
     *
     * @return the dimension sizes of the selected subset.
     */
    public abstract long[] getSelectedDims();

    /**
     * Returns the starting position of a selected subset.
     * <p>
     * Applications can use this array to change the starting position of a
     * selection. Combined with the selected dimensions, selected sizes and stride,
     * the subset of a rectangle selection is fully defined.
     * <p>
     * For example, if a 4 X 5 dataset is as follows:
     *
     * <pre>
     *     0,  1,  2,  3,  4
     *    10, 11, 12, 13, 14
     *    20, 21, 22, 23, 24
     *    30, 31, 32, 33, 34
     * long[] dims = {4, 5};
     * long[] startDims = {1, 2};
     * long[] selectedDims = {3, 3};
     * long[] selectedStride = {1, 1};
     * then the following subset is selected by the startDims and selectedDims
     *     12, 13, 14
     *     22, 23, 24
     *     32, 33, 34
     * </pre>
     *
     * @return the starting position of a selected subset.
     */
    public abstract long[] getStartDims();

    /**
     * Returns the selectedStride of the selected dataset.
     * <p>
     * Applications can use this array to change how many elements to move in each
     * dimension.
     *
     * Combined with the starting position and selected sizes, the subset of a
     * rectangle selection is defined.
     * <p>
     * For example, if a 4 X 5 dataset is as follows:
     *
     * <pre>
     *     0,  1,  2,  3,  4
     *    10, 11, 12, 13, 14
     *    20, 21, 22, 23, 24
     *    30, 31, 32, 33, 34
     * long[] dims = {4, 5};
     * long[] startDims = {0, 0};
     * long[] selectedDims = {2, 2};
     * long[] selectedStride = {2, 3};
     * then the following subset is selected by the startDims and selectedDims
     *     0,  3
     *    20, 23
     * </pre>
     *
     * @return the selectedStride of the selected dataset.
     */
    public abstract long[] getStride();

    /**
     * Returns the indices of display order.
     * <p>
     *
     * selectedIndex[] is provided for two purposes:
     * <OL>
     * <LI>selectedIndex[] is used to indicate the order of dimensions for display.
     * selectedIndex[0] is for the row, selectedIndex[1] is for the column and
     * selectedIndex[2] for the depth.
     * <p>
     * For example, for a four dimension dataset, if selectedIndex[] = {1, 2, 3},
     * then dim[1] is selected as row index, dim[2] is selected as column index and
     * dim[3] is selected as depth index.
     * <LI>selectedIndex[] is also used to select dimensions for display for
     * datasets with three or more dimensions. We assume that applications such as
     * HDFView can only display data values up to three dimensions (2D
     * spreadsheet/image with a third dimension which the 2D spreadsheet/image is
     * selected from). For datasets with more than three dimensions, we need
     * selectedIndex[] to tell applications which three dimensions are chosen for
     * display. <br>
     * For example, for a four dimension dataset, if selectedIndex[] = {1, 2, 3},
     * then dim[1] is selected as row index, dim[2] is selected as column index and
     * dim[3] is selected as depth index. dim[0] is not selected. Its location is
     * fixed at 0 by default.
     * </OL>
     *
     * @return the array of the indices of display order.
     */
    public int[] getSelectedIndex();

    /**************************************************************************
     * The following two definitions are used primarily for GUI applications. *
     **************************************************************************/

    /**
     * Returns the dimension size of the vertical axis.
     *
     * <p>
     * This function is used by GUI applications such as HDFView. GUI applications
     * display a dataset in a 2D table or 2D image. The display order is specified
     * by the index array of selectedIndex as follow:
     * <dl>
     * <dt>selectedIndex[0] -- height</dt>
     * <dd>The vertical axis</dd>
     * <dt>selectedIndex[1] -- width</dt>
     * <dd>The horizontal axis</dd>
     * <dt>selectedIndex[2] -- depth</dt>
     * <dd>The depth axis is used for 3 or more dimensional datasets.</dd>
     * </dl>
     * Applications can use getSelectedIndex() to access and change the display
     * order. For example, in a 2D dataset of 200x50 (dim0=200, dim1=50), the
     * following code will set the height=200 and width=50.
     *
     * <pre>
     * int[] selectedIndex = dataset.getSelectedIndex();
     * selectedIndex[0] = 0;
     * selectedIndex[1] = 1;
     * </pre>
     *
     * @see #getSelectedIndex()
     * @see #getWidth()
     *
     * @return the size of dimension of the vertical axis.
     */
    public long getHeight();

    /**
     * Returns the dimension size of the horizontal axis.
     *
     * <p>
     * This function is used by GUI applications such as HDFView. GUI applications
     * display a dataset in 2D Table or 2D Image. The display order is specified by
     * the index array of selectedIndex as follow:
     * <dl>
     * <dt>selectedIndex[0] -- height</dt>
     * <dd>The vertical axis</dd>
     * <dt>selectedIndex[1] -- width</dt>
     * <dd>The horizontal axis</dd>
     * <dt>selectedIndex[2] -- depth</dt>
     * <dd>The depth axis, which is used for 3 or more dimension datasets.</dd>
     * </dl>
     * Applications can use getSelectedIndex() to access and change the display
     * order. For example, in a 2D dataset of 200x50 (dim0=200, dim1=50), the
     * following code will set the height=200 and width=50.
     *
     * <pre>
     * int[] selectedIndex = dataset.getSelectedIndex();
     * selectedIndex[0] = 0;
     * selectedIndex[1] = 1;
     * </pre>
     *
     * @see #getSelectedIndex()
     * @see #getHeight()
     *
     * @return the size of dimension of the horizontal axis.
     */
    public long getWidth();

    /**
     * Returns the string representation of compression information.
     * <p>
     * For example, "SZIP: Pixels per block = 8: H5Z_FILTER_CONFIG_DECODE_ENABLED".
     *
     * @return the string representation of compression information.
     */
    public abstract String getCompression();

    /**
     * Get runtime Class of the original data buffer if converted.
     *
     * @return the Class of the original data buffer
     */
    @SuppressWarnings("rawtypes")
    public abstract Class getOriginalClass();
}

File diff suppressed because it is too large Load Diff

View File

@ -0,0 +1,933 @@
/*****************************************************************************
* Copyright by The HDF Group. *
* Copyright by the Board of Trustees of the University of Illinois. *
* All rights reserved. *
* *
* This file is part of the HDF Java Products distribution. *
* The full copyright notice, including terms governing use, modification, *
* and redistribution, is contained in the files COPYING and Copyright.html. *
* COPYING can be found at the root of the source code distribution tree. *
* Or, see https://support.hdfgroup.org/products/licenses.html *
* If you do not have access to either file, you may request a copy from *
* help@hdfgroup.org. *
****************************************************************************/
package hdf.object;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
/**
* Datatype is an abstract class that defines datatype characteristics and APIs for a data type.
* <p>
* A datatype has four basic characteristics: class, size, byte order and sign. These
* characteristics are defined in the
* <a href="https://support.hdfgroup.org/HDF5/doc/UG/HDF5_Users_Guide-Responsive%20HTML5/index.html">HDF5 User's Guide</a>.
* <p>
* These characteristics apply to all the sub-classes. The sub-classes may have different ways to
* describe a datatype. We here define the <strong> native datatype</strong> to the datatype used by
* the sub-class. For example, H5Datatype uses a datatype identifier (hid_t) to specify a datatype.
* NC2Datatype uses ucar.nc2.DataType object to describe its datatype. "Native" here is different
* from the "native" definition in the HDF5 library.
* <p>
* Two functions, createNative() and fromNative(), are defined to convert the general
* characteristics to/from the native datatype. Sub-classes must implement these functions so that
* the conversion will be done correctly. The values of the CLASS member are not identical to HDF5
* values for a datatype class.
* <p>
*
* @version 1.1 9/4/2007
* @author Peter X. Cao
*/
public abstract class Datatype extends HObject implements MetaDataContainer {
    private static final long serialVersionUID = -581324710549963177L;

    private static final org.slf4j.Logger log = org.slf4j.LoggerFactory.getLogger(Datatype.class);

    /**
     * The default definition for datatype size, order, and sign: use whatever is
     * native to the underlying platform/library.
     */
    public static final int NATIVE = -1;

    /**
     * Datatype class is unknown/unset. See <a href="https://support.hdfgroup.org/HDF5/doc/UG/HDF5_Users_Guide-Responsive%20HTML5/index.html">HDF5 User's Guide</a>
     */
    public static final int CLASS_NO_CLASS = -1;

    /**
     * Integer datatype class. See <a href="https://support.hdfgroup.org/HDF5/doc/UG/HDF5_Users_Guide-Responsive%20HTML5/index.html">HDF5 User's Guide</a>
     */
    public static final int CLASS_INTEGER = 0;

    /**
     * Floating-point datatype class. See <a href="https://support.hdfgroup.org/HDF5/doc/UG/HDF5_Users_Guide-Responsive%20HTML5/index.html">HDF5 User's Guide</a>
     */
    public static final int CLASS_FLOAT = 1;

    /**
     * Character (8-bit integer) datatype class. See <a href="https://support.hdfgroup.org/HDF5/doc/UG/HDF5_Users_Guide-Responsive%20HTML5/index.html">HDF5 User's Guide</a>
     */
    public static final int CLASS_CHAR = 2;

    /**
     * String datatype class. See <a href="https://support.hdfgroup.org/HDF5/doc/UG/HDF5_Users_Guide-Responsive%20HTML5/index.html">HDF5 User's Guide</a>
     */
    public static final int CLASS_STRING = 3;

    /**
     * Bitfield datatype class. See <a href="https://support.hdfgroup.org/HDF5/doc/UG/HDF5_Users_Guide-Responsive%20HTML5/index.html">HDF5 User's Guide</a>
     */
    public static final int CLASS_BITFIELD = 4;

    /**
     * Opaque datatype class. See <a href="https://support.hdfgroup.org/HDF5/doc/UG/HDF5_Users_Guide-Responsive%20HTML5/index.html">HDF5 User's Guide</a>
     */
    public static final int CLASS_OPAQUE = 5;

    /**
     * Compound datatype class. See <a href="https://support.hdfgroup.org/HDF5/doc/UG/HDF5_Users_Guide-Responsive%20HTML5/index.html">HDF5 User's Guide</a>
     */
    public static final int CLASS_COMPOUND = 6;

    /**
     * Reference datatype class. See <a href="https://support.hdfgroup.org/HDF5/doc/UG/HDF5_Users_Guide-Responsive%20HTML5/index.html">HDF5 User's Guide</a>
     */
    public static final int CLASS_REFERENCE = 7;

    /**
     * Enumeration datatype class. See <a href="https://support.hdfgroup.org/HDF5/doc/UG/HDF5_Users_Guide-Responsive%20HTML5/index.html">HDF5 User's Guide</a>
     */
    public static final int CLASS_ENUM = 8;

    /**
     * Variable-length datatype class. See <a href="https://support.hdfgroup.org/HDF5/doc/UG/HDF5_Users_Guide-Responsive%20HTML5/index.html">HDF5 User's Guide</a>
     */
    public static final int CLASS_VLEN = 9;

    /**
     * Array datatype class. See <a href="https://support.hdfgroup.org/HDF5/doc/UG/HDF5_Users_Guide-Responsive%20HTML5/index.html">HDF5 User's Guide</a>
     */
    public static final int CLASS_ARRAY = 10;

    /**
     * Time datatype class. See <a href="https://support.hdfgroup.org/HDF5/doc/UG/HDF5_Users_Guide-Responsive%20HTML5/index.html">HDF5 User's Guide</a>
     */
    public static final int CLASS_TIME = 11;

    /**
     * Little-endian byte order. See <a href="https://support.hdfgroup.org/HDF5/doc/UG/HDF5_Users_Guide-Responsive%20HTML5/index.html">HDF5 User's Guide</a>
     */
    public static final int ORDER_LE = 0;

    /**
     * Big-endian byte order. See <a href="https://support.hdfgroup.org/HDF5/doc/UG/HDF5_Users_Guide-Responsive%20HTML5/index.html">HDF5 User's Guide</a>
     */
    public static final int ORDER_BE = 1;

    /**
     * VAX byte order. See <a href="https://support.hdfgroup.org/HDF5/doc/UG/HDF5_Users_Guide-Responsive%20HTML5/index.html">HDF5 User's Guide</a>
     */
    public static final int ORDER_VAX = 2;

    /**
     * No/unspecified byte order. See <a href="https://support.hdfgroup.org/HDF5/doc/UG/HDF5_Users_Guide-Responsive%20HTML5/index.html">HDF5 User's Guide</a>
     */
    public static final int ORDER_NONE = 3;

    /**
     * Unsigned integer sign scheme. See <a href="https://support.hdfgroup.org/HDF5/doc/UG/HDF5_Users_Guide-Responsive%20HTML5/index.html">HDF5 User's Guide</a>
     */
    public static final int SIGN_NONE = 0;

    /**
     * Two's-complement signed integer sign scheme. See <a href="https://support.hdfgroup.org/HDF5/doc/UG/HDF5_Users_Guide-Responsive%20HTML5/index.html">HDF5 User's Guide</a>
     */
    public static final int SIGN_2 = 1;

    /**
     * Number of valid sign schemes. See <a href="https://support.hdfgroup.org/HDF5/doc/UG/HDF5_Users_Guide-Responsive%20HTML5/index.html">HDF5 User's Guide</a>
     */
    public static final int NSGN = 2;

    // Lazily-built human-readable description; cached by getDescription().
    protected String datatypeDescription = null;

    /**
     * The class of the datatype (one of the CLASS_* constants).
     */
    protected int datatypeClass;

    /**
     * The size (in bytes) of the datatype, or NATIVE.
     */
    protected long datatypeSize;

    /**
     * The byte order of the datatype. Valid values are ORDER_LE, ORDER_BE, and
     * ORDER_VAX.
     */
    protected int datatypeOrder;

    /**
     * The sign of the datatype (SIGN_NONE, SIGN_2 or NATIVE).
     */
    protected int datatypeSign;

    /**
     * The base datatype of this datatype (null if this datatype is atomic).
     */
    protected Datatype baseType;

    /**
     * The dimensions of the ARRAY element of an ARRAY datatype.
     */
    protected long[] arrayDims;

    /**
     * Determines whether this datatype is a variable-length type.
     */
    protected boolean isVLEN = false;

    // True for variable-length strings (CLASS_STRING with negative size).
    protected boolean isVariableStr = false;

    /**
     * The (value, name) pairs of enum members.
     */
    protected Map<String, String> enumMembers;

    /**
     * The list of names of members of a compound Datatype.
     */
    protected List<String> compoundMemberNames;

    /**
     * The list of types of members of a compound Datatype.
     */
    protected List<Datatype> compoundMemberTypes;

    /**
     * The list of offsets of members of a compound Datatype.
     */
    protected List<Long> compoundMemberOffsets;
    /**
     * Constructs a named datatype with a given file, name and path.
     * Delegates to the four-argument constructor with a null oid.
     *
     * @param theFile
     *            the HDF file.
     * @param typeName
     *            the name of the datatype, e.g "12-bit Integer".
     * @param typePath
     *            the full group path of the datatype, e.g. "/datatypes/".
     */
    public Datatype(FileFormat theFile, String typeName, String typePath) {
        this(theFile, typeName, typePath, null);
    }
    /**
     * @deprecated Not for public use in the future.<br>
     *             Use {@link #Datatype(FileFormat, String, String)} instead.
     *
     * @param theFile
     *            the HDF file.
     * @param typeName
     *            the name of the datatype, e.g "12-bit Integer".
     * @param typePath
     *            the full group path of the datatype, e.g. "/datatypes/".
     * @param oid
     *            the oid of the datatype.
     */
    @Deprecated
    public Datatype(FileFormat theFile, String typeName, String typePath, long[] oid) {
        super(theFile, typeName, typePath, oid);
    }
    /**
     * Constructs a Datatype with specified class, size, byte order and sign.
     * Delegates to the five-argument constructor with a null base type.
     * <p>
     * The following is a list of a few examples of Datatype.
     * <ol>
     * <li>to create unsigned native integer<br>
     * Datatype type = new Dataype(Datatype.CLASS_INTEGER, Datatype.NATIVE, Datatype.NATIVE, Datatype.SIGN_NONE);
     * <li>to create 16-bit signed integer with big endian<br>
     * Datatype type = new Dataype(Datatype.CLASS_INTEGER, 2, Datatype.ORDER_BE, Datatype.NATIVE);
     * <li>to create native float<br>
     * Datatype type = new Dataype(Datatype.CLASS_FLOAT, Datatype.NATIVE, Datatype.NATIVE, Datatype.NATIVE);
     * <li>to create 64-bit double<br>
     * Datatype type = new Dataype(Datatype.CLASS_FLOAT, 8, Datatype.NATIVE, Datatype.NATIVE);
     * </ol>
     *
     * @param tclass
     *            the class of the datatype, e.g. CLASS_INTEGER, CLASS_FLOAT and etc.
     * @param tsize
     *            the size of the datatype in bytes, e.g. for a 32-bit integer, the size is 4.
     *            Valid values are NATIVE or a positive value.
     * @param torder
     *            the byte order of the datatype. Valid values are ORDER_LE, ORDER_BE, ORDER_VAX,
     *            ORDER_NONE and NATIVE.
     * @param tsign
     *            the sign of the datatype. Valid values are SIGN_NONE, SIGN_2 and NATIVE.
     *
     * @throws Exception
     *             if there is an error
     */
    public Datatype(int tclass, int tsize, int torder, int tsign) throws Exception {
        this(tclass, tsize, torder, tsign, null);
    }
    /**
     * Constructs a Datatype with specified class, size, byte order, sign and base
     * type. Delegates to the six-argument constructor with a null parent type.
     * <p>
     * The following is a list of a few examples of Datatype.
     * <ol>
     * <li>to create unsigned native integer<br>
     * Datatype type = new Dataype(Datatype.CLASS_INTEGER, Datatype.NATIVE, Datatype.NATIVE, Datatype.SIGN_NONE);
     * <li>to create 16-bit signed integer with big endian<br>
     * Datatype type = new Dataype(Datatype.CLASS_INTEGER, 2, Datatype.ORDER_BE, Datatype.NATIVE);
     * <li>to create native float<br>
     * Datatype type = new Dataype(Datatype.CLASS_FLOAT, Datatype.NATIVE, Datatype.NATIVE, Datatype.NATIVE);
     * <li>to create 64-bit double<br>
     * Datatype type = new Dataype(Datatype.CLASS_FLOAT, 8, Datatype.NATIVE, Datatype.NATIVE);
     * </ol>
     *
     * @param tclass
     *            the class of the datatype, e.g. CLASS_INTEGER, CLASS_FLOAT and
     *            etc.
     * @param tsize
     *            the size of the datatype in bytes, e.g. for a 32-bit integer,
     *            the size is 4.
     *            Valid values are NATIVE or a positive value.
     * @param torder
     *            the byte order of the datatype. Valid values are ORDER_LE,
     *            ORDER_BE, ORDER_VAX, ORDER_NONE and NATIVE.
     * @param tsign
     *            the sign of the datatype. Valid values are SIGN_NONE, SIGN_2 and NATIVE.
     * @param tbase
     *            the base datatype of the new datatype
     *
     * @throws Exception
     *             if there is an error
     */
    public Datatype(int tclass, int tsize, int torder, int tsign, Datatype tbase) throws Exception {
        this(tclass, tsize, torder, tsign, tbase, null);
    }
    /**
     * Constructs a Datatype with specified class, size, byte order and sign.
     * This is the designated constructor: all argument validation and field
     * initialization happens here.
     * <p>
     * The following is a list of a few examples of Datatype.
     * <ol>
     * <li>to create unsigned native integer<br>
     * Datatype type = new Dataype(Datatype.CLASS_INTEGER, Datatype.NATIVE, Datatype.NATIVE, Datatype.SIGN_NONE);
     * <li>to create 16-bit signed integer with big endian<br>
     * Datatype type = new Dataype(Datatype.CLASS_INTEGER, 2, Datatype.ORDER_BE, Datatype.NATIVE);
     * <li>to create native float<br>
     * Datatype type = new Dataype(Datatype.CLASS_FLOAT, Datatype.NATIVE, Datatype.NATIVE, Datatype.NATIVE);
     * <li>to create 64-bit double<br>
     * Datatype type = new Dataype(Datatype.CLASS_FLOAT, 8, Datatype.NATIVE, Datatype.NATIVE);
     * </ol>
     *
     * @param tclass
     *            the class of the datatype, e.g. CLASS_INTEGER, CLASS_FLOAT and etc.
     * @param tsize
     *            the size of the datatype in bytes, e.g. for a 32-bit integer, the size is 4.
     *            Valid values are NATIVE or a positive value.
     * @param torder
     *            the byte order of the datatype. Valid values are ORDER_LE, ORDER_BE, ORDER_VAX,
     *            ORDER_NONE and NATIVE.
     * @param tsign
     *            the sign of the datatype. Valid values are SIGN_NONE, SIGN_2 and NATIVE.
     * @param tbase
     *            the base datatype of the new datatype
     * @param pbase
     *            the parent datatype of the new datatype
     *
     * @throws Exception
     *             if any argument is outside its valid range
     */
    public Datatype(int tclass, int tsize, int torder, int tsign, Datatype tbase, Datatype pbase) throws Exception {
        // The only negative size allowed is the NATIVE sentinel.
        if ((tsize == 0) || (tsize < 0 && tsize != NATIVE))
            throw new Exception("invalid datatype size - " + tsize);
        if ((torder != ORDER_LE) && (torder != ORDER_BE) && (torder != ORDER_VAX)
                && (torder != ORDER_NONE) && (torder != NATIVE))
            throw new Exception("invalid datatype order - " + torder);
        if ((tsign != SIGN_NONE) && (tsign != SIGN_2) && (tsign != NATIVE))
            throw new Exception("invalid datatype sign - " + tsign);

        datatypeClass = tclass;
        datatypeSize = tsize;
        datatypeOrder = torder;
        datatypeSign = tsign;
        enumMembers = null;
        baseType = tbase;
        arrayDims = null;
        // A string with the NATIVE (negative) size is treated as variable-length.
        isVariableStr = (datatypeClass == Datatype.CLASS_STRING) && (tsize < 0);
        isVLEN = (datatypeClass == Datatype.CLASS_VLEN) || isVariableStr;
        compoundMemberNames = new ArrayList<>();
        compoundMemberTypes = new ArrayList<>();
        compoundMemberOffsets = new ArrayList<>();
        log.trace("datatypeClass={} datatypeSize={} datatypeOrder={} datatypeSign={} baseType={}",
                datatypeClass, datatypeSize, datatypeOrder, datatypeSign, baseType);
    }
    /**
     * Constructs a Datatype with a given native datatype identifier.
     * Delegates to {@link #Datatype(long, Datatype)} with a null parent.
     * <p>
     * For example, if the datatype identifier is a 32-bit unsigned integer created from HDF5,
     *
     * <pre>
     * long tid = H5.H5Tcopy(HDF5Constants.H5T_NATIVE_UNINT32);
     * Datatype dtype = new Datatype(tid);
     * </pre>
     *
     * will construct a datatype equivalent to new Datatype(CLASS_INTEGER, 4, NATIVE, SIGN_NONE);
     *
     * @see #fromNative(long tid)
     * @param tid
     *            the native datatype identifier.
     *
     * @throws Exception
     *             if there is an error
     */
    public Datatype(long tid) throws Exception {
        this(tid, null);
    }
    /**
     * Constructs a Datatype with a given native datatype identifier.
     * <p>
     * For example, if the datatype identifier is a 32-bit unsigned integer created from HDF5,
     *
     * <pre>
     * long tid = H5.H5Tcopy(HDF5Constants.H5T_NATIVE_UNINT32);
     * Datatype dtype = new Datatype(tid);
     * </pre>
     *
     * will construct a datatype equivalent to new Datatype(CLASS_INTEGER, 4, NATIVE, SIGN_NONE);
     * <p>
     * NOTE(review): this base implementation does not consume {@code tid} — it
     * initializes the object as CLASS_NO_CLASS/NATIVE. Presumably subclasses are
     * expected to call fromNative(tid) to fill in the characteristics; confirm
     * against H5Datatype.
     *
     * @see #fromNative(long tid)
     * @param tid
     *            the native datatype identifier.
     * @param pbase
     *            the parent datatype of the new datatype
     *
     * @throws Exception
     *             if there is an error
     */
    public Datatype(long tid, Datatype pbase) throws Exception {
        this(CLASS_NO_CLASS, NATIVE, NATIVE, NATIVE, null, pbase);
    }
/**
* Opens access to this named datatype. Sub-classes must replace this default implementation. For
* example, in H5Datatype, open() function H5.H5Topen(loc_id, name) to get the datatype identifier.
*
* @return the datatype identifier if successful; otherwise returns negative value.
*/
@Override
public long open() {
return -1;
}
    /**
     * Closes a datatype identifier previously returned by {@link #open()}.
     * <p>
     * Sub-classes must provide the format-specific implementation.
     *
     * @param id
     *            the datatype identifier to close.
     */
    @Override
    public abstract void close(long id);
/**
* Returns the class of the datatype. Valid values are:
* <ul>
* <li>CLASS_NO_CLASS
* <li>CLASS_INTEGER
* <li>CLASS_FLOAT
* <li>CLASS_CHAR
* <li>CLASS_STRING
* <li>CLASS_BITFIELD
* <li>CLASS_OPAQUE
* <li>CLASS_COMPOUND
* <li>CLASS_REFERENCE
* <li>CLASS_ENUM
* <li>CLASS_VLEN
* <li>CLASS_ARRAY
* </ul>
*
* @return the class of the datatype.
*/
public int getDatatypeClass() {
return datatypeClass;
}
/**
* Returns the size of the datatype in bytes. For example, for a 32-bit
* integer, the size is 4 (bytes).
*
* @return the size of the datatype.
*/
public long getDatatypeSize() {
return datatypeSize;
}
/**
* Returns the byte order of the datatype. Valid values are
* <ul>
* <li>ORDER_LE
* <li>ORDER_BE
* <li>ORDER_VAX
* <li>ORDER_NONE
* </ul>
*
* @return the byte order of the datatype.
*/
public int getDatatypeOrder() {
return datatypeOrder;
}
/**
* Returns the sign (SIGN_NONE, SIGN_2) of an integer datatype.
*
* @return the sign of the datatype.
*/
public int getDatatypeSign() {
return datatypeSign;
}
/**
* Returns the base datatype for this datatype.
* <p>
* For example, in a dataset of type ARRAY of integer, the datatype of the dataset is ARRAY. The
* datatype of the base type is integer.
*
* @return the datatype of the contained basetype.
*/
public Datatype getDatatypeBase() {
return baseType;
}
/**
* Sets the (key, value) pairs of enum members for enum datatype.
* <p>
* For Example,
* <dl>
* <dt>setEnumMembers("-40=lowTemp, 90=highTemp")</dt>
* <dd>sets the key of enum member lowTemp to -40 and highTemp to 90.</dd>
* <dt>setEnumMembers("lowTemp, highTemp")</dt>
* <dd>sets enum members to defaults, i.e. 0=lowTemp and 1=highTemp</dd>
* <dt>setEnumMembers("10=lowTemp, highTemp")</dt>
* <dd>sets enum member lowTemp to 10 and highTemp to 11.</dd>
* </dl>
*
* @param enumStr
* the (key, value) pairs of enum members
*/
public final void setEnumMembers(String enumStr) {
log.trace("setEnumMembers: is_enum enum_members={}", enumStr);
enumMembers = new HashMap<>();
String[] entries = enumStr.split(",");
for (String entry : entries) {
String[] keyValue = entry.split("=");
enumMembers.put(keyValue[0].trim(), keyValue[1].trim());
if (log.isTraceEnabled())
log.trace("setEnumMembers: is_enum value={} name={}", keyValue[0].trim(), keyValue[1].trim());
}
}
/**
* Returns the Map&lt;String,String&gt; pairs of enum members for enum datatype.
*
* @return enumStr Map&lt;String,String%gt; pairs of enum members
*/
public final Map<String, String> getEnumMembers() {
if (enumMembers == null) {
enumMembers = new HashMap<>();
enumMembers.put("1", "0");
enumMembers.put("2", "1");
}
return enumMembers;
}
/**
* Returns the HashMap pairs of enum members for enum datatype.
* <p>
* For Example,
* <dl>
* <dt>getEnumMembersAsString()</dt>
* <dd>returns "10=lowTemp, 40=highTemp"</dd>
* </dl>
*
* @return enumStr the (key, value) pairs of enum members
*/
@SuppressWarnings("rawtypes")
public final String getEnumMembersAsString() {
if (enumMembers == null) {
enumMembers = new HashMap<>();
enumMembers.put("1", "0");
enumMembers.put("2", "1");
}
StringBuilder enumStr = new StringBuilder();
Iterator<Entry<String, String>> entries = enumMembers.entrySet().iterator();
int i = enumMembers.size();
while (entries.hasNext()) {
Entry thisEntry = entries.next();
enumStr.append((String) thisEntry.getKey())
.append("=")
.append((String) thisEntry.getValue());
i--;
if (i > 0)
enumStr.append(", ");
}
return enumStr.toString();
}
/**
* Returns the dimensions of an Array Datatype.
*
* @return dims the dimensions of the Array Datatype
*/
public final long[] getArrayDims() {
return arrayDims;
}
public final List<String> getCompoundMemberNames() {
return compoundMemberNames;
}
public final List<Datatype> getCompoundMemberTypes() {
return compoundMemberTypes;
}
    /**
     * Converts the datatype object to a native datatype.
     *
     * Subclasses must implement it so that this datatype will be converted accordingly. Use close() to
     * close the native identifier; otherwise, the datatype will be left open.
     * <p>
     * For example, a HDF5 datatype created from<br>
     *
     * <pre>
     * H5Dataype dtype = new H5Datatype(CLASS_INTEGER, 4, NATIVE, SIGN_NONE);
     * long tid = dtype.createNative();
     * </pre>
     *
     * The "tid" will be the HDF5 datatype id of a 32-bit unsigned integer, which is equivalent to
     *
     * <pre>
     * long tid = H5.H5Tcopy(HDF5Constants.H5T_NATIVE_UNINT32);
     * </pre>
     *
     * @return the identifier of the native datatype.
     */
    public abstract long createNative();
    /**
     * Set datatype characteristics (class, size, byte order and sign) from a given datatype identifier.
     * <p>
     * Sub-classes must implement it so that this datatype will be converted accordingly.
     * <p>
     * For example, if the type identifier is a 32-bit unsigned integer created from HDF5,
     *
     * <pre>
     * H5Datatype dtype = new H5Datatype();
     * dtype.fromNative(HDF5Constants.H5T_NATIVE_UNINT32);
     * </pre>
     *
     * Where dtype is equivalent to <br>
     * new H5Datatype(CLASS_INTEGER, 4, NATIVE, SIGN_NONE);
     *
     * @param nativeID
     *            the datatype identifier.
     */
    public abstract void fromNative(long nativeID);
/**
 * Returns a short text description of this datatype, e.g.
 * "32-bit unsigned integer" or "Compound".
 *
 * @return a short text description of this datatype
 */
public String getDescription() {
    log.trace("getDescription(): start");

    // An explicitly supplied description takes precedence over a generated one.
    if (datatypeDescription != null) {
        log.trace("getDescription(): finish");
        return datatypeDescription;
    }

    StringBuilder sb = new StringBuilder();

    switch (datatypeClass) {
        case CLASS_CHAR:
            sb.append("8-bit ");
            if (isUnsigned())
                sb.append("unsigned ");
            sb.append("integer");
            break;
        case CLASS_INTEGER:
            if (datatypeSize == NATIVE)
                sb.append("native ");
            else
                sb.append(datatypeSize * 8).append("-bit ");
            if (isUnsigned())
                sb.append("unsigned ");
            sb.append("integer");
            break;
        case CLASS_FLOAT:
            if (datatypeSize == NATIVE)
                sb.append("native floating-point");
            else
                sb.append(datatypeSize * 8).append("-bit floating-point");
            break;
        case CLASS_STRING:
            sb.append("String");
            break;
        case CLASS_REFERENCE:
            sb.append("Object reference");
            break;
        case CLASS_OPAQUE:
            if (datatypeSize == NATIVE)
                sb.append("native opaque");
            else
                sb.append(datatypeSize * 8).append("-bit opaque");
            break;
        case CLASS_BITFIELD:
            if (datatypeSize == NATIVE)
                sb.append("native bitfield");
            else
                sb.append(datatypeSize * 8).append("-bit bitfield");
            break;
        case CLASS_ENUM:
            if (datatypeSize == NATIVE)
                sb.append("native enum");
            else
                sb.append(datatypeSize * 8).append("-bit enum");
            break;
        case CLASS_ARRAY:
            sb.append("Array");
            // Dimensions rendered as e.g. " [3 x 4]".
            if (arrayDims != null) {
                sb.append(" [");
                for (int i = 0; i < arrayDims.length; i++) {
                    if (i > 0)
                        sb.append(" x ");
                    sb.append(arrayDims[i]);
                }
                sb.append("]");
            }
            break;
        case CLASS_COMPOUND:
            sb.append("Compound");
            break;
        case CLASS_VLEN:
            sb.append("Variable-length");
            break;
        default:
            sb.append("Unknown");
            break;
    }

    // Qualify with the base type, e.g. "Array [3] of 32-bit integer".
    if (baseType != null)
        sb.append(" of ").append(baseType.getDescription());

    log.trace("getDescription(): finish");
    return sb.toString();
}
/**
 * Checks whether this datatype is unsigned.
 * <p>
 * For a datatype with a base type, the base type decides. A compound type is
 * unsigned only if every one of its members is unsigned.
 *
 * @return true if the datatype is unsigned; otherwise, returns false.
 */
public boolean isUnsigned() {
    // Delegate to the base type when one exists.
    if (baseType != null)
        return baseType.isUnsigned();

    if (isCompound()) {
        if ((compoundMemberTypes == null) || compoundMemberTypes.isEmpty()) {
            log.debug("isUnsigned(): compoundMemberTypes is null");
            return false;
        }
        // Stop at the first signed member (original used a short-circuiting
        // '&&' accumulator, so later members were skipped there too).
        for (Datatype member : compoundMemberTypes) {
            if (!member.isUnsigned())
                return false;
        }
        return true;
    }

    return datatypeSign == Datatype.SIGN_NONE;
}
/**
 * Checks if this datatype is a text type.
 *
 * @return true if the datatype is text; false otherwise
 */
public abstract boolean isText();
/**
 * Checks whether this datatype is an integer type.
 *
 * @return {@code true} if the datatype class is {@code CLASS_INTEGER}; {@code false} otherwise
 */
public boolean isInteger() {
    return datatypeClass == Datatype.CLASS_INTEGER;
}
/**
 * Checks whether this datatype is a floating-point type.
 *
 * @return {@code true} if the datatype class is {@code CLASS_FLOAT}; {@code false} otherwise
 */
public boolean isFloat() {
    return datatypeClass == Datatype.CLASS_FLOAT;
}
/**
 * Checks whether this datatype is a variable-length string type.
 *
 * @return {@code true} if the datatype is a variable-length string; {@code false} otherwise
 */
public boolean isVarStr() {
    return this.isVariableStr;
}
/**
 * Checks whether this datatype is a variable-length type.
 *
 * @return {@code true} if the datatype is variable-length; {@code false} otherwise
 */
public boolean isVLEN() {
    return this.isVLEN;
}
/**
 * Checks whether this datatype is a compound type.
 *
 * @return {@code true} if the datatype class is {@code CLASS_COMPOUND}; {@code false} otherwise
 */
public boolean isCompound() {
    return datatypeClass == Datatype.CLASS_COMPOUND;
}
/**
 * Checks whether this datatype is an array type.
 *
 * @return {@code true} if the datatype class is {@code CLASS_ARRAY}; {@code false} otherwise
 */
public boolean isArray() {
    return datatypeClass == Datatype.CLASS_ARRAY;
}
/**
 * Checks whether this datatype is a string type.
 *
 * @return {@code true} if the datatype class is {@code CLASS_STRING}; {@code false} otherwise
 */
public boolean isString() {
    return datatypeClass == Datatype.CLASS_STRING;
}
/**
 * Checks whether this datatype is a character type.
 *
 * @return {@code true} if the datatype class is {@code CLASS_CHAR}; {@code false} otherwise
 */
public boolean isChar() {
    return datatypeClass == Datatype.CLASS_CHAR;
}
/**
 * Checks whether this datatype is an object-reference type.
 *
 * @return {@code true} if the datatype class is {@code CLASS_REFERENCE}; {@code false} otherwise
 */
public boolean isRef() {
    return datatypeClass == Datatype.CLASS_REFERENCE;
}
/**
 * Checks whether this datatype is an enum type.
 *
 * @return {@code true} if the datatype class is {@code CLASS_ENUM}; {@code false} otherwise
 */
public boolean isEnum() {
    return datatypeClass == Datatype.CLASS_ENUM;
}
/**
 * Checks whether this datatype is an opaque type.
 *
 * @return {@code true} if the datatype class is {@code CLASS_OPAQUE}; {@code false} otherwise
 */
public boolean isOpaque() {
    return datatypeClass == Datatype.CLASS_OPAQUE;
}
/**
 * Checks whether this datatype is a bitfield type.
 *
 * @return {@code true} if the datatype class is {@code CLASS_BITFIELD}; {@code false} otherwise
 */
public boolean isBitField() {
    return datatypeClass == Datatype.CLASS_BITFIELD;
}
/**
 * Retrieves the object's metadata from the file.
 * <p>
 * The base Datatype implementation carries no metadata and always returns
 * {@code null}; subclasses override this as needed.
 *
 * @return always {@code null} at this level
 *
 * @see hdf.object.DataFormat#getMetadata()
 */
@Override
@SuppressWarnings("rawtypes")
public List getMetadata() throws Exception {
    return null;
}
/**
 * Writes a piece of metadata into the file.
 * <p>
 * Not supported at this level; subclasses must override to provide it.
 *
 * @param info the metadata to write
 *
 * @throws UnsupportedOperationException always, in this base implementation
 *
 * @see hdf.object.DataFormat#writeMetadata(java.lang.Object)
 */
@Override
public void writeMetadata(Object info) throws Exception {
    throw new UnsupportedOperationException("Unsupported operation. Subclasses must implement Datatype:writeMetadata.");
}
/**
 * Deletes an existing piece of metadata from this object.
 * <p>
 * Not supported at this level; subclasses must override to provide it.
 *
 * @param info the metadata to remove
 *
 * @throws UnsupportedOperationException always, in this base implementation
 *
 * @see hdf.object.DataFormat#removeMetadata(java.lang.Object)
 */
@Override
public void removeMetadata(Object info) throws Exception {
    throw new UnsupportedOperationException("Unsupported operation. Subclasses must implement Datatype:removeMetadata.");
}
/**
 * Updates an existing piece of metadata attached to this object.
 * <p>
 * Not supported at this level; subclasses must override to provide it.
 *
 * @param info the metadata to update
 *
 * @throws UnsupportedOperationException always, in this base implementation
 *
 * @see hdf.object.DataFormat#updateMetadata(java.lang.Object)
 */
@Override
public void updateMetadata(Object info) throws Exception {
    throw new UnsupportedOperationException("Unsupported operation. Subclasses must implement Datatype:updateMetadata.");
}
/**
 * Returns the datatype description as the string form of this object.
 *
 * @return the value of {@link #getDescription()}
 */
@Override
public String toString() {
    return getDescription();
}
}

File diff suppressed because it is too large Load Diff

View File

@ -0,0 +1,326 @@
/*****************************************************************************
* Copyright by The HDF Group. *
* Copyright by the Board of Trustees of the University of Illinois. *
* All rights reserved. *
* *
* This file is part of the HDF Java Products distribution. *
* The full copyright notice, including terms governing use, modification, *
* and redistribution, is contained in the files COPYING and Copyright.html. *
* COPYING can be found at the root of the source code distribution tree. *
* Or, see https://support.hdfgroup.org/products/licenses.html *
* If you do not have access to either file, you may request a copy from *
* help@hdfgroup.org. *
****************************************************************************/
package hdf.object;
import java.util.Iterator;
import java.util.LinkedList;
import java.util.List;
import java.util.Queue;
import java.util.Stack;
import java.util.Vector;
/**
 * Group is an abstract class. Current implementing classes are the H4Group and
 * H5Group. This class includes general information of a group object such as
 * members of a group and common operations on groups.
 * <p>
 * Members of a group may include other groups, datasets or links.
 *
 * @version 1.1 9/4/2007
 * @author Peter X. Cao
 */
public abstract class Group extends HObject implements MetaDataContainer {

    private static final long serialVersionUID = 3913174542591568052L;

    /**
     * The list of members (Groups and Datasets) of this group in memory.
     * Lazily populated by {@link #getMemberList()}; may be null until then.
     */
    private List<HObject> memberList;

    /**
     * The parent group where this group is located. The parent of the root
     * group is null.
     */
    protected Group parent;

    /**
     * Total number of members of this group in file.
     */
    protected int nMembersInFile;

    /** A hard link. */
    public static final int LINK_TYPE_HARD = 0;

    /** A soft link. */
    public static final int LINK_TYPE_SOFT = 1;

    /** An external link. */
    public static final int LINK_TYPE_EXTERNAL = 64;

    /** Creation order of members is tracked. */
    public static final int CRT_ORDER_TRACKED = 1;

    /** Creation order of members is indexed. */
    public static final int CRT_ORDER_INDEXED = 2;

    /**
     * Constructs an instance of the group with specific name, path and parent
     * group. An HDF data object must have a name. The path is the group path
     * starting from the root. The parent group is the group where this group is
     * located.
     * <p>
     * For example, in H5Group(h5file, "grp", "/groups/", pgroup), "grp" is the
     * name of the group, "/groups/" is the group path of the group, and pgroup
     * is the group where "grp" is located.
     *
     * @param theFile
     *            the file containing the group.
     * @param grpName
     *            the name of this group, e.g. "grp01".
     * @param grpPath
     *            the full path of this group, e.g. "/groups/".
     * @param grpParent
     *            the parent of this group.
     */
    public Group(FileFormat theFile, String grpName, String grpPath, Group grpParent) {
        this(theFile, grpName, grpPath, grpParent, null);
    }

    /**
     * @deprecated Not for public use in the future.<br>
     *             Using {@link #Group(FileFormat, String, String, Group)}
     *
     * @param theFile
     *            the file containing the group.
     * @param grpName
     *            the name of this group, e.g. "grp01".
     * @param grpPath
     *            the full path of this group, e.g. "/groups/".
     * @param grpParent
     *            the parent of this group.
     * @param oid
     *            the oid of this group.
     */
    @Deprecated
    public Group(FileFormat theFile, String grpName, String grpPath, Group grpParent, long[] oid) {
        super(theFile, grpName, grpPath, oid);
        this.parent = grpParent;
    }

    /**
     * Clears up member list and other resources in memory for the group. Since
     * the destructor will clear memory space, the function is usually not
     * needed.
     */
    public void clear() {
        if (memberList != null) {
            // Empty the list in place. Equivalent to the former
            // ((Vector) memberList).setSize(0), without the unchecked cast.
            memberList.clear();
        }
    }

    /**
     * Adds an object to the member list of this group in memory.
     *
     * @param object
     *            the HObject to be added to the member list.
     */
    public void addToMemberList(HObject object) {
        if (memberList == null) {
            int size = Math.min(getNumberOfMembersInFile(), this
                    .getFileFormat().getMaxMembers());
            memberList = new Vector<>(size + 5);
        }

        if ((object != null) && !memberList.contains(object)) {
            memberList.add(object);
        }
    }

    /**
     * Removes an object from the member list of this group in memory.
     *
     * @param object
     *            the HObject (Group or Dataset) to be removed from the member
     *            list.
     */
    public void removeFromMemberList(HObject object) {
        if (memberList != null) {
            memberList.remove(object);
        }
    }

    /**
     * Returns the list of members of this group. The list is an java.util.List
     * containing HObjects.
     *
     * @return the list of members of this group.
     */
    public List<HObject> getMemberList() {
        FileFormat theFile = this.getFileFormat();

        if ((memberList == null) && (theFile != null)) {
            int size = Math.min(getNumberOfMembersInFile(), this.getFileFormat().getMaxMembers());
            memberList = new Vector<>(size + 5); // avoid infinite loop search for groups without members

            // find the memberList from the file by checking the group path and
            // name. group may be created out of the structure tree
            // (H4/5File.loadTree()).
            if (theFile.getFID() < 0) {
                try {
                    theFile.open(); // load the file structure
                }
                catch (Exception ex) {
                    // Best effort: if the file cannot be opened, the member
                    // list simply stays empty.
                }
            }

            HObject root = theFile.getRootObject();
            if (root == null) return memberList;

            Iterator<HObject> it = ((Group) root).depthFirstMemberList().iterator();
            Group g = null;
            Object uObj = null;
            while (it.hasNext()) {
                uObj = it.next();

                if (uObj instanceof Group) {
                    g = (Group) uObj;
                    if (g.getPath() != null) // add this check to get rid of null exception
                    {
                        if ((this.isRoot() && g.isRoot())
                                || (this.getPath().equals(g.getPath()) &&
                                        g.getName().endsWith(this.getName()))) {
                            memberList = g.getMemberList();
                            break;
                        }
                    }
                }
            }
        }

        return memberList;
    }

    /**
     * @return the members of this Group in breadth-first order.
     */
    public List<HObject> breadthFirstMemberList() {
        Vector<HObject> members = new Vector<>();
        Queue<HObject> queue = new LinkedList<>();
        HObject currentObj = this;

        queue.addAll(((Group) currentObj).getMemberList());

        while(!queue.isEmpty()) {
            currentObj = queue.remove();
            members.add(currentObj);

            if(currentObj instanceof Group && ((Group) currentObj).getNumberOfMembersInFile() > 0) {
                queue.addAll(((Group) currentObj).getMemberList());
            }
        }

        return members;
    }

    /**
     * @return the members of this Group in depth-first order.
     */
    public List<HObject> depthFirstMemberList() {
        Vector<HObject> members = new Vector<>();
        Stack<HObject> stack = new Stack<>();
        HObject currentObj = this;

        // Push elements onto the stack in reverse order
        List<HObject> list = ((Group) currentObj).getMemberList();
        for(int i = list.size() - 1; i >= 0; i--) {
            stack.push(list.get(i));
        }

        while(!stack.empty()) {
            currentObj = stack.pop();
            members.add(currentObj);

            if(currentObj instanceof Group && ((Group) currentObj).getNumberOfMembersInFile() > 0) {
                list = ((Group) currentObj).getMemberList();
                for(int i = list.size() - 1; i >= 0; i--) {
                    stack.push(list.get(i));
                }
            }
        }

        return members;
    }

    /**
     * Sets the name of the group.
     * <p>
     * setName (String newName) changes the name of the group in memory and
     * file.
     * <p>
     * setName() updates the path in memory for all the objects that are under
     * the group with the new name.
     *
     * @param newName
     *            The new name of the group.
     *
     * @throws Exception if the name can not be set
     */
    @Override
    public void setName(String newName) throws Exception {
        super.setName(newName);

        if (memberList != null) {
            int n = memberList.size();
            HObject theObj = null;
            for (int i = 0; i < n; i++) {
                theObj = memberList.get(i);
                theObj.setPath(this.getPath() + newName + HObject.SEPARATOR);
            }
        }
    }

    /** @return the parent group. */
    public final Group getParent() {
        return parent;
    }

    /**
     * Checks if it is a root group.
     *
     * @return true if the group is a root group; otherwise, returns false.
     */
    public final boolean isRoot() {
        return (parent == null);
    }

    /**
     * Returns the total number of members of this group in file.
     *
     * Current Java applications such as HDFView cannot handle files with large
     * numbers of objects (1,000,000 or more objects) due to JVM memory
     * limitation. The max_members is used so that applications such as HDFView
     * will load up to <i>max_members</i> number of objects. If the number of
     * objects in file is larger than <i>max_members</i>, only
     * <i>max_members</i> are loaded in memory.
     * <p>
     * getNumberOfMembersInFile() returns the number of objects in this group.
     * The number of objects in memory is obtained by getMemberList().size().
     *
     * @return Total number of members of this group in the file.
     */
    public int getNumberOfMembersInFile() {
        return nMembersInFile;
    }

    /**
     * Get the HObject at the specified index in this Group's member list.
     *
     * @param idx The index of the HObject to get.
     * @return The HObject at the specified index, or null if the index is out
     *         of range or the member list has not been populated.
     */
    public HObject getMember(int idx) {
        // Guard against an unpopulated member list and out-of-range indices;
        // previously a null memberList or a negative idx caused runtime
        // exceptions here.
        if (memberList == null || idx < 0 || idx >= memberList.size()) {
            return null;
        }

        return memberList.get(idx);
    }
}

View File

@ -0,0 +1,562 @@
/*****************************************************************************
* Copyright by The HDF Group. *
* Copyright by the Board of Trustees of the University of Illinois. *
* All rights reserved. *
* *
* This file is part of the HDF Java Products distribution. *
* The full copyright notice, including terms governing use, modification, *
* and redistribution, is contained in the files COPYING and Copyright.html. *
* COPYING can be found at the root of the source code distribution tree. *
* Or, see https://support.hdfgroup.org/products/licenses.html *
* If you do not have access to either file, you may request a copy from *
* help@hdfgroup.org. *
****************************************************************************/
package hdf.object;
import java.io.Serializable;
/**
 * The HObject class is the root class of all the HDF data objects. Every data
 * class has HObject as a superclass. All objects implement the methods of this
 * class. The following is the inherited structure of HDF Objects.
 *
 * <pre>
 *                               HObject
 *        __________________________|________________________________
 *        |                         |                               |
 *      Group                    Dataset                        Datatype
 *        |                _________|___________                    |
 *        |                |                   |                    |
 *        |             ScalarDS          CompoundDS                |
 *        |                |                   |                    |
 *  ---------------------Implementing classes such as-------------------------
 *    ____|____       _____|______        _____|_____          _____|_____
 *    |       |       |          |        |         |          |         |
 * H5Group H4Group H5ScalarDS H4ScalarDS H5CompDS H4CompDS H5Datatype H4Datatype
 * </pre>
 *
 * All HDF4 and HDF5 data objects are inherited from HObject. At the top level
 * of the hierarchy, both HDF4 and HDF5 have the same super-classes, such as
 * Group and Dataset. At the bottom level of the hierarchy, HDF4 and HDF5
 * objects have their own implementation, such as H5Group, H5ScalarDS,
 * H5CompoundDS, and H5Datatype.
 * <p>
 * <b>Warning: HDF4 and HDF5 may have multiple links to the same object. Data
 * objects in this model do not deal with multiple links. Users may create
 * duplicate copies of the same data object with different paths. Applications
 * should check the OID of the data object to avoid duplicate copies of the same
 * object.</b>
 * <p>
 * HDF4 objects are uniquely identified by the OID (tag_id, ref_id) pair. The
 * ref_id is the object reference count. The tag_id is a pre-defined number to
 * identify the type of object. For example, DFTAG_RI is for raster image,
 * DFTAG_SD is for scientific dataset, and DFTAG_VG is for Vgroup.
 * <p>
 * HDF5 objects are uniquely identified by the OID containing just the object
 * reference. The OID is usually obtained by H5Rcreate(). The following example
 * shows how to retrieve an object ID from a file:
 *
 * <pre>
 * // retrieve the object ID
 * try {
 *     byte[] ref_buf = H5.H5Rcreate(h5file.getFID(), this.getFullName(), HDF5Constants.H5R_OBJECT, -1);
 *     long[] oid = new long[1];
 *     oid[0] = HDFNativeData.byteToLong(ref_buf, 0);
 * } catch (Exception ex) {
 * }
 * </pre>
 *
 * @version 2.0 4/2/2018
 * @author Peter X. Cao, Jordan T. Henderson
 * @see <a href="DataFormat.html">hdf.object.DataFormat</a>
 */
public abstract class HObject implements Serializable {

    /**
     * The serialVersionUID is a universal version identifier for a Serializable
     * class. Deserialization uses this number to ensure that a loaded class
     * corresponds exactly to a serialized object. For details, see
     * http://java.sun.com/j2se/1.5.0/docs/api/java/io/Serializable.html
     */
    private static final long serialVersionUID = -1723666708199882519L;

    private static final org.slf4j.Logger log = org.slf4j.LoggerFactory.getLogger(HObject.class);

    /**
     * The separator of object path, i.e. "/".
     */
    public static final String SEPARATOR = "/";

    /**
     * The full path of the file that contains the object.
     */
    private String filename;

    /**
     * The file which contains the object
     */
    protected final FileFormat fileFormat;

    /**
     * The name of the data object. The root group has its default name, a
     * slash. The name can be changed except the root group.
     */
    private String name;

    /**
     * The full path of the data object. The full path always starts with the
     * root, a slash. The path cannot be changed. Also, a path must be ended with a
     * slash. For example, /arrays/ints/
     */
    private String path;

    /** The full name of the data object, i.e. "path + name" */
    private String fullName;

    /**
     * Array of long integer storing unique identifier for the object.
     * <p>
     * HDF4 objects are uniquely identified by a (tag_id, ref_id) pair. i.e.
     * oid[0] = tag, oid[1] = ref_id.<br>
     * HDF5 objects are uniquely identified by an object reference. i.e.
     * oid[0] = obj_id.
     * May be null for objects constructed without an OID.
     */
    protected long[] oid;

    /**
     * The name of the Target Object that is being linked to.
     */
    protected String linkTargetObjName;

    /**
     * Constructs an instance of a data object without name and path.
     */
    public HObject() {
        this(null, null, null, null);
    }

    /**
     * Constructs an instance of a data object with specific name and path.
     * <p>
     * For example, in H5ScalarDS(h5file, "dset", "/arrays"), "dset" is the name
     * of the dataset, "/arrays" is the group path of the dataset.
     *
     * @param theFile
     *            the file that contains the data object.
     * @param theName
     *            the name of the data object, e.g. "dset".
     * @param thePath
     *            the group path of the data object, e.g. "/arrays".
     */
    public HObject(FileFormat theFile, String theName, String thePath) {
        this(theFile, theName, thePath, null);
    }

    /**
     * Constructs an instance of a data object with specific name and path.
     * <p>
     * For example, in H5ScalarDS(h5file, "dset", "/arrays"), "dset" is the name
     * of the dataset, "/arrays" is the group path of the dataset.
     *
     * @param theFile
     *            the file that contains the data object.
     * @param theName
     *            the name of the data object, e.g. "dset".
     * @param thePath
     *            the group path of the data object, e.g. "/arrays".
     * @param oid
     *            the ids of the data object.
     */
    @Deprecated
    public HObject(FileFormat theFile, String theName, String thePath, long[] oid) {
        this.fileFormat = theFile;
        this.oid = oid;

        if (fileFormat != null) {
            this.filename = fileFormat.getFilePath();
        }
        else {
            this.filename = null;
        }

        // file name is packed in the full path
        if ((theName == null) && (thePath != null)) {
            if (thePath.equals(SEPARATOR)) {
                theName = SEPARATOR;
                thePath = null;
            }
            else {
                // the path must starts with "/"
                if (!thePath.startsWith(HObject.SEPARATOR)) {
                    thePath = HObject.SEPARATOR + thePath;
                }

                // get rid of the last "/"
                if (thePath.endsWith(HObject.SEPARATOR)) {
                    thePath = thePath.substring(0, thePath.length() - 1);
                }

                // separate the name and the path
                theName = thePath.substring(thePath.lastIndexOf(SEPARATOR) + 1);
                thePath = thePath.substring(0, thePath.lastIndexOf(SEPARATOR));
            }
        }
        else if ((theName != null) && (thePath == null) && (theName.indexOf(SEPARATOR) >= 0)) {
            if (theName.equals(SEPARATOR)) {
                theName = SEPARATOR;
                thePath = null;
            }
            else {
                // the full name must starts with "/"
                if (!theName.startsWith(SEPARATOR)) {
                    theName = SEPARATOR + theName;
                }

                // the fullname must not end with "/"
                int n = theName.length();
                if (theName.endsWith(SEPARATOR)) {
                    theName = theName.substring(0, n - 1);
                }

                int idx = theName.lastIndexOf(SEPARATOR);
                if (idx < 0) {
                    thePath = SEPARATOR;
                }
                else {
                    thePath = theName.substring(0, idx);
                    theName = theName.substring(idx + 1);
                }
            }
        }

        // the path must start and end with "/"
        if (thePath != null) {
            thePath = thePath.replaceAll("//", "/");
            if (!thePath.endsWith(SEPARATOR)) {
                thePath += SEPARATOR;
            }
        }

        this.name = theName;
        this.path = thePath;

        log.trace("name={} path={}", this.name, this.path);

        if (thePath != null) {
            this.fullName = thePath + theName;
        }
        else {
            if (theName == null) {
                this.fullName = "/";
            }
            else if (theName.startsWith("/")) {
                this.fullName = theName;
            }
            else {
                if (this instanceof Attribute)
                    this.fullName = theName;
                else
                    this.fullName = "/" + theName;
            }
        }

        log.trace("fullName={}", this.fullName);
    }

    /**
     * Print out debug information
     * <p>
     *
     * @param msg
     *            the debug message to print
     */
    protected final void debug(Object msg) {
        System.out.println("*** " + this.getClass().getName() + ": " + msg);
    }

    /**
     * Returns the name of the file that contains this data object.
     * <p>
     * The file name is necessary because the file of this data object is
     * uniquely identified when multiple files are opened by an application at
     * the same time.
     *
     * @return The full path (path + name) of the file.
     */
    public final String getFile() {
        return filename;
    }

    /**
     * Returns the name of the object. For example, "Raster Image #2".
     *
     * @return The name of the object.
     */
    public final String getName() {
        return name;
    }

    /**
     * Returns the name of the target object that is linked to.
     *
     * @return The name of the object that is linked to.
     */
    public final String getLinkTargetObjName() {
        return linkTargetObjName;
    }

    /**
     * Sets the name of the target object that is linked to.
     *
     * @param targetObjName
     *            The new name of the object.
     */
    public final void setLinkTargetObjName(String targetObjName) {
        linkTargetObjName = targetObjName;
    }

    /**
     * Returns the full name (group path + object name) of the object. For
     * example, "/Images/Raster Image #2"
     *
     * @return The full name (group path + object name) of the object.
     */
    public final String getFullName() {
        return fullName;
    }

    /**
     * Returns the group path of the object. For example, "/Images".
     *
     * @return The group path of the object.
     */
    public final String getPath() {
        return path;
    }

    /**
     * Sets the name of the object.
     *
     * setName (String newName) changes the name of the object in the file.
     *
     * @param newName
     *            The new name of the object.
     *
     * @throws Exception if name is root or contains separator
     */
    public void setName(String newName) throws Exception {
        if (newName != null) {
            if (newName.equals(HObject.SEPARATOR)) {
                throw new IllegalArgumentException("The new name cannot be the root");
            }

            if (newName.startsWith(HObject.SEPARATOR)) {
                newName = newName.substring(1);
            }

            if (newName.endsWith(HObject.SEPARATOR)) {
                // Strip exactly the single trailing separator character.
                // (Previously this removed two characters, corrupting the
                // final character of the name.)
                newName = newName.substring(0, newName.length() - 1);
            }

            if (newName.contains(HObject.SEPARATOR)) {
                throw new IllegalArgumentException("The new name contains the SEPARATOR character: " + HObject.SEPARATOR);
            }
        }

        name = newName;
    }

    /**
     * Sets the path of the object.
     * <p>
     * setPath() is needed to change the path for an object when the name of a
     * group containing the object is changed by setName(). The path of the
     * object in memory under this group should be updated to the new path to
     * the group. Unlike setName(), setPath() does not change anything in file.
     *
     * @param newPath
     *            The new path of the object.
     *
     * @throws Exception if a failure occurred
     */
    public void setPath(String newPath) throws Exception {
        if (newPath == null) {
            newPath = "/";
        }

        path = newPath;
    }

    /**
     * Opens an existing object such as a dataset or group for access.
     *
     * The return value is an object identifier obtained by implementing classes
     * such as H5.H5Dopen(). This function is needed to allow other objects to
     * be able to access the object. For instance, H5File class uses the open()
     * function to obtain object identifier for copyAttributes(long src_id, long
     * dst_id) and other purposes. The open() function should be used in pair
     * with close(long) function.
     *
     * @see HObject#close(long)
     *
     * @return the object identifier if successful; otherwise returns a negative
     *         value.
     */
    public abstract long open();

    /**
     * Closes access to the object.
     * <p>
     * Sub-classes must implement this interface because different data objects
     * have their own ways of how the data resources are closed.
     * <p>
     * For example, H5Group.close() calls the hdf.hdf5lib.H5.H5Gclose()
     * method and closes the group resource specified by the group id.
     *
     * @param id
     *            The object identifier.
     */
    public abstract void close(long id);

    /**
     * Returns the file identifier of the file containing the object.
     *
     * @return the file identifier of the file containing the object, or -1 if
     *         the object is not associated with a file.
     */
    public final long getFID() {
        if (fileFormat != null) {
            return fileFormat.getFID();
        }
        else {
            return -1;
        }
    }

    /**
     * Returns the file that contains the object.
     *
     * @return The file that contains the object.
     */
    public final FileFormat getFileFormat() {
        return fileFormat;
    }

    /**
     * Returns a cloned copy of the object identifier.
     * <p>
     * The object OID cannot be modified once it is created. getOID() clones the object OID to ensure
     * the object OID cannot be modified outside of this class.
     *
     * @return the cloned copy of the object OID, or null if no OID was set.
     */
    public final long[] getOID() {
        if (oid == null) {
            return null;
        }

        return oid.clone();
    }

    /**
     * Checks if the OID of the object is the same as the given object identifier within the same file.
     * <p>
     * HDF4 and HDF5 data objects are identified by their unique OIDs. A data object in a file may have
     * multiple logical names, which are represented in a graph structure as separate objects.
     * <p>
     * The HObject.equalsOID(long[] theID) can be used to check if two data objects with different names
     * are pointed to the same object within the same file.
     *
     * @param theID
     *            The list object identifiers.
     *
     * @return true if the ID of the object equals the given OID; otherwise, returns false.
     */
    public final boolean equalsOID(long[] theID) {
        if ((theID == null) || (oid == null)) {
            return false;
        }

        int n1 = theID.length;
        int n2 = oid.length;

        if (n1 == 0 || n2 == 0) {
            return false;
        }

        // Compare the overlapping prefix of the two identifiers.
        int n = Math.min(n1, n2);
        boolean isMatched = (theID[0] == oid[0]);

        for (int i = 1; isMatched && (i < n); i++) {
            isMatched = (theID[i] == oid[i]);
        }

        return isMatched;
    }

    /**
     * Returns the name of the object.
     * <p>
     * This method overwrites the toString() method in the Java Object class
     * (the root class of all Java objects) so that it returns the name of the
     * HObject instead of the name of the class.
     * <p>
     * For example, toString() returns "Raster Image #2" instead of
     * "hdf.object.h4.H4SDS".
     *
     * @return The name of the object.
     */
    @Override
    public String toString() {
        if (this instanceof Group) {
            if (((Group) this).isRoot() && this.getFileFormat() != null) return this.getFileFormat().getName();
        }

        if (name != null) return name;

        return super.toString();
    }

    /**
     * Returns whether this HObject is equal to the specified HObject by comparing their OIDs.
     *
     * @param obj
     *            The object
     *
     * @return true if the object is equal by OID
     */
    public boolean equals(HObject obj) {
        // Guard against null; previously obj.getOID() below threw a
        // NullPointerException for a null argument.
        if (obj == null)
            return false;

        // Cast down to Object to avoid infinite recursion
        if (this.equals((Object) obj))
            return true;

        // comparing the state of OID with
        // the state of 'this' OID.
        return this.equalsOID(obj.getOID());
    }

    @Override
    public boolean equals(Object obj) {
        if (obj == null)
            return false;

        // checking if both the object references are
        // referring to the same object.
        if (this == obj)
            return true;

        return false;
    }

    @Override
    public int hashCode() {
        // Use the OID as the hash value when available. Objects constructed
        // without an OID previously caused a NullPointerException here; fall
        // back to the identity hash, which is consistent with the
        // identity-based equals(Object) above.
        if (oid == null || oid.length == 0) {
            return System.identityHashCode(this);
        }
        return (int) oid[0];
    }
}

View File

@ -0,0 +1,91 @@
/*****************************************************************************
* Copyright by The HDF Group. *
* Copyright by the Board of Trustees of the University of Illinois. *
* All rights reserved. *
* *
* This file is part of the HDF Java Products distribution. *
* The full copyright notice, including terms governing use, modification, *
* and redistribution, is contained in the files COPYING and Copyright.html. *
* COPYING can be found at the root of the source code distribution tree. *
* Or, see https://support.hdfgroup.org/products/licenses.html *
* If you do not have access to either file, you may request a copy from *
* help@hdfgroup.org. *
****************************************************************************/
package hdf.object;
import java.util.List;
/**
 * General I/O contract for object metadata (such as attributes) attached to
 * an object: reading metadata content from the file into memory and writing
 * metadata content from memory back into the file.
 * <p>
 *
 * @see HObject
 *
 * @version 2.0 4/2/2018
 * @author Peter X. Cao, Jordan T. Henderson
 */
@SuppressWarnings("rawtypes")
public interface MetaDataContainer {

    /**
     * Retrieves the object's metadata, such as attributes, from the file.
     * <p>
     * Metadata, such as attributes, is stored in a List.
     *
     * @return the list of metadata objects
     *
     * @throws Exception
     *             if the metadata cannot be retrieved
     */
    List getMetadata() throws Exception;

    /**
     * Writes one piece of metadata (such as an attribute) into the file.
     *
     * If an HDF(4&amp;5) attribute exists in the file, this method updates its
     * value; otherwise it creates the attribute in the file and attaches it to
     * the object. Writing a new attribute fails when an attribute with the
     * same name already exists. To update an existing attribute's value, fetch
     * the attribute instance via getMetadata(), change its values, then pass
     * it back to writeMetadata().
     *
     * @param metadata
     *            the metadata to write
     *
     * @throws Exception
     *             if the metadata cannot be written
     */
    void writeMetadata(Object metadata) throws Exception;

    /**
     * Deletes an existing piece of metadata from this object.
     *
     * @param metadata
     *            the metadata to delete
     *
     * @throws Exception
     *             if the metadata cannot be removed
     */
    void removeMetadata(Object metadata) throws Exception;

    /**
     * Updates an existing piece of metadata attached to this object.
     *
     * @param metadata
     *            the metadata to update
     *
     * @throws Exception
     *             if the metadata cannot be updated
     */
    void updateMetadata(Object metadata) throws Exception;

    /**
     * Checks whether the object has any attributes attached.
     *
     * @return {@code true} if it has any attributes, {@code false} otherwise
     */
    boolean hasAttribute();
}

View File

@ -0,0 +1,450 @@
/*****************************************************************************
* Copyright by The HDF Group. *
* Copyright by the Board of Trustees of the University of Illinois. *
* All rights reserved. *
* *
* This file is part of the HDF Java Products distribution. *
* The full copyright notice, including terms governing use, modification, *
* and redistribution, is contained in the files COPYING and Copyright.html. *
* COPYING can be found at the root of the source code distribution tree. *
* Or, see https://support.hdfgroup.org/products/licenses.html *
* If you do not have access to either file, you may request a copy from *
* help@hdfgroup.org. *
****************************************************************************/
package hdf.object;
import java.util.Iterator;
import java.util.List;
import java.util.Vector;
/**
* A scalar dataset is a multiple dimension array of scalar points. The Datatype of a scalar dataset must be an atomic
* datatype. Common datatypes of scalar datasets include char, byte, short, int, long, float, double and string.
* <p>
* A ScalarDS can be an image or spreadsheet data. ScalarDS defines methods to deal with both images and
* spreadsheets.
* <p>
* ScalarDS is an abstract class. Current implementing classes are the H4SDS, H5GRImage and H5ScalarDS.
*
* @version 1.1 9/4/2007
* @author Peter X. Cao
*/
public abstract class ScalarDS extends Dataset {
    private static final long serialVersionUID = 8925371455928203981L;

    private static final org.slf4j.Logger log = org.slf4j.LoggerFactory.getLogger(ScalarDS.class);

    /************************************************************
     * The following constant strings are copied from            *
     * https://support.hdfgroup.org/HDF5/doc/ADGuide/ImageSpec.html *
     * to make the definition consistent with the image specs.   *
     ************************************************************/

    /**
     * Indicates that the pixel RGB values are contiguous.
     */
    public static final int INTERLACE_PIXEL = 0;

    /** Indicates that each pixel component of RGB is stored as a scan line. */
    public static final int INTERLACE_LINE = 1;

    /** Indicates that each pixel component of RGB is stored as a plane. */
    public static final int INTERLACE_PLANE = 2;

    /**
     * The interlace mode of the stored raster image data. Valid values are INTERLACE_PIXEL, INTERLACE_LINE and
     * INTERLACE_PLANE.
     */
    protected int interlace;

    /**
     * The min-max range of image data values. For example, [0, 255] indicates the min is 0, and the max is 255.
     */
    protected double[] imageDataRange;

    /**
     * The indexed RGB color model with 256 colors.
     * <p>
     * The palette values are stored in a two-dimensional byte array and arranged by color components of red, green and
     * blue. palette[][] = byte[3][256], where, palette[0][], palette[1][] and palette[2][] are the red, green and blue
     * components respectively.
     */
    protected byte[][] palette;

    /**
     * True if this dataset is an image.
     */
    protected boolean isImage;

    /**
     * True if this dataset is a true color image.
     */
    protected boolean isTrueColor;

    /**
     * True if this dataset is ASCII text.
     */
    protected boolean isText;

    /**
     * Flag to indicate whether the original unsigned C data has been converted
     * (see {@link #convertFromUnsignedC()}).
     */
    protected boolean unsignedConverted;

    /** The fill value of the dataset. */
    protected Object fillValue = null;

    // Values excluded from display when this dataset is rendered as an image.
    private List<Number> filteredImageValues;

    /** Flag to indicate if the dataset is displayed as an image. */
    protected boolean isImageDisplay;

    /**
     * Flag to indicate if the dataset is displayed as an image with default order of dimensions.
     */
    protected boolean isDefaultImageOrder;

    /**
     * Flag to indicate if the FillValue is converted from unsigned C.
     */
    public boolean isFillValueConverted;

    /**
     * Constructs an instance of a ScalarDS with specific name and path. An HDF data object must have a name. The path
     * is the group path starting from the root.
     * <p>
     * For example, in H5ScalarDS(h5file, "dset", "/arrays/"), "dset" is the name of the dataset, "/arrays" is the group
     * path of the dataset.
     *
     * @param theFile
     *            the file that contains the data object.
     * @param theName
     *            the name of the data object, e.g. "dset".
     * @param thePath
     *            the full path of the data object, e.g. "/arrays/".
     */
    public ScalarDS(FileFormat theFile, String theName, String thePath) {
        this(theFile, theName, thePath, null);
    }

    /**
     * @deprecated Not for public use in the future.<br>
     *             Using {@link #ScalarDS(FileFormat, String, String)}
     *
     * @param theFile
     *            the file that contains the data object.
     * @param theName
     *            the name of the data object, e.g. "dset".
     * @param thePath
     *            the full path of the data object, e.g. "/arrays/".
     * @param oid
     *            the object id (OID) of the data object.
     */
    @Deprecated
    public ScalarDS(FileFormat theFile, String theName, String thePath, long[] oid) {
        super(theFile, theName, thePath, oid);

        // Defaults: not an image until a subclass detects image metadata.
        palette = null;
        isImage = false;
        isTrueColor = false;
        isText = false;
        interlace = -1;
        imageDataRange = null;
        isImageDisplay = false;
        isDefaultImageOrder = true;
        isFillValueConverted = false;
        filteredImageValues = new Vector<>();
    }

    /*
     * (non-Javadoc)
     *
     * @see hdf.object.Dataset#clearData()
     */
    @Override
    public void clearData() {
        super.clearData();
        // Data was discarded, so a future read must redo the unsigned conversion.
        unsignedConverted = false;
    }

    /**
     * Converts the data values of this dataset to appropriate Java integer if they are unsigned integers.
     *
     * @see Dataset#convertToUnsignedC(Object)
     * @see Dataset#convertFromUnsignedC(Object, Object)
     *
     * @return the converted data buffer.
     */
    @Override
    public Object convertFromUnsignedC() {
        log.trace("convertFromUnsignedC(): start");
        // keep a copy of original buffer and the converted buffer
        // so that they can be reused later to save memory
        log.trace("convertFromUnsignedC(): unsigned={}", getDatatype().isUnsigned());
        if ((data != null) && getDatatype().isUnsigned() && !unsignedConverted) {
            log.trace("convertFromUnsignedC(): convert");
            originalBuf = data;
            convertedBuf = convertFromUnsignedC(originalBuf, convertedBuf);
            data = convertedBuf;
            unsignedConverted = true;

            // The fill value must track the data representation; convert it once.
            if (fillValue != null) {
                if (!isFillValueConverted) {
                    fillValue = convertFromUnsignedC(fillValue, null);
                    isFillValueConverted = true;
                }
            }
        }
        log.trace("convertFromUnsignedC(): finish");
        return data;
    }

    /**
     * Converts Java integer data of this dataset back to unsigned C-type integer data if they are unsigned integers.
     *
     * @see Dataset#convertToUnsignedC(Object)
     * @see Dataset#convertToUnsignedC(Object, Object)
     * @see #convertFromUnsignedC(Object data_in)
     *
     * @return the converted data buffer.
     */
    @Override
    public Object convertToUnsignedC() {
        log.trace("convertToUnsignedC(): start");
        // keep a copy of original buffer and the converted buffer
        // so that they can be reused later to save memory
        log.trace("convertToUnsignedC(): unsigned={}", getDatatype().isUnsigned());
        if ((data != null) && getDatatype().isUnsigned()) {
            log.trace("convertToUnsignedC(): convert");
            convertedBuf = data;
            originalBuf = convertToUnsignedC(convertedBuf, originalBuf);
            data = originalBuf;
        }
        log.trace("convertToUnsignedC(): finish");
        return data;
    }

    /**
     * Returns the palette of this scalar dataset or null if palette does not exist.
     * <p>
     * A Scalar dataset can be displayed as spreadsheet data or an image. When a scalar dataset is displayed as an
     * image, the palette or color table may be needed to translate a pixel value to color components (for example, red,
     * green, and blue). Some scalar datasets have no palette and some datasets have one or more than one palettes. If
     * an associated palette exists but is not loaded, this interface retrieves the palette from the file and returns the
     * palette. If the palette is loaded, it returns the palette. It returns null if there is no palette associated with
     * the dataset.
     * <p>
     * Current implementation only supports palette model of indexed RGB with 256 colors. Other models such as
     * YUV", "CMY", "CMYK", "YCbCr", "HSV will be supported in the future.
     * <p>
     * The palette values are stored in a two-dimensional byte array and are arranged by color components of red, green and
     * blue. palette[][] = byte[3][256], where, palette[0][], palette[1][] and palette[2][] are the red, green and blue
     * components respectively.
     * <p>
     * Sub-classes have to implement this interface. HDF4 and HDF5 images use different libraries to retrieve the
     * associated palette.
     *
     * @return the 2D palette byte array.
     */
    public abstract byte[][] getPalette();

    /**
     * Sets the palette for this dataset.
     *
     * @param pal
     *            the 2D palette byte array.
     */
    public final void setPalette(byte[][] pal) {
        palette = pal;
    }

    /**
     * Reads a specific image palette from file.
     * <p>
     * A scalar dataset may have multiple palettes attached to it. readPalette(int idx) returns a specific palette
     * identified by its index.
     *
     * @param idx
     *            the index of the palette to read.
     *
     * @return the image palette
     */
    public abstract byte[][] readPalette(int idx);

    /**
     * Get the name of a specific image palette from file.
     * <p>
     * A scalar dataset may have multiple palettes attached to it. getPaletteName(int idx) returns the name of a
     * specific palette identified by its index.
     *
     * @param idx
     *            the index of the palette to retrieve the name.
     *
     * @return The name of the palette
     */
    public String getPaletteName(int idx) {
        // Index 0 is simply "Default "; other indices get the index appended.
        String paletteName = "Default ";
        if (idx != 0)
            paletteName = "Default " + idx;
        return paletteName;
    }

    /**
     * Returns the byte array of palette refs.
     * <p>
     * A palette reference is an object reference that points to the palette dataset.
     * <p>
     * For example, Dataset "Iceberg" has an attribute of object reference "Palette". The attribute "Palette" has value
     * "2538" that is the object reference of the palette data set "Iceberg Palette".
     *
     * @return null if there is no palette attribute attached to this dataset.
     */
    public abstract byte[] getPaletteRefs();

    /**
     * Returns true if this dataset is an image.
     * <p>
     * For all Images, they must have an attribute called "CLASS". The value of this attribute is "IMAGE". For more
     * details, read <a href="https://support.hdfgroup.org/HDF5/doc/ADGuide/ImageSpec.html"> HDF5 Image and Palette Specification</a>
     *
     * @return true if the dataset is an image; otherwise, returns false.
     */
    public final boolean isImage() {
        return isImage;
    }

    /**
     * Returns true if this dataset is displayed as an image.
     * <p>
     * A ScalarDS can be displayed as an image or a spreadsheet in a table.
     *
     * @return true if this dataset is displayed as an image; otherwise, returns false.
     */
    public final boolean isImageDisplay() {
        return isImageDisplay;
    }

    /**
     * Returns true if this dataset is displayed as an image with default image order.
     * <p>
     * A ScalarDS can be displayed as an image with different orders of dimensions.
     *
     * @return true if this dataset is displayed as an image with default image order; otherwise, returns false.
     */
    public final boolean isDefaultImageOrder() {
        return isDefaultImageOrder;
    }

    /**
     * Sets the flag to display the dataset as an image.
     *
     * @param b
     *            if b is true, display the dataset as an image
     */
    public final void setIsImageDisplay(boolean b) {
        isImageDisplay = b;
    }

    /**
     * Sets the flag to indicate this dataset is an image.
     *
     * @param b
     *            if b is true, the dataset is an image.
     */
    public final void setIsImage(boolean b) {
        isImage = b;
    }

    /**
     * Sets data range for an image.
     *
     * @param min
     *            the data range start.
     * @param max
     *            the data range end.
     */
    public final void setImageDataRange(double min, double max) {
        // Silently reject degenerate or inverted ranges.
        if (max <= min)
            return;
        if (imageDataRange == null)
            imageDataRange = new double[2];
        imageDataRange[0] = min;
        imageDataRange[1] = max;
    }

    /**
     * Add a value that will be filtered out in an image.
     *
     * @param x
     *            value to be filtered
     */
    public void addFilteredImageValue(Number x) {
        // Duplicates are detected by string representation so that different
        // boxed Number types with the same printed value are treated as equal.
        Iterator<Number> it = filteredImageValues.iterator();
        while (it.hasNext()) {
            if (it.next().toString().equals(x.toString()))
                return;
        }
        filteredImageValues.add(x);
    }

    /**
     * Get a list of values that will be filtered out in an image.
     *
     * @return the list of Image values
     */
    public List<Number> getFilteredImageValues() {
        return filteredImageValues;
    }

    /**
     * @return true if this dataset is a true color image.
     *
     */
    public final boolean isTrueColor() {
        return isTrueColor;
    }

    /**
     * Returns the interlace mode of a true color image (RGB).
     *
     * Valid values:
     *
     * <pre>
     *     INTERLACE_PIXEL -- RGB components are contiguous, i.e. rgb, rgb, rgb, ...
     *     INTERLACE_LINE -- each RGB component is stored as a scan line
     *     INTERLACE_PLANE -- each RGB component is stored as a plane
     * </pre>
     *
     * @return the interlace mode of a true color image (RGB).
     */
    public final int getInterlace() {
        return interlace;
    }

    /**
     * Returns the (min, max) pair of image data range.
     *
     * @return the (min, max) pair of image data range.
     */
    public double[] getImageDataRange() {
        return imageDataRange;
    }

    /**
     * Returns the fill values for the dataset.
     *
     * @return the fill values for the dataset.
     */
    @Override
    public final Object getFillValue() {
        return fillValue;
    }
}

View File

@ -0,0 +1,48 @@
/*****************************************************************************
* Copyright by The HDF Group. *
* Copyright by the Board of Trustees of the University of Illinois. *
* All rights reserved. *
* *
* This file is part of the HDF Java Products distribution. *
* The full copyright notice, including terms governing use, modification, *
* and redistribution, is contained in the files COPYING and Copyright.html. *
* COPYING can be found at the root of the source code distribution tree. *
* Or, see https://support.hdfgroup.org/products/licenses.html *
* If you do not have access to either file, you may request a copy from *
* help@hdfgroup.org. *
****************************************************************************/
package hdf.object;
public final class Utils {
    private Utils() {
        throw new IllegalStateException("Utility class");
    }

    /**
     * Retrieves the Java Runtime Class of the given Object. B = byte array, S = short array, I = int
     * array, J = long array, F = float array, D = double array, L = class or interface
     * <p>
     * Strings and any {@link java.util.List} implementation are reported as 'L'. This generalizes the
     * previous behavior, which only recognized the hard-coded class names {@code java.lang.String},
     * {@code java.util.Vector}, {@code java.util.Arrays$ArrayList} and {@code java.util.ArrayList} and
     * therefore returned ' ' for other List implementations such as {@code LinkedList} or
     * unmodifiable-list wrappers.
     *
     * @param o
     *            the Object to determine the Runtime Class of; may be null
     * @return the Java Runtime Class of the given Object, or ' ' when it cannot be determined
     *         (null input, or a non-array, non-String, non-List object).
     */
    public static char getJavaObjectRuntimeClass(Object o) {
        if (o == null)
            return ' ';

        // Strings and all List implementations are treated as class/interface data ('L').
        if (o instanceof String || o instanceof java.util.List)
            return 'L';

        // For arrays, the JVM class name encodes the element type after the last '[':
        // e.g. "[I" -> 'I' (int[]), "[[D" -> 'D' (double[][]), "[Ljava.lang.String;" -> 'L'.
        String cName = o.getClass().getName();
        int cIndex = cName.lastIndexOf('[');
        if (cIndex >= 0) {
            return cName.charAt(cIndex + 1);
        }

        return ' ';
    }
}

File diff suppressed because it is too large Load Diff

File diff suppressed because it is too large Load Diff

File diff suppressed because it is too large Load Diff

View File

@ -0,0 +1,506 @@
/*****************************************************************************
* Copyright by The HDF Group. *
* Copyright by the Board of Trustees of the University of Illinois. *
* All rights reserved. *
* *
* This file is part of the HDF Java Products distribution. *
* The full copyright notice, including terms governing use, modification, *
* and redistribution, is contained in the files COPYING and Copyright.html. *
* COPYING can be found at the root of the source code distribution tree. *
* Or, see https://support.hdfgroup.org/products/licenses.html *
* If you do not have access to either file, you may request a copy from *
* help@hdfgroup.org. *
****************************************************************************/
package hdf.object.h5;
import java.util.List;
import java.util.Vector;
import hdf.hdf5lib.H5;
import hdf.hdf5lib.HDF5Constants;
import hdf.hdf5lib.HDFNativeData;
import hdf.hdf5lib.exceptions.HDF5Exception;
import hdf.hdf5lib.structs.H5G_info_t;
import hdf.hdf5lib.structs.H5O_info_t;
import hdf.object.Attribute;
import hdf.object.FileFormat;
import hdf.object.Group;
import hdf.object.HObject;
/**
* An H5Group object represents an existing HDF5 group in file.
* <p>
* In HDF5, every object has at least one name. An HDF5 group is used to store a
* set of the names together in one place, i.e. a group. The general structure
* of a group is similar to that of the UNIX file system in that the group may
* contain references to other groups or data objects just as the UNIX directory
* may contain sub-directories or files.
* <p>
* For more information on HDF5 Groups,
*
* <a href="https://support.hdfgroup.org/HDF5/doc/UG/HDF5_Users_Guide-Responsive%20HTML5/index.html">HDF5 User's Guide</a>
*
* @version 1.1 9/4/2007
* @author Peter X. Cao
*/
public class H5Group extends Group {
    private static final long serialVersionUID = -951164512330444150L;

    private static final org.slf4j.Logger log = org.slf4j.LoggerFactory.getLogger(H5Group.class);

    /**
     * The list of attributes of this data object. Members of the list are
     * instance of Attribute.
     */
    @SuppressWarnings("rawtypes")
    protected List attributeList;

    // Cached attribute count; -1 means "not yet queried from the file".
    private int nAttributes = -1;

    private H5O_info_t obj_info;

    /**
     * Constructs an HDF5 group with specific name, path, and parent.
     *
     * @param theFile
     *            the file which contains the group.
     * @param name
     *            the name of this group, e.g. "grp01".
     * @param path
     *            the full path of this group, e.g. "/groups/".
     * @param parent
     *            the parent of this group.
     */
    public H5Group(FileFormat theFile, String name, String path, Group parent) {
        this(theFile, name, path, parent, null);
    }

    /**
     * @deprecated Not for public use in the future.<br>
     *             Using {@link #H5Group(FileFormat, String, String, Group)}
     *
     * @param theFile
     *            the file which contains the group.
     * @param name
     *            the name of this group, e.g. "grp01".
     * @param path
     *            the full path of this group, e.g. "/groups/".
     * @param parent
     *            the parent of this group.
     * @param oid
     *            the oid of this group.
     */
    @Deprecated
    public H5Group(FileFormat theFile, String name, String path, Group parent, long[] oid) {
        super(theFile, name, path, parent, oid);
        nMembersInFile = -1;
        obj_info = new H5O_info_t(-1L, -1L, 0, 0, -1L, 0L, 0L, 0L, 0L, null, null, null);

        if ((oid == null) && (theFile != null)) {
            // retrieve the object ID by creating an HDF5 object reference to this group
            try {
                byte[] ref_buf = H5.H5Rcreate(theFile.getFID(), this.getFullName(), HDF5Constants.H5R_OBJECT, -1);
                this.oid = new long[1];
                this.oid[0] = HDFNativeData.byteToLong(ref_buf, 0);
            }
            catch (Exception ex) {
                // Fall back to a zero OID when the reference cannot be created
                // (e.g. the group does not exist yet in the file).
                this.oid = new long[1];
                this.oid[0] = 0;
            }
        }
    }

    /*
     * (non-Javadoc)
     *
     * @see hdf.object.DataFormat#hasAttribute()
     */
    @Override
    public boolean hasAttribute() {
        // Use the cached count when available; otherwise query the file once.
        obj_info.num_attrs = nAttributes;

        if (obj_info.num_attrs < 0) {
            long gid = open();
            if (gid > 0) {
                try {
                    obj_info = H5.H5Oget_info(gid);
                }
                catch (Exception ex) {
                    obj_info.num_attrs = 0;
                }
                close(gid);
            }
        }

        log.trace("hasAttribute(): nAttributes={}", obj_info.num_attrs);

        return (obj_info.num_attrs > 0);
    }

    /*
     * (non-Javadoc)
     *
     * @see hdf.object.Group#getNumberOfMembersInFile()
     */
    @Override
    public int getNumberOfMembersInFile() {
        // Lazily query the link count; -1 means it has not been read yet.
        if (nMembersInFile < 0) {
            long gid = open();
            if (gid > 0) {
                try {
                    H5G_info_t group_info = null;
                    group_info = H5.H5Gget_info(gid);
                    nMembersInFile = (int) group_info.nlinks;
                }
                catch (Exception ex) {
                    nMembersInFile = 0;
                }
                close(gid);
            }
        }
        return nMembersInFile;
    }

    /*
     * (non-Javadoc)
     *
     * @see hdf.object.Group#clear()
     */
    @SuppressWarnings("rawtypes")
    @Override
    public void clear() {
        super.clear();
        if (attributeList != null) {
            // Empty the list in place; the same Vector instance stays referenced.
            ((Vector) attributeList).setSize(0);
        }
    }

    /*
     * (non-Javadoc)
     *
     * @see hdf.object.DataFormat#getMetadata()
     */
    @Override
    @SuppressWarnings("rawtypes")
    public List getMetadata() throws HDF5Exception {
        return this.getMetadata(fileFormat.getIndexType(null), fileFormat.getIndexOrder(null));
    }

    /*
     * (non-Javadoc)
     *
     * @see hdf.object.DataFormat#getMetadata(int...)
     */
    @SuppressWarnings("rawtypes")
    public List getMetadata(int... attrPropList) throws HDF5Exception {
        log.trace("getMetadata(): start");

        if (attributeList == null) {
            log.trace("getMetadata(): get attributeList");

            // attrPropList optionally overrides the file's default attribute
            // index type ([0]) and index order ([1]).
            int indxType = fileFormat.getIndexType(null);
            int order = fileFormat.getIndexOrder(null);

            if (attrPropList.length > 0) {
                indxType = attrPropList[0];
                if (attrPropList.length > 1) {
                    order = attrPropList[1];
                }
            }
            try {
                attributeList = H5File.getAttribute(this, indxType, order);
            }
            catch (Exception ex) {
                log.debug("getMetadata(): H5File.getAttribute failure: ", ex);
            }
        }

        try {
            // The root group cannot be a link target, so skip it.
            if (!this.isRoot()) this.linkTargetObjName = H5File.getLinkTargetName(this);
        }
        catch (Exception ex) {
            log.debug("getMetadata(): getLinkTargetName failure: ", ex);
        }

        log.trace("getMetadata(): finish");
        return attributeList;
    }

    /*
     * (non-Javadoc)
     *
     * @see hdf.object.DataFormat#writeMetadata(java.lang.Object)
     */
    @Override
    @SuppressWarnings("unchecked")
    public void writeMetadata(Object info) throws Exception {
        log.trace("writeMetadata(): start");

        // only attribute metadata is supported.
        if (!(info instanceof Attribute)) {
            log.debug("writeMetadata(): Object not an Attribute");
            log.trace("writeMetadata(): finish");
            return;
        }

        boolean attrExisted = false;
        Attribute attr = (Attribute) info;
        log.trace("writeMetadata(): {}", attr.getName());

        if (attributeList == null) {
            this.getMetadata();
        }

        if (attributeList != null) attrExisted = attributeList.contains(attr);

        getFileFormat().writeAttribute(this, attr, attrExisted);

        // add the new attribute into attribute list
        if (!attrExisted) {
            attributeList.add(attr);
            nAttributes = attributeList.size();
        }
        log.trace("writeMetadata(): finish");
    }

    /*
     * (non-Javadoc)
     *
     * @see hdf.object.DataFormat#removeMetadata(java.lang.Object)
     */
    @Override
    @SuppressWarnings("rawtypes")
    public void removeMetadata(Object info) throws HDF5Exception {
        log.trace("removeMetadata(): start");

        // only attribute metadata is supported.
        if (!(info instanceof Attribute)) {
            log.debug("removeMetadata(): Object not an Attribute");
            log.trace("removeMetadata(): finish");
            return;
        }

        Attribute attr = (Attribute) info;
        log.trace("removeMetadata(): {}", attr.getName());
        long gid = open();
        if(gid >= 0) {
            try {
                // Delete in the file first, then mirror the change in the cache.
                H5.H5Adelete(gid, attr.getName());
                List attrList = getMetadata();
                attrList.remove(attr);
                nAttributes = attributeList.size();
            }
            finally {
                close(gid);
            }
        }
        else {
            log.debug("removeMetadata(): failed to open group");
        }

        log.trace("removeMetadata(): finish");
    }

    /*
     * (non-Javadoc)
     *
     * @see hdf.object.DataFormat#updateMetadata(java.lang.Object)
     */
    @Override
    public void updateMetadata(Object info) throws HDF5Exception {
        log.trace("updateMetadata(): start");

        // only attribute metadata is supported.
        if (!(info instanceof Attribute)) {
            log.debug("updateMetadata(): Object not an Attribute");
            log.trace("updateMetadata(): finish");
            return;
        }

        // Invalidate the cached count so the next query re-reads the file.
        nAttributes = -1;

        log.trace("updateMetadata(): finish");
    }

    /*
     * (non-Javadoc)
     *
     * @see hdf.object.HObject#open()
     */
    @Override
    public long open() {
        log.trace("open(): start");

        long gid = -1;

        try {
            if (isRoot()) {
                gid = H5.H5Gopen(getFID(), SEPARATOR, HDF5Constants.H5P_DEFAULT);
            }
            else {
                gid = H5.H5Gopen(getFID(), getPath() + getName(), HDF5Constants.H5P_DEFAULT);
            }
        }
        catch (HDF5Exception ex) {
            gid = -1;
        }

        log.trace("open(): finish");
        return gid;
    }

    /*
     * (non-Javadoc)
     *
     * @see hdf.object.HObject#close(int)
     */
    @Override
    public void close(long gid) {
        try {
            H5.H5Gclose(gid);
        }
        catch (HDF5Exception ex) {
            log.debug("close(): H5Gclose(gid {}): ", gid, ex);
        }
    }

    /**
     * Creates a new group with a name in a group and with the group creation
     * properties specified in gplist.
     * <p>
     * The gplist contains a sequence of group creation property list
     * identifiers, lcpl, gcpl, gapl. It allows the user to create a group with
     * group creation properties. It will close the group creation properties
     * specified in gplist.
     *
     * @see H5#H5Gcreate(long, String, long, long, long) for the
     *      order of property list identifiers.
     *
     * @param name
     *            The name of a new group.
     * @param pgroup
     *            The parent group object.
     * @param gplist
     *            The group creation properties, in which the order of the
     *            properties conforms the HDF5 library API, H5Gcreate(), i.e.
     *            lcpl, gcpl and gapl, where
     *            <ul>
     *            <li>lcpl : Property list for link creation <li>gcpl : Property
     *            list for group creation <li>gapl : Property list for group
     *            access
     *            </ul>
     *
     * @return The new group if successful; otherwise returns null.
     *
     * @throws Exception if there is a failure.
     */
    public static H5Group create(String name, Group pgroup, long... gplist) throws Exception {
        log.trace("create(): start");
        H5Group group = null;
        String fullPath = null;
        // Missing property list ids default to H5P_DEFAULT.
        long lcpl = HDF5Constants.H5P_DEFAULT;
        long gcpl = HDF5Constants.H5P_DEFAULT;
        long gapl = HDF5Constants.H5P_DEFAULT;

        if (gplist.length > 0) {
            lcpl = gplist[0];
            if (gplist.length > 1) {
                gcpl = gplist[1];
                if (gplist.length > 2) gapl = gplist[2];
            }
        }

        if ((name == null) || (pgroup == null)) {
            log.debug("create(): one or more parameters are null");
            log.trace("create(): finish");
            // NOTE(review): duplicates the log.debug above on stderr.
            System.err.println("(name == null) || (pgroup == null)");
            return null;
        }

        H5File file = (H5File) pgroup.getFileFormat();

        if (file == null) {
            log.debug("create(): Parent Group FileFormat is null");
            log.trace("create(): finish");
            System.err.println("Could not get file that contains object");
            return null;
        }

        String path = HObject.SEPARATOR;
        if (!pgroup.isRoot()) {
            path = pgroup.getPath() + pgroup.getName() + HObject.SEPARATOR;
            // Normalize the name: strip a trailing '/' and any leading path part.
            if (name.endsWith("/")) {
                name = name.substring(0, name.length() - 1);
            }
            int idx = name.lastIndexOf('/');
            if (idx >= 0) {
                name = name.substring(idx + 1);
            }
        }

        fullPath = path + name;

        // create a new group and add it to the parent node
        long gid = H5.H5Gcreate(file.open(), fullPath, lcpl, gcpl, gapl);
        try {
            H5.H5Gclose(gid);
        }
        catch (Exception ex) {
            log.debug("create(): H5Gcreate {} H5Gclose(gid {}) failure: ", fullPath, gid, ex);
        }

        byte[] ref_buf = H5.H5Rcreate(file.open(), fullPath, HDF5Constants.H5R_OBJECT, -1);
        long l = HDFNativeData.byteToLong(ref_buf, 0);
        long[] oid = { l };

        group = new H5Group(file, name, path, pgroup, oid);

        // NOTE(review): 'group' cannot be null here ('new' never returns null);
        // the check is kept as-is to preserve the original code exactly.
        if (group != null) {
            pgroup.addToMemberList(group);
        }

        // As documented above, this method owns and closes the passed gcpl.
        if (gcpl > 0) {
            try {
                H5.H5Pclose(gcpl);
            }
            catch (final Exception ex) {
                log.debug("create(): create prop H5Pclose(gcpl {}) failure: ", gcpl, ex);
            }
        }

        log.trace("create(): finish");
        return group;
    }

    /*
     * (non-Javadoc)
     *
     * @see hdf.object.HObject#setName(java.lang.String)
     */
    @Override
    public void setName(String newName) throws Exception {
        H5File.renameObject(this, newName);
        super.setName(newName);
    }

    /*
     * (non-Javadoc)
     *
     * @see hdf.object.HObject#setPath(java.lang.String)
     */
    @SuppressWarnings("rawtypes")
    @Override
    public void setPath(String newPath) throws Exception {
        super.setPath(newPath);

        // Propagate the new path to all in-memory children.
        List members = this.getMemberList();
        if (members == null) {
            return;
        }

        int n = members.size();
        HObject obj = null;
        for (int i = 0; i < n; i++) {
            obj = (HObject) members.get(i);
            obj.setPath(getPath() + getName() + HObject.SEPARATOR);
        }
    }
}

View File

@ -0,0 +1,112 @@
/*****************************************************************************
* Copyright by The HDF Group. *
* Copyright by the Board of Trustees of the University of Illinois. *
* All rights reserved. *
* *
* This file is part of the HDF Java Products distribution. *
* The full copyright notice, including terms governing use, modification, *
* and redistribution, is contained in the files COPYING and Copyright.html. *
* COPYING can be found at the root of the source code distribution tree. *
* Or, see https://support.hdfgroup.org/products/licenses.html *
* If you do not have access to either file, you may request a copy from *
* help@hdfgroup.org. *
****************************************************************************/
package hdf.object.h5;
import java.util.List;
import hdf.hdf5lib.structs.H5O_info_t;
import hdf.object.FileFormat;
import hdf.object.HObject;
import hdf.object.MetaDataContainer;
/**
* An H5Link object represents an existing HDF5 object in file.
* <p>
* H5Link object is an HDF5 object that is either a soft or an external link to
* an object in a file that does not exist. The type of the object is unknown.
* Once the object being linked to is created, and the type is known, then
* H5link object will change its type.
*
* @version 2.7.2 7/6/2010
* @author Nidhi Gupta
*/
public class H5Link extends HObject implements MetaDataContainer {
    private static final long serialVersionUID = -8137277460521594367L;

    // Populated in the constructor but currently unused; kept for parity with
    // other HObject subclasses that cache H5O_info_t.
    @SuppressWarnings("unused")
    private H5O_info_t obj_info;

    /**
     * Constructs an HDF5 link with specific name, path, and parent.
     *
     * @param theFile
     *            the file which contains the link.
     * @param name
     *            the name of this link, e.g. "link1".
     * @param path
     *            the full path of this link, e.g. "/groups/".
     */
    public H5Link(FileFormat theFile, String name, String path) {
        this(theFile, name, path, null);
    }

    /**
     * Constructs an HDF5 link with specific name, path, parent and object id.
     *
     * @param theFile
     *            the file which contains the link.
     * @param theName
     *            the name of this link, e.g. "link1".
     * @param thePath
     *            the full path of this link, e.g. "/groups/".
     * @param oid
     *            the object id of this link.
     */
    @SuppressWarnings("deprecation")
    public H5Link(FileFormat theFile, String theName, String thePath, long[] oid) {
        super(theFile, theName, thePath, oid);
        obj_info = new H5O_info_t(-1L, -1L, -1, 0, -1L, 0L, 0L, 0L, 0L, null, null, null);
    }

    /** A link has no open HDF5 identifier, so there is nothing to close. */
    @Override
    public void close(long id) {
    }

    /** A dangling link cannot be opened as an HDF5 object; returns 0 as a placeholder. */
    @Override
    public long open() {
        return 0;
    }

    /**
     * Refreshes the cached link target name, best effort.
     *
     * @return always null; links carry no attribute metadata of their own.
     *
     * @throws Exception
     *             declared by the interface; never actually thrown here.
     */
    @Override
    @SuppressWarnings("rawtypes")
    public List getMetadata() throws Exception {
        try {
            this.linkTargetObjName = H5File.getLinkTargetName(this);
        }
        catch (Exception ignored) {
            // Best effort: a broken or unresolved link simply keeps its old target name.
        }

        return null;
    }

    /** Links never carry attributes. */
    @Override
    public boolean hasAttribute() {
        return false;
    }

    /** No-op: links have no metadata to remove. */
    @Override
    public void removeMetadata(Object info) throws Exception {
    }

    /** No-op: links have no metadata to write. */
    @Override
    public void writeMetadata(Object info) throws Exception {
    }

    /** No-op: links have no metadata to update. */
    @Override
    public void updateMetadata(Object info) throws Exception {
    }

    /**
     * Links have no attributes regardless of index type/order.
     *
     * @param attrPropList
     *            ignored.
     *
     * @return always null.
     */
    @Override
    @SuppressWarnings("rawtypes")
    public List getMetadata(int... attrPropList) throws Exception {
        return null;
    }

    /*
     * (non-Javadoc)
     *
     * @see hdf.object.HObject#setName(java.lang.String)
     */
    @Override
    public void setName(String newName) throws Exception {
        // Rename in the file first so an HDF5 failure leaves the in-memory name untouched.
        H5File.renameObject(this, newName);
        super.setName(newName);
    }
}

File diff suppressed because it is too large Load Diff

View File

@ -0,0 +1,135 @@
/*****************************************************************************
* Copyright by The HDF Group. *
* Copyright by the Board of Trustees of the University of Illinois. *
* All rights reserved. *
* *
* This file is part of the HDF Java Products distribution. *
* The full copyright notice, including terms governing use, modification, *
* and redistribution, is contained in the files COPYING and Copyright.html. *
* COPYING can be found at the root of the source code distribution tree. *
* Or, see https://support.hdfgroup.org/products/licenses.html *
* If you do not have access to either file, you may request a copy from *
* help@hdfgroup.org. *
****************************************************************************/
package hdf.object.h5;
import hdf.hdf5lib.H5;
import hdf.hdf5lib.HDF5Constants;
import hdf.hdf5lib.exceptions.HDF5Exception;
public final class H5Utils {
private static final org.slf4j.Logger log = org.slf4j.LoggerFactory.getLogger(H5Utils.class);
/**
* Set up a hyperslab selection within a dataset.
*
* @param did
* IN dataset ID
* @param dsetDims
* IN dimensions
* @param startDims
* IN start dimensions
* @param selectedStride
* IN selected stride values
* @param selectedDims
* IN selected dimensions
* @param spaceIDs
* IN/OUT memory and file space IDs -- spaceIDs[0]=mspace, spaceIDs[1]=fspace
*
* @return total number of data points selected
*
* @throws HDF5Exception
* If there is an error at the HDF5 library level.
*/
public static final long selectHyperslab(long did, long[] dsetDims, long[] startDims, long[] selectedStride,
        long[] selectedDims, long[] spaceIDs) throws HDF5Exception {
    log.trace("selectHyperslab(): start");

    // Guard clauses: the dataset dims must exist and every supplied
    // rank-sized argument must agree with the dataset rank.
    if (dsetDims == null) {
        log.debug("selectHyperslab(): dsetDims is null");
        return -1;
    }

    final int rank = dsetDims.length;

    if ((startDims != null) && (startDims.length != rank)) {
        log.debug("selectHyperslab(): startDims rank didn't match dsetDims rank");
        return -1;
    }
    if ((selectedStride != null) && (selectedStride.length != rank)) {
        log.debug("selectHyperslab(): selectedStride rank didn't match startDims rank");
        return -1;
    }
    if ((selectedDims != null) && (selectedDims.length != rank)) {
        log.debug("selectHyperslab(): selectedDims rank didn't match startDims rank");
        return -1;
    }

    // Count the selected points and detect whether the selection spans the
    // dataset's full extent in every dimension.
    long numPoints = 1;
    boolean coversWholeExtent = true;
    if (selectedDims != null) {
        for (int dim = 0; dim < rank; dim++) {
            numPoints *= selectedDims[dim];
            if (selectedDims[dim] < dsetDims[dim]) {
                coversWholeExtent = false;
            }
        }
    }
    log.trace("selectHyperslab(): isAllSelected={}", coversWholeExtent);

    if (coversWholeExtent) {
        // Whole dataset selected: H5S_ALL avoids creating dataspaces at all.
        spaceIDs[0] = HDF5Constants.H5S_ALL;
        spaceIDs[1] = HDF5Constants.H5S_ALL;
    }
    else {
        spaceIDs[1] = H5.H5Dget_space(did);

        // A 1D memory dataspace over a chunked dataset reads very slowly —
        // a known HDF5 library problem — so create a full-rank memory space
        // instead of H5.H5Screate_simple(1, lsize, null).
        spaceIDs[0] = H5.H5Screate_simple(rank, selectedDims, null);
        H5.H5Sselect_hyperslab(spaceIDs[1], HDF5Constants.H5S_SELECT_SET, startDims, selectedStride,
                selectedDims, null);
    }

    log.trace("selectHyperslab(): finish");
    return numPoints;
}
public static final long getTotalSelectedSpacePoints(long did, long[] dsetDims, long[] startDims,
long[] selectedStride, long[] selectedDims, long[] spaceIDs) throws HDF5Exception {
long totalSelectedSpacePoints = selectHyperslab(did, dsetDims, startDims, selectedStride, selectedDims, spaceIDs);
log.trace("getTotalSelectedSpacePoints(): selected {} points in dataset's dataspace", totalSelectedSpacePoints);
if (totalSelectedSpacePoints == 0) {
log.debug("getTotalSelectedSpacePoints(): No data to read. Dataset or selected subset is empty.");
log.trace("getTotalSelectedSpacePoints(): finish");
throw new HDF5Exception("No data to read.\nEither the dataset or the selected subset is empty.");
}
if (totalSelectedSpacePoints < Integer.MIN_VALUE || totalSelectedSpacePoints > Integer.MAX_VALUE) {
log.debug("getTotalSelectedSpacePoints(): totalSelectedSpacePoints outside valid Java int range; unsafe cast");
log.trace("getTotalSelectedSpacePoints(): finish");
throw new HDF5Exception("Invalid int size");
}
if (log.isDebugEnabled()) {
// check is storage space is allocated
try {
long ssize = H5.H5Dget_storage_size(did);
log.trace("getTotalSelectedSpacePoints(): Storage space allocated = {} bytes", ssize);
}
catch (Exception ex) {
log.debug("getTotalSelectedSpacePoints(): check if storage space is allocated:", ex);
}
}
return totalSelectedSpacePoints;
}
}