mirror of
https://github.com/paulscherrerinstitute/ch.psi.imagej.hdf5.git
synced 2025-06-24 18:51:08 +02:00
Compare commits
20 Commits
Author | SHA1 | Date | |
---|---|---|---|
e37c671c6e | |||
8246af22a8 | |||
e2958589d8 | |||
8082cdb737 | |||
58407a97af | |||
92f51b5905 | |||
e0b5f96b76 | |||
3e6f26be47 | |||
c7d6f1068c | |||
da539b17dd | |||
75e490fa18 | |||
70fa714aac | |||
2372bf786f | |||
cf0101a25d | |||
3c9fd2b97e | |||
eca2294000 | |||
efc5891e2e | |||
ec38041ddf | |||
8e5ca84a9e | |||
d967b27d7c |
28
.classpath
28
.classpath
@ -1,28 +0,0 @@
|
||||
<?xml version="1.0" encoding="UTF-8"?>
|
||||
<classpath>
|
||||
<classpathentry kind="src" output="target/classes" path="src/main/java">
|
||||
<attributes>
|
||||
<attribute name="optional" value="true"/>
|
||||
<attribute name="maven.pomderived" value="true"/>
|
||||
</attributes>
|
||||
</classpathentry>
|
||||
<classpathentry kind="src" output="target/test-classes" path="src/test/java">
|
||||
<attributes>
|
||||
<attribute name="optional" value="true"/>
|
||||
<attribute name="maven.pomderived" value="true"/>
|
||||
</attributes>
|
||||
</classpathentry>
|
||||
<classpathentry including="**/*.java" kind="src" path="src/main/resources"/>
|
||||
<classpathentry including="**/*.java" kind="src" path="src/test/resources"/>
|
||||
<classpathentry kind="con" path="org.eclipse.m2e.MAVEN2_CLASSPATH_CONTAINER">
|
||||
<attributes>
|
||||
<attribute name="maven.pomderived" value="true"/>
|
||||
</attributes>
|
||||
</classpathentry>
|
||||
<classpathentry kind="con" path="org.eclipse.jdt.launching.JRE_CONTAINER/org.eclipse.jdt.internal.debug.ui.launcher.StandardVMType/JavaSE-1.7">
|
||||
<attributes>
|
||||
<attribute name="maven.pomderived" value="true"/>
|
||||
</attributes>
|
||||
</classpathentry>
|
||||
<classpathentry kind="output" path="target/classes"/>
|
||||
</classpath>
|
4
.gitignore
vendored
4
.gitignore
vendored
@ -1,3 +1,7 @@
|
||||
imagej.hdf5.iml
|
||||
.DS_Store
|
||||
.idea
|
||||
/target
|
||||
.gradle
|
||||
build
|
||||
out/
|
||||
|
29
.project
29
.project
@ -1,29 +0,0 @@
|
||||
<?xml version="1.0" encoding="UTF-8"?>
|
||||
<projectDescription>
|
||||
<name>ch.psi.imagej.hdf5</name>
|
||||
<comment></comment>
|
||||
<projects>
|
||||
</projects>
|
||||
<buildSpec>
|
||||
<buildCommand>
|
||||
<name>org.eclipse.jdt.core.javabuilder</name>
|
||||
<arguments>
|
||||
</arguments>
|
||||
</buildCommand>
|
||||
<buildCommand>
|
||||
<name>org.eclipse.m2e.core.maven2Builder</name>
|
||||
<arguments>
|
||||
</arguments>
|
||||
</buildCommand>
|
||||
<buildCommand>
|
||||
<name>ch.acanda.eclipse.pmd.builder.PMDBuilder</name>
|
||||
<arguments>
|
||||
</arguments>
|
||||
</buildCommand>
|
||||
</buildSpec>
|
||||
<natures>
|
||||
<nature>org.eclipse.jdt.core.javanature</nature>
|
||||
<nature>org.eclipse.m2e.core.maven2Nature</nature>
|
||||
<nature>ch.acanda.eclipse.pmd.builder.PMDNature</nature>
|
||||
</natures>
|
||||
</projectDescription>
|
@ -1,3 +0,0 @@
|
||||
eclipse.preferences.version=1
|
||||
encoding//src/main/java=UTF-8
|
||||
encoding//src/test/java=UTF-8
|
@ -1,5 +0,0 @@
|
||||
eclipse.preferences.version=1
|
||||
org.eclipse.jdt.core.compiler.codegen.targetPlatform=1.7
|
||||
org.eclipse.jdt.core.compiler.compliance=1.7
|
||||
org.eclipse.jdt.core.compiler.problem.forbiddenReference=warning
|
||||
org.eclipse.jdt.core.compiler.source=1.7
|
@ -1,4 +0,0 @@
|
||||
activeProfiles=
|
||||
eclipse.preferences.version=1
|
||||
resolveWorkspaceProjects=true
|
||||
version=1
|
BIN
ImageJ_Updater_AdvancedMode.png
Normal file
BIN
ImageJ_Updater_AdvancedMode.png
Normal file
Binary file not shown.
After Width: | Height: | Size: 21 KiB |
BIN
ImageJ_Updater_search_java8.png
Normal file
BIN
ImageJ_Updater_search_java8.png
Normal file
Binary file not shown.
After Width: | Height: | Size: 40 KiB |
115
Readme.md
115
Readme.md
@ -1,10 +1,11 @@
|
||||
# Overview
|
||||
|
||||
ImageJ plugin for reading and writing HDF5 files.
|
||||
|
||||
For 3D datasets an individual slice can be selected for visualization.
|
||||
Also, especially for very big datasets only every x-th slice can be selected
|
||||
for visualization. This can be done by either specifying a number, e.g. `10` (for the slice 10) or a number with a preceding %, e.g. `%10` (for every 10th image). Indexing starts at 0.
|
||||
Upon opening an HDF5 file, an import dialog lists the available image data sets contained in the file and allows the selection of one or multiple data sets to be opened:
|
||||
|
||||

|
||||
|
||||
Note that the Fiji distribution of ImageJ comes with an hdf5 plugin already installed out of the box. This packaged hdf5 plugin (HDF5_Vibez) has some great features, and may be all you need. However, it does not allow one to load large image arrays as virtual stacks or to select only sliced subsets of the data, and thus often results in "out of memory" errors when working with large data sets. In those cases, the PSI plugin for reading and writing HDF5 files described here might be your preferred choice to work with HDF5 files.
|
||||
|
||||
# Usage
|
||||
|
||||
@ -20,22 +21,49 @@ To save to an HDF5 file use:
|
||||
File > SaveAs > HDF5
|
||||
```
|
||||
|
||||
## Scripting
|
||||
|
||||
To use this plugin from the ImageJs' (python) scripting interface these lines
|
||||
can be used to open a dataset:
|
||||
|
||||
```python
|
||||
from ch.psi.imagej.hdf5 import HDF5Reader
|
||||
reader = HDF5Reader()
|
||||
stack = reader.open("",False, "/Users/ebner/Desktop/A8_d_400N030_.h5", "/exchange/data_dark", True)
|
||||
```
|
||||
|
||||
# Installation
|
||||
To be able to install this plugin ImageJ need to be run with a Java 7 or greater JVM.
|
||||
|
||||
* Download latest HDF5 ImageJ plugin from [releases](https://github.com/paulscherrerinstitute/ch.psi.imagej.hdf5/releases).
|
||||
## Prerequisites
|
||||
This plugin requireds ImageJ/Fiji to be run with a Java 8 or greater. To check with wich version your installation is running please refer to the [Troubleshooting](#Troubleshooting) section below.
|
||||
|
||||
* Go into the ImageJ installation folder and extract the downloaded zip.
|
||||
## ImageJ
|
||||
All you need is to download the latest HDF5 ImageJ plugin from [releases](https://github.com/paulscherrerinstitute/ch.psi.imagej.hdf5/releases) and copy the jar into the `plugins` directory of ImageJ. After this you should be able to simply start ImageJ and use the Plugin.
|
||||
|
||||
## Fiji
|
||||
Fiji already comes with an HDF5 plugin (HDF5_Vibez) installed. However, as mentioned above it has certain limitations. Before installing this plugin, HDF5_Vibez need to be deinstalled.
|
||||
|
||||
To disable the standard hdf5 plugin, follow these steps.
|
||||
|
||||
* Close any running instances of the Fiji applications
|
||||
* Locate the installation directory of the Fiji application (`FIJI_DIR`).
|
||||
* Remove the HDF5_Vibez jar from the `plugins` directory: `rm $FIJI_DIR/plugins/HDF5_Vibez*.jar`
|
||||
* Remove the provided HDF5 binary jar from the `jar` directory: `rm $FIJI_DIR/jars/jhdf5-*.jar`
|
||||
|
||||
The installation of the plugin essentially only requires one to add the downloaded jar into the `plugins` directory inside the Fiji installation directory. Follow these steps:
|
||||
|
||||
* Download the latest HDF5 ImageJ plugin from [releases](https://github.com/paulscherrerinstitute/ch.psi.imagej.hdf5/releases) and copy the jar into the `$FIJI_DIR/plugins` directory
|
||||
* Restart Fiji.
|
||||
|
||||
To verify the correct installation:
|
||||
* Try to import an hdf5 file: `Fiji > File > Import > HDF5...`. If the installation of the pluging was sucessful, the file import dialog should look like in the screenshot below once you have selected an hdf5 file.
|
||||
|
||||

|
||||
|
||||
```
|
||||
cd <IMAGEJ_HOME>
|
||||
unzip <path of downloaded zip>
|
||||
```
|
||||
|
||||
## Configuration (Optional)
|
||||
|
||||
If you want to configure the HDF5 Reader as a standard file reader you need to register the reader within the `HandleExtraFileTypes.java` file.
|
||||
This can be done as follows (details on this can be found on: http://albert.rierol.net/imagej_programming_tutorials.html):
|
||||
If you want to configure the HDF5 Reader as a standard file reader you need to register the reader within the `HandleExtraFileTypes.java` file. This can be done as follows (details on this can be found on: http://albert.rierol.net/imagej_programming_tutorials.html):
|
||||
|
||||
* Add `HandleExtraFileTypes.java`
|
||||
|
||||
@ -50,40 +78,59 @@ if (name.endsWith(".h5") || name.endsWith(".hdf5")) {
|
||||
javac -classpath ij.jar ./plugins/Input-Output/HandleExtraFileTypes.java
|
||||
```
|
||||
|
||||
# Usage
|
||||
|
||||
## Mac OS X
|
||||
# Troubleshooting
|
||||
## Checking the Java version
|
||||
|
||||
```
|
||||
java -Djava.library.path=./lib/mac64 -Xmx3048m -jar ImageJ64.app/Contents/Resources/Java/ij.jar
|
||||
```
|
||||
You can check whether Java-8 is included with Fiji as follows:
|
||||
|
||||
## Linux
|
||||
* Open the update dialog from the menu via `Fiji > Help > Update...`.
|
||||
* Wait for the application to finish checking for new updates.
|
||||
* In the ImageJ Updater Window, click on "Advanced Mode"
|
||||
* Type `java-8` into the "Search" field.
|
||||
* If you see an entry `lib/Java-8` in the results box below, then Java 8 is
|
||||
ready to be used on your system (see screenshots below).
|
||||
|
||||
```
|
||||
java -Djava.library.path=./lib/linux64 -Xmx3048m -jar ij.jar
|
||||
```
|
||||
|
||||
The `Xmx` setting is quite random it depends on how big hdf5 files you are planning to open.
|
||||

|
||||

|
||||
|
||||
|
||||
# Fiji
|
||||
Fiji currently comes with Java 6 bundled. As the HDF5 Plugin requires Java 7 or higher we have to instruct Fiji to use an alternative Java.
|
||||
This can be done as follows:
|
||||
## Check Which Plugin is Installed
|
||||
|
||||
You will be able to tell which plugin is currently active when trying to import an hdf5 file (`Fiji > File > Import > HDF5...`). Once you have selected an hdf5 file, the standard HDF5_Vibez plugin opens an import dialog that looks as follows:
|
||||
|
||||

|
||||
|
||||
|
||||
## Running Older Fiji Versions with Java 8 or Greater
|
||||
For older versions of Fiji, we have to instruct Fiji to use an alternative Java (which has to be installed on the systems separately, of course). This can be done as follows:
|
||||
|
||||
```
|
||||
cd <FIJI_HOME>
|
||||
<fiji> --java-home /usr/lib/jvm/jre-1.7.0-openjdk.x86_64 -Djava.library.path=lib/linux64
|
||||
<fiji> --java-home /usr/lib/jvm/jre-1.8.0-openjdk.x86_64
|
||||
```
|
||||
|
||||
Starting with Java 8 just the LD_LIBRARY_PATH variable need to be set. For MacOSX it is export `DYLD_LIBRARY_PATH=lib/mac64/:$DYLD_LIBRARY_PATH`.
|
||||
## Adjust Memory Settings
|
||||
For normal usage, you should just be able to open Fiji/ImageJ as usual and start using the hdf5 plugin. If you experience out of memory problems while opening very large datasets try to increaste the initial amount of memory to be used as follows:
|
||||
|
||||
```
|
||||
java -Xmx3048m -jar ij.jar
|
||||
```
|
||||
|
||||
The `Xmx` setting is quite "random" it depends on how big hdf5 files you are planning to open.
|
||||
|
||||
|
||||
# Development
|
||||
To create an all in one zip file for installation in a ImageJ installation use:
|
||||
`mvn clean compile assembly:assembly`
|
||||
To create an all in one jar file for installation in a ImageJ installation use: `./gradlew clean fatJar`
|
||||
|
||||
The jar file is an all in one jar including all required native libraries for Windows, Linux and Mac OS X.
|
||||
|
||||
|
||||
## Dependencies
|
||||
The java HDF5 libary as well as the precompiled code we downloaded and copied from: https://wiki-bsse.ethz.ch/display/JHDF5/Download+Page . All the necessary jars where copied from there into the `/lib` directory.
|
||||
|
||||
The files in the package hdf.objects in this repository were copied from the hdfviewer source code. We were not able to find a jar that contained them.
|
||||
|
||||
The zip file contains an all in one jar as well as the required native libraries for Windows, Linux and Mac OS X.
|
||||
|
||||
# Acknowledgements
|
||||
This project was inspired by the ImageJ HDF Plugin of Matthias Schlachter Chair of Pattern Recognition and Image Processing, University of Freiburg, Germany ( https://code.google.com/p/imagej-hdf ) .
|
||||
It is a complete rewrite of the code with the focus on efficiency and maintainability
|
||||
This project was inspired by the ImageJ HDF Plugin of Matthias Schlachter Chair of Pattern Recognition and Image Processing, University of Freiburg, Germany (https://code.google.com/p/imagej-hdf ) . It is a complete rewrite of the code with the focus on efficiency and maintainability
|
||||
|
BIN
RunSettings.png
Normal file
BIN
RunSettings.png
Normal file
Binary file not shown.
After Width: | Height: | Size: 358 KiB |
122
build.gradle
122
build.gradle
@ -1,30 +1,124 @@
|
||||
apply plugin: 'java'
|
||||
apply plugin: 'maven'
|
||||
//apply plugin: 'maven'
|
||||
|
||||
group = 'ch.psi'
|
||||
version = '0.8.0'
|
||||
version = '0.13.0'
|
||||
|
||||
description = """"""
|
||||
|
||||
sourceCompatibility = 1.7
|
||||
targetCompatibility = 1.7
|
||||
|
||||
sourceCompatibility = 1.8
|
||||
targetCompatibility = 1.8
|
||||
|
||||
|
||||
repositories {
|
||||
mavenCentral()
|
||||
maven { url "http://slsyoke4.psi.ch:8081/artifactory/libs-releases" }
|
||||
maven { url "http://artifacts.psi.ch/artifactory/libs-releases" }
|
||||
maven { url "http://maven.imagej.net/content/repositories/public/" }
|
||||
flatDir {
|
||||
dirs 'lib'
|
||||
}
|
||||
}
|
||||
|
||||
// define a provided scope
|
||||
configurations {
|
||||
provided
|
||||
compile.extendsFrom provided
|
||||
}
|
||||
|
||||
dependencies {
|
||||
compile group: 'hdf5', name: 'hdf', version:'2.10.0'
|
||||
compile group: 'hdf5', name: 'hdfobj', version:'2.10.0'
|
||||
compile group: 'hdf5', name: 'hdf5', version:'2.10.0'
|
||||
compile group: 'hdf5', name: 'hdf5obj', version:'2.10.0'
|
||||
|
||||
compile name: 'sis-jhdf5-19.04.0'
|
||||
compile name: 'sis-base-18.09.0'
|
||||
compile name: 'commons-io-2.6'
|
||||
// compile name: 'sis-base-18.09.0'
|
||||
// compile group: 'cisd', name: 'jhdf5', version: '14.12.6'
|
||||
// compile group: 'hdf5', name: 'hdf', version:'2.10.0'
|
||||
// compile group: 'hdf5', name: 'hdfobj', version:'2.10.0'
|
||||
// compile group: 'hdf5', name: 'hdf5', version:'2.10.0'
|
||||
// compile group: 'hdf5', name: 'hdf5obj', version:'2.10.0'
|
||||
compile group: 'org.slf4j', name: 'slf4j-api', version: '1.7.6'
|
||||
testCompile group: 'junit', name: 'junit', version: '4.11'
|
||||
compile(group: 'gov.nih.imagej', name: 'imagej', version:'1.46') {
|
||||
/* This dependency was originally in the Maven provided scope, but the project was not of type war.
|
||||
This behavior is not yet supported by Gradle, so this dependency has been converted to a compile dependency.
|
||||
Please review and delete this closure when resolved. */
|
||||
|
||||
|
||||
provided group: 'net.imagej', name: 'ij', version: '1.53e'
|
||||
}
|
||||
|
||||
task fatJar(type: Jar) {
|
||||
// manifest {
|
||||
// attributes 'Implementation-Title': 'CaView',
|
||||
// 'Implementation-Version': version,
|
||||
// 'Main-Class': 'ch.psi.caview.CaView'
|
||||
// }
|
||||
archiveBaseName = 'HDF5_Viewer'
|
||||
from { (configurations.compile - configurations.provided).collect { it.isDirectory() ? it : zipTree(it) } }
|
||||
with jar
|
||||
}
|
||||
|
||||
//task distributionZip(type: Zip, dependsOn: [fatJar]) {
|
||||
// baseName "${project.group}-jhdf5"
|
||||
//
|
||||
// from('targets/dist') {
|
||||
// into "${project.group}-jhdf5"
|
||||
// include 'version.txt'
|
||||
// include 'build.txt'
|
||||
// }
|
||||
//
|
||||
// from ("$buildDir/output/dist") {
|
||||
// into "${project.group}-jhdf5"
|
||||
// include '*'
|
||||
// }
|
||||
//
|
||||
// from ("$buildDir/output/dist") {
|
||||
// into "${project.group}-jhdf5"
|
||||
// include 'bin/*'
|
||||
// }
|
||||
//
|
||||
// from ("$buildDir/output/dist") {
|
||||
// into "${project.group}-jhdf5"
|
||||
// include 'doc/*'
|
||||
// }
|
||||
//
|
||||
// from (javadocJar.archivePath) {
|
||||
// into "${project.group}-jhdf5/doc"
|
||||
// }
|
||||
//
|
||||
// from (exampleJar.archivePath) {
|
||||
// into "${project.group}-jhdf5/doc"
|
||||
// }
|
||||
//
|
||||
// from (configurations.runtime) {
|
||||
// include '*-sources*'
|
||||
// into "${project.group}-jhdf5/sources"
|
||||
// }
|
||||
//
|
||||
// from (sourcesJar.archivePath) {
|
||||
// into "${project.group}-jhdf5/sources"
|
||||
// }
|
||||
//
|
||||
// from (jar.archivePath) {
|
||||
// into "${project.group}-jhdf5/lib"
|
||||
// }
|
||||
//
|
||||
// from (h5arCLIJar.archivePath) {
|
||||
// into "${project.group}-jhdf5/lib"
|
||||
// }
|
||||
//
|
||||
// from (configurations.runtime) {
|
||||
// exclude '*restriction*'
|
||||
// exclude '*bcel*'
|
||||
// exclude '*-sources*'
|
||||
// into "${project.group}-jhdf5/lib"
|
||||
// }
|
||||
//
|
||||
// from ("libs") {
|
||||
// into "${project.group}-jhdf5/lib"
|
||||
// include 'native/**'
|
||||
// }
|
||||
//
|
||||
// from (configurations.runtime.collect {zipTree(it)}) {
|
||||
// into "${project.group}-jhdf5/lib"
|
||||
// include 'native/**/*'
|
||||
// }
|
||||
//
|
||||
//}
|
||||
|
||||
|
BIN
gradle/wrapper/gradle-wrapper.jar
vendored
BIN
gradle/wrapper/gradle-wrapper.jar
vendored
Binary file not shown.
3
gradle/wrapper/gradle-wrapper.properties
vendored
3
gradle/wrapper/gradle-wrapper.properties
vendored
@ -1,6 +1,5 @@
|
||||
#Tue Apr 14 08:25:23 CEST 2015
|
||||
distributionBase=GRADLE_USER_HOME
|
||||
distributionPath=wrapper/dists
|
||||
distributionUrl=https\://services.gradle.org/distributions/gradle-5.2.1-bin.zip
|
||||
zipStoreBase=GRADLE_USER_HOME
|
||||
zipStorePath=wrapper/dists
|
||||
distributionUrl=https\://services.gradle.org/distributions/gradle-2.3-bin.zip
|
||||
|
74
gradlew
vendored
74
gradlew
vendored
@ -1,4 +1,4 @@
|
||||
#!/usr/bin/env bash
|
||||
#!/usr/bin/env sh
|
||||
|
||||
##############################################################################
|
||||
##
|
||||
@ -6,12 +6,30 @@
|
||||
##
|
||||
##############################################################################
|
||||
|
||||
# Add default JVM options here. You can also use JAVA_OPTS and GRADLE_OPTS to pass JVM options to this script.
|
||||
DEFAULT_JVM_OPTS=""
|
||||
# Attempt to set APP_HOME
|
||||
# Resolve links: $0 may be a link
|
||||
PRG="$0"
|
||||
# Need this for relative symlinks.
|
||||
while [ -h "$PRG" ] ; do
|
||||
ls=`ls -ld "$PRG"`
|
||||
link=`expr "$ls" : '.*-> \(.*\)$'`
|
||||
if expr "$link" : '/.*' > /dev/null; then
|
||||
PRG="$link"
|
||||
else
|
||||
PRG=`dirname "$PRG"`"/$link"
|
||||
fi
|
||||
done
|
||||
SAVED="`pwd`"
|
||||
cd "`dirname \"$PRG\"`/" >/dev/null
|
||||
APP_HOME="`pwd -P`"
|
||||
cd "$SAVED" >/dev/null
|
||||
|
||||
APP_NAME="Gradle"
|
||||
APP_BASE_NAME=`basename "$0"`
|
||||
|
||||
# Add default JVM options here. You can also use JAVA_OPTS and GRADLE_OPTS to pass JVM options to this script.
|
||||
DEFAULT_JVM_OPTS='"-Xmx64m"'
|
||||
|
||||
# Use the maximum available, or set MAX_FD != -1 to use that value.
|
||||
MAX_FD="maximum"
|
||||
|
||||
@ -30,6 +48,7 @@ die ( ) {
|
||||
cygwin=false
|
||||
msys=false
|
||||
darwin=false
|
||||
nonstop=false
|
||||
case "`uname`" in
|
||||
CYGWIN* )
|
||||
cygwin=true
|
||||
@ -40,31 +59,11 @@ case "`uname`" in
|
||||
MINGW* )
|
||||
msys=true
|
||||
;;
|
||||
NONSTOP* )
|
||||
nonstop=true
|
||||
;;
|
||||
esac
|
||||
|
||||
# For Cygwin, ensure paths are in UNIX format before anything is touched.
|
||||
if $cygwin ; then
|
||||
[ -n "$JAVA_HOME" ] && JAVA_HOME=`cygpath --unix "$JAVA_HOME"`
|
||||
fi
|
||||
|
||||
# Attempt to set APP_HOME
|
||||
# Resolve links: $0 may be a link
|
||||
PRG="$0"
|
||||
# Need this for relative symlinks.
|
||||
while [ -h "$PRG" ] ; do
|
||||
ls=`ls -ld "$PRG"`
|
||||
link=`expr "$ls" : '.*-> \(.*\)$'`
|
||||
if expr "$link" : '/.*' > /dev/null; then
|
||||
PRG="$link"
|
||||
else
|
||||
PRG=`dirname "$PRG"`"/$link"
|
||||
fi
|
||||
done
|
||||
SAVED="`pwd`"
|
||||
cd "`dirname \"$PRG\"`/" >&-
|
||||
APP_HOME="`pwd -P`"
|
||||
cd "$SAVED" >&-
|
||||
|
||||
CLASSPATH=$APP_HOME/gradle/wrapper/gradle-wrapper.jar
|
||||
|
||||
# Determine the Java command to use to start the JVM.
|
||||
@ -90,7 +89,7 @@ location of your Java installation."
|
||||
fi
|
||||
|
||||
# Increase the maximum file descriptors if we can.
|
||||
if [ "$cygwin" = "false" -a "$darwin" = "false" ] ; then
|
||||
if [ "$cygwin" = "false" -a "$darwin" = "false" -a "$nonstop" = "false" ] ; then
|
||||
MAX_FD_LIMIT=`ulimit -H -n`
|
||||
if [ $? -eq 0 ] ; then
|
||||
if [ "$MAX_FD" = "maximum" -o "$MAX_FD" = "max" ] ; then
|
||||
@ -114,6 +113,7 @@ fi
|
||||
if $cygwin ; then
|
||||
APP_HOME=`cygpath --path --mixed "$APP_HOME"`
|
||||
CLASSPATH=`cygpath --path --mixed "$CLASSPATH"`
|
||||
JAVACMD=`cygpath --unix "$JAVACMD"`
|
||||
|
||||
# We build the pattern for arguments to be converted via cygpath
|
||||
ROOTDIRSRAW=`find -L / -maxdepth 1 -mindepth 1 -type d 2>/dev/null`
|
||||
@ -154,11 +154,19 @@ if $cygwin ; then
|
||||
esac
|
||||
fi
|
||||
|
||||
# Split up the JVM_OPTS And GRADLE_OPTS values into an array, following the shell quoting and substitution rules
|
||||
function splitJvmOpts() {
|
||||
JVM_OPTS=("$@")
|
||||
# Escape application args
|
||||
save () {
|
||||
for i do printf %s\\n "$i" | sed "s/'/'\\\\''/g;1s/^/'/;\$s/\$/' \\\\/" ; done
|
||||
echo " "
|
||||
}
|
||||
eval splitJvmOpts $DEFAULT_JVM_OPTS $JAVA_OPTS $GRADLE_OPTS
|
||||
JVM_OPTS[${#JVM_OPTS[*]}]="-Dorg.gradle.appname=$APP_BASE_NAME"
|
||||
APP_ARGS=$(save "$@")
|
||||
|
||||
exec "$JAVACMD" "${JVM_OPTS[@]}" -classpath "$CLASSPATH" org.gradle.wrapper.GradleWrapperMain "$@"
|
||||
# Collect all arguments for the java command, following the shell quoting and substitution rules
|
||||
eval set -- $DEFAULT_JVM_OPTS $JAVA_OPTS $GRADLE_OPTS "\"-Dorg.gradle.appname=$APP_BASE_NAME\"" -classpath "\"$CLASSPATH\"" org.gradle.wrapper.GradleWrapperMain "$APP_ARGS"
|
||||
|
||||
# by default we should be in the correct project dir, but when run from Finder on Mac, the cwd is wrong
|
||||
if [ "$(uname)" = "Darwin" ] && [ "$HOME" = "$PWD" ]; then
|
||||
cd "$(dirname "$0")"
|
||||
fi
|
||||
|
||||
exec "$JAVACMD" "$@"
|
||||
|
14
gradlew.bat
vendored
14
gradlew.bat
vendored
@ -8,14 +8,14 @@
|
||||
@rem Set local scope for the variables with windows NT shell
|
||||
if "%OS%"=="Windows_NT" setlocal
|
||||
|
||||
@rem Add default JVM options here. You can also use JAVA_OPTS and GRADLE_OPTS to pass JVM options to this script.
|
||||
set DEFAULT_JVM_OPTS=
|
||||
|
||||
set DIRNAME=%~dp0
|
||||
if "%DIRNAME%" == "" set DIRNAME=.
|
||||
set APP_BASE_NAME=%~n0
|
||||
set APP_HOME=%DIRNAME%
|
||||
|
||||
@rem Add default JVM options here. You can also use JAVA_OPTS and GRADLE_OPTS to pass JVM options to this script.
|
||||
set DEFAULT_JVM_OPTS="-Xmx64m"
|
||||
|
||||
@rem Find java.exe
|
||||
if defined JAVA_HOME goto findJavaFromJavaHome
|
||||
|
||||
@ -46,10 +46,9 @@ echo location of your Java installation.
|
||||
goto fail
|
||||
|
||||
:init
|
||||
@rem Get command-line arguments, handling Windowz variants
|
||||
@rem Get command-line arguments, handling Windows variants
|
||||
|
||||
if not "%OS%" == "Windows_NT" goto win9xME_args
|
||||
if "%@eval[2+2]" == "4" goto 4NT_args
|
||||
|
||||
:win9xME_args
|
||||
@rem Slurp the command line arguments.
|
||||
@ -60,11 +59,6 @@ set _SKIP=2
|
||||
if "x%~1" == "x" goto execute
|
||||
|
||||
set CMD_LINE_ARGS=%*
|
||||
goto execute
|
||||
|
||||
:4NT_args
|
||||
@rem Get arguments from the 4NT Shell from JP Software
|
||||
set CMD_LINE_ARGS=%$
|
||||
|
||||
:execute
|
||||
@rem Setup the command line
|
||||
|
BIN
hdf5_Vibez_select_datasets.png
Normal file
BIN
hdf5_Vibez_select_datasets.png
Normal file
Binary file not shown.
After Width: | Height: | Size: 119 KiB |
BIN
hdf5plugin_select_datasets.png
Normal file
BIN
hdf5plugin_select_datasets.png
Normal file
Binary file not shown.
After Width: | Height: | Size: 50 KiB |
BIN
lib/commons-io-2.6.jar
Normal file
BIN
lib/commons-io-2.6.jar
Normal file
Binary file not shown.
BIN
lib/sis-base-18.09.0.jar
Normal file
BIN
lib/sis-base-18.09.0.jar
Normal file
Binary file not shown.
BIN
lib/sis-jhdf5-19.04.0.jar
Normal file
BIN
lib/sis-jhdf5-19.04.0.jar
Normal file
Binary file not shown.
77
pom.xml
77
pom.xml
@ -1,77 +0,0 @@
|
||||
<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
|
||||
xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
|
||||
<modelVersion>4.0.0</modelVersion>
|
||||
<groupId>ch.psi</groupId>
|
||||
<artifactId>imagej.hdf5</artifactId>
|
||||
<version>0.9.0</version>
|
||||
|
||||
<dependencies>
|
||||
<dependency>
|
||||
<groupId>gov.nih.imagej</groupId>
|
||||
<artifactId>imagej</artifactId>
|
||||
<version>1.46</version>
|
||||
<scope>provided</scope>
|
||||
</dependency>
|
||||
|
||||
<dependency>
|
||||
<groupId>hdf5</groupId>
|
||||
<artifactId>hdf</artifactId>
|
||||
<version>2.10.0</version>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>hdf5</groupId>
|
||||
<artifactId>hdfobj</artifactId>
|
||||
<version>2.10.0</version>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>hdf5</groupId>
|
||||
<artifactId>hdf5</artifactId>
|
||||
<version>2.10.0</version>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>hdf5</groupId>
|
||||
<artifactId>hdf5obj</artifactId>
|
||||
<version>2.10.0</version>
|
||||
</dependency>
|
||||
<!-- The HDF5 libraries -->
|
||||
<dependency>
|
||||
<groupId>org.slf4j</groupId>
|
||||
<artifactId>slf4j-api</artifactId>
|
||||
<version>1.7.6</version>
|
||||
</dependency>
|
||||
|
||||
<dependency>
|
||||
<groupId>junit</groupId>
|
||||
<artifactId>junit</artifactId>
|
||||
<version>4.11</version>
|
||||
<scope>test</scope>
|
||||
</dependency>
|
||||
</dependencies>
|
||||
|
||||
<build>
|
||||
<plugins>
|
||||
<plugin>
|
||||
<artifactId>maven-compiler-plugin</artifactId>
|
||||
<version>2.3.2</version>
|
||||
<configuration>
|
||||
<encoding>UTF-8</encoding>
|
||||
<source>1.7</source>
|
||||
<target>1.7</target>
|
||||
</configuration>
|
||||
</plugin>
|
||||
<plugin>
|
||||
<artifactId>maven-assembly-plugin</artifactId>
|
||||
<version>2.4</version>
|
||||
<configuration>
|
||||
<finalName>HDF5_Viewer-${pom.version}</finalName>
|
||||
<appendAssemblyId>false</appendAssemblyId>
|
||||
<archive />
|
||||
<descriptors>
|
||||
<descriptor>src/main/assembly/assembly_jar.xml</descriptor>
|
||||
<descriptor>src/main/assembly/assembly.xml</descriptor>
|
||||
</descriptors>
|
||||
</configuration>
|
||||
</plugin>
|
||||
</plugins>
|
||||
</build>
|
||||
</project>
|
@ -1,28 +0,0 @@
|
||||
<assembly xmlns="http://maven.apache.org/plugins/maven-assembly-plugin/assembly/1.1.2" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
|
||||
xsi:schemaLocation="http://maven.apache.org/plugins/maven-assembly-plugin/assembly/1.1.2 http://maven.apache.org/xsd/assembly-1.1.2.xsd">
|
||||
|
||||
<includeBaseDirectory>false</includeBaseDirectory>
|
||||
<id>bin</id>
|
||||
<!-- Generates a zip package containing the needed files -->
|
||||
<formats>
|
||||
<format>zip</format>
|
||||
</formats>
|
||||
|
||||
<fileSets>
|
||||
<fileSet>
|
||||
<directory>src/main/assembly/lib</directory>
|
||||
<outputDirectory>lib</outputDirectory>
|
||||
<includes>
|
||||
<include>**/*</include>
|
||||
</includes>
|
||||
</fileSet>
|
||||
<!-- adds fat-jar assembled earlier to the root directory of zip package -->
|
||||
<fileSet>
|
||||
<directory>target</directory>
|
||||
<outputDirectory>plugins</outputDirectory>
|
||||
<includes>
|
||||
<include>HDF5_Viewer*.jar</include>
|
||||
</includes>
|
||||
</fileSet>
|
||||
</fileSets>
|
||||
</assembly>
|
@ -1,18 +0,0 @@
|
||||
<assembly xmlns="http://maven.apache.org/plugins/maven-assembly-plugin/assembly/1.1.0"
|
||||
xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
|
||||
xsi:schemaLocation="http://maven.apache.org/plugins/maven-assembly-plugin/assembly/1.1.0 http://maven.apache.org/xsd/assembly-1.1.0.xsd">
|
||||
<!-- TODO: a jarjar format would be better -->
|
||||
<id>jar-with-dependencies</id>
|
||||
<formats>
|
||||
<format>jar</format>
|
||||
</formats>
|
||||
<includeBaseDirectory>false</includeBaseDirectory>
|
||||
<dependencySets>
|
||||
<dependencySet>
|
||||
<outputDirectory>/</outputDirectory>
|
||||
<useProjectArtifact>true</useProjectArtifact>
|
||||
<unpack>true</unpack>
|
||||
<scope>runtime</scope>
|
||||
</dependencySet>
|
||||
</dependencySets>
|
||||
</assembly>
|
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
@ -3,7 +3,7 @@ package ch.psi.imagej.hdf5;
|
||||
import java.util.ArrayList;
|
||||
import java.util.List;
|
||||
|
||||
import ncsa.hdf.object.Dataset;
|
||||
import hdf.object.Dataset;
|
||||
|
||||
public class DatasetSelection {
|
||||
|
||||
|
@ -1,5 +1,9 @@
|
||||
package ch.psi.imagej.hdf5;
|
||||
|
||||
import hdf.hdf5lib.exceptions.HDF5Exception;
|
||||
import hdf.object.Dataset;
|
||||
import hdf.object.Datatype;
|
||||
import hdf.object.h5.H5File;
|
||||
import ij.IJ;
|
||||
import ij.ImagePlus;
|
||||
import ij.CompositeImage;
|
||||
@ -10,14 +14,10 @@ import ij.plugin.PlugIn;
|
||||
|
||||
import java.io.File;
|
||||
import java.lang.reflect.Array;
|
||||
import java.util.List;
|
||||
import java.util.*;
|
||||
import java.util.logging.Level;
|
||||
import java.util.logging.Logger;
|
||||
|
||||
import ncsa.hdf.object.*;
|
||||
import ncsa.hdf.object.h5.*;
|
||||
import ncsa.hdf.hdf5lib.exceptions.HDF5Exception;
|
||||
|
||||
public class HDF5Reader implements PlugIn {
|
||||
|
||||
private static final Logger logger = Logger.getLogger(HDF5Reader.class.getName());
|
||||
@ -28,45 +28,88 @@ public class HDF5Reader implements PlugIn {
|
||||
*/
|
||||
public static void main(String[] args){
|
||||
HDF5Reader r = new HDF5Reader();
|
||||
r.run("");
|
||||
// r.run("");
|
||||
r.open("",false, "pollen.h5", "/t0/channel0", true);
|
||||
}
|
||||
|
||||
public void run(String arg) {
|
||||
open(arg, true, null, null, true);
|
||||
}
|
||||
|
||||
/**
|
||||
* Main function plugin
|
||||
* arg is a space separated list of arguments that can be passed to the run method.
|
||||
* arg looks something like this: "para1=value1 para2=value2 ....."
|
||||
*
|
||||
* Supported arguments for arg:
|
||||
* open=<path>
|
||||
* dataset=/your/path/to/dataset
|
||||
*
|
||||
*/
|
||||
public void run(String arg) {
|
||||
|
||||
public ImageStack open(String arg, boolean interactive, String filename, String nameOfDataset, boolean virtualstack) {
|
||||
return open(arg, interactive, filename, nameOfDataset, virtualstack, true);
|
||||
}
|
||||
|
||||
public ImageStack open(String arg, boolean interactive, String filename, String nameOfDataset, boolean virtualstack, boolean showImage) {
|
||||
|
||||
// Map arguments = HDF5Reader.parseArguments(arg);
|
||||
|
||||
File tfile = null;
|
||||
if(interactive) {
|
||||
OpenDialog od = new OpenDialog("Open HDF5 ...", arg);
|
||||
|
||||
|
||||
File tfile = new File(od.getDirectory() + od.getFileName());
|
||||
tfile = new File(od.getDirectory() + od.getFileName());
|
||||
if (!tfile.exists() || !tfile.canRead()) {
|
||||
IJ.showMessage("Cannot open file: "+tfile.getAbsolutePath());
|
||||
return;
|
||||
return null;
|
||||
}
|
||||
String filename = tfile.getAbsolutePath();
|
||||
|
||||
// Overwrite filename with selected filename
|
||||
filename = tfile.getAbsolutePath();
|
||||
|
||||
IJ.showStatus("Loading HDF5 File: " + filename);
|
||||
IJ.showProgress(0.0);
|
||||
}
|
||||
|
||||
// Read HDF5 file
|
||||
H5File file = null;
|
||||
boolean close = true;
|
||||
|
||||
List<ImageStack> stacks = new ArrayList<>();
|
||||
ImageStack stack = null;
|
||||
|
||||
try {
|
||||
file = new H5File(filename, H5File.READ);
|
||||
file.setMaxMembers(Integer.MAX_VALUE);
|
||||
file.open();
|
||||
|
||||
List<Dataset> datasets = HDF5Utilities.getDatasets(file);
|
||||
DatasetSelection selectedDatasets = selectDatasets(datasets);
|
||||
|
||||
DatasetSelection selectedDatasets = null;
|
||||
if(interactive){
|
||||
logger.info("Using manual selection");
|
||||
// Manual selection of the dataset and other parameters via a dialog
|
||||
selectedDatasets = selectDatasets(datasets);
|
||||
}
|
||||
else{
|
||||
logger.info("Using automatic selection");
|
||||
selectedDatasets = new DatasetSelection();
|
||||
for(Dataset dataset: datasets){
|
||||
if(dataset.getFullName().equals(nameOfDataset)){
|
||||
selectedDatasets.getDatasets().add(dataset);
|
||||
break; // we only support one selection for the time being
|
||||
}
|
||||
}
|
||||
selectedDatasets.setVirtualStack(virtualstack);
|
||||
}
|
||||
|
||||
|
||||
// TODO to be removed - Workaround virtual stack - keep HDF5 file open at the end
|
||||
close=!selectedDatasets.isVirtualStack();
|
||||
|
||||
|
||||
// TODO Remove
|
||||
// Hack as a proof of principle
|
||||
if(selectedDatasets.isGroup()){
|
||||
ImageStack stack = null;
|
||||
|
||||
for (Dataset var : selectedDatasets.getDatasets()) {
|
||||
if(stack == null){
|
||||
@ -80,10 +123,13 @@ public class HDF5Reader implements PlugIn {
|
||||
|
||||
ImagePlus imp = new ImagePlus(filename, stack);
|
||||
imp.resetDisplayRange();
|
||||
if(showImage) {
|
||||
imp.show();
|
||||
return;
|
||||
}
|
||||
|
||||
stacks.add(stack);
|
||||
return stack; // TODO should return stacks instead of stack
|
||||
}
|
||||
|
||||
for (Dataset var : selectedDatasets.getDatasets()) {
|
||||
|
||||
@ -93,7 +139,7 @@ public class HDF5Reader implements PlugIn {
|
||||
int numberOfDimensions = var.getRank();
|
||||
long[] dimensions= var.getDims();
|
||||
|
||||
logger.info("Reading dataset: " + datasetName + " Dimensions: " + numberOfDimensions + " Type: " + datatype.getDatatypeDescription());
|
||||
logger.info("Reading dataset: " + datasetName + " Dimensions: " + numberOfDimensions + " Type: " + datatype.getDescription());
|
||||
|
||||
|
||||
// Read dataset
|
||||
@ -111,7 +157,8 @@ public class HDF5Reader implements PlugIn {
|
||||
|
||||
Object wholeDataset = var.read();
|
||||
|
||||
ImageStack stack = new ImageStack((int) dimensions[3], (int) dimensions[2]);
|
||||
stack = new ImageStack((int) dimensions[3], (int) dimensions[2]);
|
||||
stacks.add(stack);
|
||||
int stackSize = (int) (dimensions[2] * dimensions[3] * 3);
|
||||
int singleVolumeSize = (int) (dimensions[1] * stackSize);
|
||||
for (int volIDX = 0; volIDX < dimensions[0]; ++volIDX) {
|
||||
@ -126,7 +173,9 @@ public class HDF5Reader implements PlugIn {
|
||||
imp = new CompositeImage(imp, CompositeImage.COMPOSITE);
|
||||
imp.setOpenAsHyperStack(true);
|
||||
imp.resetDisplayRange();
|
||||
if(showImage) {
|
||||
imp.show();
|
||||
}
|
||||
|
||||
} else if (numberOfDimensions == 4 && dimensions[3] == 3) {
|
||||
logger.info("3D RGB Image");
|
||||
@ -141,7 +190,8 @@ public class HDF5Reader implements PlugIn {
|
||||
|
||||
Object wholeDataset = var.read();
|
||||
|
||||
ImageStack stack = new ImageStack((int) dimensions[2], (int) dimensions[1]);
|
||||
stack = new ImageStack((int) dimensions[2], (int) dimensions[1]);
|
||||
stacks.add(stack);
|
||||
int stackSize = (int) (dimensions[1] * dimensions[2] * 3);
|
||||
for (int lev = 0; lev < dimensions[0]; ++lev) {
|
||||
int startIdx = lev * stackSize;
|
||||
@ -153,7 +203,9 @@ public class HDF5Reader implements PlugIn {
|
||||
imp = new CompositeImage(imp, CompositeImage.COMPOSITE);
|
||||
imp.setOpenAsHyperStack(true);
|
||||
imp.resetDisplayRange();
|
||||
if(showImage) {
|
||||
imp.show();
|
||||
}
|
||||
|
||||
} else if (numberOfDimensions == 4) {
|
||||
logger.info("4D Image (HyperVolume)");
|
||||
@ -168,7 +220,8 @@ public class HDF5Reader implements PlugIn {
|
||||
|
||||
Object wholeDataset = var.read();
|
||||
|
||||
ImageStack stack = new ImageStack((int) dimensions[3], (int) dimensions[2]);
|
||||
stack = new ImageStack((int) dimensions[3], (int) dimensions[2]);
|
||||
stacks.add(stack);
|
||||
int size = (int) (dimensions[2] * dimensions[3]);
|
||||
long singleVolumeSize = dimensions[1] * size;
|
||||
for (int volIDX = 0; volIDX < dimensions[0]; ++volIDX) {
|
||||
@ -182,7 +235,9 @@ public class HDF5Reader implements PlugIn {
|
||||
imp.setDimensions(1, (int) dimensions[1], (int) dimensions[0]);
|
||||
imp.setOpenAsHyperStack(true);
|
||||
imp.resetDisplayRange();
|
||||
if(showImage) {
|
||||
imp.show();
|
||||
}
|
||||
|
||||
} else if (numberOfDimensions == 3 && dimensions[2] == 3) {
|
||||
logger.info("2D RGB Image");
|
||||
@ -196,7 +251,8 @@ public class HDF5Reader implements PlugIn {
|
||||
|
||||
Object wholeDataset = var.read();
|
||||
|
||||
ImageStack stack = new ImageStack((int) dimensions[1], (int) dimensions[0]);
|
||||
stack = new ImageStack((int) dimensions[1], (int) dimensions[0]);
|
||||
stacks.add(stack);
|
||||
addSliceRGB(stack, wholeDataset, (int) dimensions[0], (int) dimensions[1]);
|
||||
|
||||
ImagePlus imp = new ImagePlus(filename + " " + datasetName, stack);
|
||||
@ -204,13 +260,13 @@ public class HDF5Reader implements PlugIn {
|
||||
imp = new CompositeImage(imp, CompositeImage.COMPOSITE);
|
||||
imp.setOpenAsHyperStack(true);
|
||||
imp.resetDisplayRange();
|
||||
if(showImage) {
|
||||
imp.show();
|
||||
}
|
||||
|
||||
} else if (numberOfDimensions == 3) {
|
||||
logger.info("3D Image");
|
||||
|
||||
ImageStack stack;
|
||||
|
||||
if(selectedDatasets.isVirtualStack()){
|
||||
logger.info("Use virtual stack");
|
||||
stack = new VirtualStackHDF5(file, var);
|
||||
@ -278,21 +334,27 @@ public class HDF5Reader implements PlugIn {
|
||||
}
|
||||
}
|
||||
|
||||
stacks.add(stack);
|
||||
ImagePlus imp = new ImagePlusHDF5(filename + " " + datasetName, stack);
|
||||
imp.resetDisplayRange();
|
||||
if(showImage) {
|
||||
imp.show();
|
||||
}
|
||||
|
||||
} else if (numberOfDimensions == 2) {
|
||||
logger.info("2D Image");
|
||||
|
||||
Object wholeDataset = var.read();
|
||||
|
||||
ImageStack stack = new ImageStack((int) dimensions[1], (int) dimensions[0]);
|
||||
stack = new ImageStack((int) dimensions[1], (int) dimensions[0]);
|
||||
stacks.add(stack);
|
||||
addSlice(stack, wholeDataset);
|
||||
|
||||
ImagePlus imp = new ImagePlus(filename + " " + datasetName, stack);
|
||||
imp.resetDisplayRange();
|
||||
if(showImage) {
|
||||
imp.show();
|
||||
}
|
||||
|
||||
} else {
|
||||
IJ.showStatus("Variable Dimension " + numberOfDimensions + " not supported");
|
||||
@ -319,6 +381,8 @@ public class HDF5Reader implements PlugIn {
|
||||
}
|
||||
|
||||
IJ.showProgress(1.0);
|
||||
|
||||
return stack; // TODO should return stacks instead of stack
|
||||
}
|
||||
|
||||
/**
|
||||
@ -353,7 +417,6 @@ public class HDF5Reader implements PlugIn {
|
||||
return selectedDatasets;
|
||||
}
|
||||
|
||||
|
||||
/**
|
||||
* Add slice to image stack
|
||||
* @param stack Stack to add slice
|
||||
@ -433,4 +496,22 @@ public class HDF5Reader implements PlugIn {
|
||||
stack.addSlice(null, g);
|
||||
stack.addSlice(null, b);
|
||||
}
|
||||
|
||||
public static Map<String,String> parseArguments(String arg){
|
||||
|
||||
/// ImageJ arguments look something like this: "para1=value1 para2=value2 ....."
|
||||
Map<String,String> map = new HashMap<>();
|
||||
arg = arg.trim();
|
||||
for(String argument: arg.split("\\s+")){
|
||||
String[] entry = argument.split("=");
|
||||
if(entry.length==2) {
|
||||
map.put(entry[0], entry[1]);
|
||||
}
|
||||
else{
|
||||
// ignore
|
||||
logger.warning("Cannot parse argument " + argument + " - Ignore");
|
||||
}
|
||||
}
|
||||
return map;
|
||||
}
|
||||
}
|
||||
|
@ -11,40 +11,40 @@ import java.util.regex.Pattern;
|
||||
|
||||
import javax.swing.tree.DefaultMutableTreeNode;
|
||||
|
||||
import ncsa.hdf.object.Attribute;
|
||||
import ncsa.hdf.object.Dataset;
|
||||
import ncsa.hdf.object.FileFormat;
|
||||
import ncsa.hdf.object.Group;
|
||||
import ncsa.hdf.object.HObject;
|
||||
import ncsa.hdf.object.h5.H5File;
|
||||
import hdf.object.Attribute;
|
||||
import hdf.object.Dataset;
|
||||
import hdf.object.FileFormat;
|
||||
import hdf.object.Group;
|
||||
import hdf.object.HObject;
|
||||
import hdf.object.h5.H5File;
|
||||
|
||||
public class HDF5Utilities {
|
||||
|
||||
private static final Logger logger = Logger.getLogger(HDF5Utilities.class.getName());
|
||||
|
||||
/**
|
||||
* Get attributes from object
|
||||
* @param object Object to retrieve the attributes from
|
||||
* @return Map of attributes or null if an error occurred while retrieving the attributes or the passed object is null
|
||||
*/
|
||||
public static Map<String,Attribute> getAttributes(HObject object) {
|
||||
Objects.requireNonNull(object);
|
||||
|
||||
Map<String, Attribute> attributes = new HashMap<>();
|
||||
try{
|
||||
for(Object m: object.getMetadata()){
|
||||
if(m instanceof Attribute){
|
||||
attributes.put(((Attribute) m).getName(), (Attribute) m);
|
||||
}
|
||||
}
|
||||
}
|
||||
catch(Exception e){
|
||||
logger.warning("Unable to retrieve metadata from object");
|
||||
return null;
|
||||
}
|
||||
|
||||
return attributes;
|
||||
}
|
||||
// /**
|
||||
// * Get attributes from object
|
||||
// * @param object Object to retrieve the attributes from
|
||||
// * @return Map of attributes or null if an error occurred while retrieving the attributes or the passed object is null
|
||||
// */
|
||||
// public static Map<String,Attribute> getAttributes(HObject object) {
|
||||
// Objects.requireNonNull(object);
|
||||
//
|
||||
// Map<String, Attribute> attributes = new HashMap<>();
|
||||
// try{
|
||||
// for(Object m: object.getMetadata()){
|
||||
// if(m instanceof Attribute){
|
||||
// attributes.put(((Attribute) m).getName(), (Attribute) m);
|
||||
// }
|
||||
// }
|
||||
// }
|
||||
// catch(Exception e){
|
||||
// logger.warning("Unable to retrieve metadata from object");
|
||||
// return null;
|
||||
// }
|
||||
//
|
||||
// return attributes;
|
||||
// }
|
||||
|
||||
|
||||
/**
|
||||
@ -85,7 +85,7 @@ public class HDF5Utilities {
|
||||
* @return
|
||||
*/
|
||||
public static Group createGroup( FileFormat file, String groupName) {
|
||||
return createGroup(file, (Group) ((DefaultMutableTreeNode) file.getRootNode()).getUserObject(), groupName);
|
||||
return createGroup(file, (Group) file.getRootObject(), groupName);
|
||||
}
|
||||
|
||||
/**
|
||||
@ -100,7 +100,7 @@ public class HDF5Utilities {
|
||||
Objects.requireNonNull(groupName);
|
||||
|
||||
if (group == null){
|
||||
group = (Group) ((DefaultMutableTreeNode) file.getRootNode()).getUserObject();
|
||||
group = (Group) file.getRootObject();
|
||||
}
|
||||
|
||||
Group ngroup = group;
|
||||
@ -130,7 +130,7 @@ public class HDF5Utilities {
|
||||
* @return
|
||||
*/
|
||||
public static List<Dataset> getDatasets(H5File file) {
|
||||
Group rootNode = (Group) ((javax.swing.tree.DefaultMutableTreeNode) file.getRootNode()).getUserObject();
|
||||
Group rootNode = (Group) file.getRootObject();
|
||||
List<Dataset> datasets = getDatasets(rootNode);
|
||||
return datasets;
|
||||
}
|
||||
|
@ -1,6 +1,12 @@
|
||||
package ch.psi.imagej.hdf5;
|
||||
|
||||
|
||||
import hdf.object.Dataset;
|
||||
import hdf.object.Datatype;
|
||||
import hdf.object.FileFormat;
|
||||
import hdf.object.Group;
|
||||
import hdf.object.h5.H5Datatype;
|
||||
import hdf.object.h5.H5File;
|
||||
import ij.*;
|
||||
import ij.io.*;
|
||||
import ij.plugin.filter.PlugInFilter;
|
||||
@ -10,9 +16,9 @@ import ij.gui.*;
|
||||
import java.util.logging.Level;
|
||||
import java.util.logging.Logger;
|
||||
|
||||
import ncsa.hdf.object.*; // the common object package
|
||||
import ncsa.hdf.object.h5.*; // the HDF5 implementation
|
||||
import ncsa.hdf.hdf5lib.exceptions.HDF5Exception;
|
||||
//import hdf.object.*; // the common object package
|
||||
//import hdf.object.h5.*; // the HDF5 implementation
|
||||
import hdf.hdf5lib.exceptions.HDF5Exception;
|
||||
|
||||
public class HDF5Writer implements PlugInFilter {
|
||||
|
||||
@ -60,6 +66,7 @@ public class HDF5Writer implements PlugInFilter {
|
||||
int imgColorType = imp.getType();
|
||||
|
||||
Datatype type = null;
|
||||
try {
|
||||
if (imgColorType == ImagePlus.GRAY8) {
|
||||
logger.info(" bit depth: " + imgColorDepth + ", type: GRAY8");
|
||||
type = new H5Datatype(Datatype.CLASS_CHAR, Datatype.NATIVE, Datatype.NATIVE, Datatype.SIGN_NONE);
|
||||
@ -73,6 +80,9 @@ public class HDF5Writer implements PlugInFilter {
|
||||
logger.info(" bit depth: " + imgColorDepth + ", type: COLOR_RGB");
|
||||
type = new H5Datatype(Datatype.CLASS_CHAR, Datatype.NATIVE, Datatype.NATIVE, Datatype.SIGN_NONE);
|
||||
}
|
||||
} catch (Exception e){
|
||||
throw new RuntimeException("Unable to create dataset", e);
|
||||
}
|
||||
|
||||
if (imp.getOpenAsHyperStack() || imp.isHyperStack()) {
|
||||
// Hyperstack
|
||||
|
@ -29,32 +29,32 @@ public class ImagePlusHDF5 extends ImagePlus {
|
||||
|
||||
@Override
|
||||
public void windowOpened(WindowEvent e) {
|
||||
logger.info("");
|
||||
// logger.info("");
|
||||
}
|
||||
|
||||
@Override
|
||||
public void windowIconified(WindowEvent e) {
|
||||
logger.info("");
|
||||
// logger.info("");
|
||||
}
|
||||
|
||||
@Override
|
||||
public void windowDeiconified(WindowEvent e) {
|
||||
logger.info("");
|
||||
// logger.info("");
|
||||
}
|
||||
|
||||
@Override
|
||||
public void windowDeactivated(WindowEvent e) {
|
||||
logger.info("");
|
||||
// logger.info("");
|
||||
}
|
||||
|
||||
@Override
|
||||
public void windowClosing(WindowEvent e) {
|
||||
logger.info("Closing");
|
||||
// logger.info("Closing");
|
||||
}
|
||||
|
||||
@Override
|
||||
public void windowClosed(WindowEvent e) {
|
||||
logger.info("Closed");
|
||||
// logger.info("Closed");
|
||||
|
||||
if(stack!=null){
|
||||
stack.close();
|
||||
@ -63,7 +63,7 @@ public class ImagePlusHDF5 extends ImagePlus {
|
||||
|
||||
@Override
|
||||
public void windowActivated(WindowEvent e) {
|
||||
logger.info("");
|
||||
// logger.info("");
|
||||
}
|
||||
});
|
||||
}
|
||||
|
@ -14,7 +14,7 @@ import javax.swing.JPanel;
|
||||
import javax.swing.JScrollPane;
|
||||
import javax.swing.ScrollPaneConstants;
|
||||
|
||||
import ncsa.hdf.object.Dataset;
|
||||
import hdf.object.Dataset;
|
||||
import javax.swing.JTextField;
|
||||
import java.awt.FlowLayout;
|
||||
|
||||
|
@ -3,8 +3,8 @@ package ch.psi.imagej.hdf5;
|
||||
import java.util.logging.Level;
|
||||
import java.util.logging.Logger;
|
||||
|
||||
import ncsa.hdf.object.Dataset;
|
||||
import ncsa.hdf.object.h5.H5File;
|
||||
import hdf.object.Dataset;
|
||||
import hdf.object.h5.H5File;
|
||||
import ij.ImageStack;
|
||||
import ij.process.ByteProcessor;
|
||||
import ij.process.ColorProcessor;
|
||||
|
1459
src/main/java/hdf/object/Attribute.java
Normal file
1459
src/main/java/hdf/object/Attribute.java
Normal file
File diff suppressed because it is too large
Load Diff
445
src/main/java/hdf/object/CompoundDS.java
Normal file
445
src/main/java/hdf/object/CompoundDS.java
Normal file
@ -0,0 +1,445 @@
|
||||
/*****************************************************************************
|
||||
* Copyright by The HDF Group. *
|
||||
* Copyright by the Board of Trustees of the University of Illinois. *
|
||||
* All rights reserved. *
|
||||
* *
|
||||
* This file is part of the HDF Java Products distribution. *
|
||||
* The full copyright notice, including terms governing use, modification, *
|
||||
* and redistribution, is contained in the files COPYING and Copyright.html. *
|
||||
* COPYING can be found at the root of the source code distribution tree. *
|
||||
* Or, see https://support.hdfgroup.org/products/licenses.html *
|
||||
* If you do not have access to either file, you may request a copy from *
|
||||
* help@hdfgroup.org. *
|
||||
****************************************************************************/
|
||||
|
||||
package hdf.object;
|
||||
|
||||
/**
|
||||
* A CompoundDS is a dataset with compound datatype.
|
||||
* <p>
|
||||
* A compound datatype is an aggregation of one or more datatypes. Each member
|
||||
* of a compound type has a name which is unique within that type, and a
|
||||
* datatype of that member in a compound datum. Compound datatypes can be nested,
|
||||
* i.e. members of a compound datatype can be some other compound datatype.
|
||||
* <p>
|
||||
* For more details on compound datatypes,
|
||||
* see <b> <a href="https://support.hdfgroup.org/HDF5/doc/UG/HDF5_Users_Guide-Responsive%20HTML5/index.html">HDF5 User's Guide</a> </b>
|
||||
* <p>
|
||||
* Since Java cannot handle C-structured compound data, data in a compound dataset
|
||||
* is loaded in to an Java List. Each element of the list is a data array that
|
||||
* corresponds to a compound field. The data is read/written by compound field.
|
||||
* <p>
|
||||
* For example, if compound dataset "comp" has the following nested structure,
|
||||
* and member datatypes
|
||||
*
|
||||
* <pre>
|
||||
* comp --> m01 (int)
|
||||
* comp --> m02 (float)
|
||||
* comp --> nest1 --> m11 (char)
|
||||
* comp --> nest1 --> m12 (String)
|
||||
* comp --> nest1 --> nest2 --> m21 (long)
|
||||
* comp --> nest1 --> nest2 --> m22 (double)
|
||||
* </pre>
|
||||
*
|
||||
* The data object is a Java list of six arrays: {int[], float[], char[],
|
||||
* Stirng[], long[] and double[]}.
|
||||
*
|
||||
*
|
||||
* @version 1.1 9/4/2007
|
||||
* @author Peter X. Cao
|
||||
*/
|
||||
public abstract class CompoundDS extends Dataset implements CompoundDataFormat {
|
||||
private static final long serialVersionUID = -4880399929644095662L;
|
||||
|
||||
private static final org.slf4j.Logger log = org.slf4j.LoggerFactory.getLogger(CompoundDS.class);
|
||||
|
||||
/**
|
||||
* A single character to separate the names of nested compound fields. An
|
||||
* extended ASCII character, 0x95, is used to avoid common characters in
|
||||
* compound names.
|
||||
*/
|
||||
public static final String SEPARATOR = "\u0095";
|
||||
|
||||
/**
|
||||
* The number of members of the compound dataset.
|
||||
*/
|
||||
protected int numberOfMembers;
|
||||
|
||||
/**
|
||||
* The names of members of the compound dataset.
|
||||
*/
|
||||
protected String[] memberNames;
|
||||
|
||||
/**
|
||||
* Returns array containing the total number of elements of the members of
|
||||
* this compound dataset.
|
||||
* <p>
|
||||
* For example, a compound dataset COMP has members of A, B and C as
|
||||
*
|
||||
* <pre>
|
||||
* COMP {
|
||||
* int A;
|
||||
* float B[5];
|
||||
* double C[2][3];
|
||||
* }
|
||||
* </pre>
|
||||
*
|
||||
* memberOrders is an integer array of {1, 5, 6} to indicate that member A
|
||||
* has one element, member B has 5 elements, and member C has 6 elements.
|
||||
*/
|
||||
protected int[] memberOrders;
|
||||
|
||||
/**
|
||||
* The dimension sizes of each member.
|
||||
* <p>
|
||||
* The i-th element of the Object[] is an integer array (int[]) that
|
||||
* contains the dimension sizes of the i-th member.
|
||||
*/
|
||||
protected transient Object[] memberDims;
|
||||
|
||||
/**
|
||||
* The datatypes of compound members.
|
||||
*/
|
||||
protected Datatype[] memberTypes;
|
||||
|
||||
/**
|
||||
* The array to store flags to indicate if a member of this compound
|
||||
* dataset is selected for read/write.
|
||||
* <p>
|
||||
* If a member is selected, the read/write will perform on the member.
|
||||
* Applications such as HDFView will only display the selected members of
|
||||
* the compound dataset.
|
||||
*
|
||||
* <pre>
|
||||
* For example, if a compound dataset has four members
|
||||
* String[] memberNames = {"X", "Y", "Z", "TIME"};
|
||||
* and
|
||||
* boolean[] isMemberSelected = {true, false, false, true};
|
||||
* members "X" and "TIME" are selected for read and write.
|
||||
* </pre>
|
||||
*/
|
||||
protected boolean[] isMemberSelected;
|
||||
|
||||
/**
|
||||
* Constructs a CompoundDS object with the given file, dataset name and path.
|
||||
* <p>
|
||||
* The dataset object represents an existing dataset in the file. For
|
||||
* example, new H5CompoundDS(file, "dset1", "/g0/") constructs a dataset
|
||||
* object that corresponds to the dataset, "dset1", at group "/g0/".
|
||||
* <p>
|
||||
* This object is usually constructed at FileFormat.open(), which loads the
|
||||
* file structure and object information into memory. It is rarely used
|
||||
* elsewhere.
|
||||
*
|
||||
* @param theFile
|
||||
* the file that contains the dataset.
|
||||
* @param dsName
|
||||
* the name of the CompoundDS, e.g. "compDS".
|
||||
* @param dsPath
|
||||
* the full path of the CompoundDS, e.g. "/g1".
|
||||
*/
|
||||
public CompoundDS(FileFormat theFile, String dsName, String dsPath) {
|
||||
this(theFile, dsName, dsPath, null);
|
||||
}
|
||||
|
||||
/**
|
||||
* @deprecated Not for public use in the future.<br>
|
||||
* Using {@link #CompoundDS(FileFormat, String, String)}
|
||||
*
|
||||
* @param theFile
|
||||
* the file that contains the dataset.
|
||||
* @param dsName
|
||||
* the name of the CompoundDS, e.g. "compDS".
|
||||
* @param dsPath
|
||||
* the full path of the CompoundDS, e.g. "/g1".
|
||||
* @param oid
|
||||
* the oid of the CompoundDS.
|
||||
*/
|
||||
@Deprecated
|
||||
public CompoundDS(FileFormat theFile, String dsName, String dsPath, long[] oid) {
|
||||
super(theFile, dsName, dsPath, oid);
|
||||
|
||||
numberOfMembers = 0;
|
||||
memberNames = null;
|
||||
isMemberSelected = null;
|
||||
memberTypes = null;
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns the number of members of the compound dataset.
|
||||
*
|
||||
* @return the number of members of the compound dataset.
|
||||
*/
|
||||
@Override
|
||||
public final int getMemberCount() {
|
||||
return numberOfMembers;
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns the number of selected members of the compound dataset.
|
||||
*
|
||||
* Selected members are the compound fields which are selected for
|
||||
* read/write.
|
||||
* <p>
|
||||
* For example, in a compound datatype of {int A, float B, char[] C},
|
||||
* users can choose to retrieve only {A, C} from the dataset. In this
|
||||
* case, getSelectedMemberCount() returns two.
|
||||
*
|
||||
* @return the number of selected members.
|
||||
*/
|
||||
@Override
|
||||
public final int getSelectedMemberCount() {
|
||||
int count = 0;
|
||||
|
||||
if (isMemberSelected != null) {
|
||||
for (int i = 0; i < isMemberSelected.length; i++) {
|
||||
if (isMemberSelected[i]) {
|
||||
count++;
|
||||
}
|
||||
}
|
||||
}
|
||||
log.trace("count of selected members={}", count);
|
||||
|
||||
return count;
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns the names of the members of the compound dataset. The names of
|
||||
* compound members are stored in an array of Strings.
|
||||
* <p>
|
||||
* For example, for a compound datatype of {int A, float B, char[] C}
|
||||
* getMemberNames() returns ["A", "B", "C"}.
|
||||
*
|
||||
* @return the names of compound members.
|
||||
*/
|
||||
@Override
|
||||
public final String[] getMemberNames() {
|
||||
return memberNames;
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns an array of the names of the selected members of the compound dataset.
|
||||
*
|
||||
* @return an array of the names of the selected members of the compound dataset.
|
||||
*/
|
||||
public final String[] getSelectedMemberNames() {
|
||||
if (isMemberSelected == null) {
|
||||
log.debug("getSelectedMemberNames(): isMemberSelected array is null");
|
||||
log.trace("getSelectedMemberNames(): finish");
|
||||
return memberNames;
|
||||
}
|
||||
|
||||
int idx = 0;
|
||||
String[] names = new String[getSelectedMemberCount()];
|
||||
for (int i = 0; i < isMemberSelected.length; i++) {
|
||||
if (isMemberSelected[i]) {
|
||||
names[idx++] = memberNames[i];
|
||||
}
|
||||
}
|
||||
|
||||
return names;
|
||||
}
|
||||
|
||||
/**
|
||||
* Checks if a member of the compound dataset is selected for read/write.
|
||||
*
|
||||
* @param idx
|
||||
* the index of compound member.
|
||||
*
|
||||
* @return true if the i-th memeber is selected; otherwise returns false.
|
||||
*/
|
||||
@Override
|
||||
public final boolean isMemberSelected(int idx) {
|
||||
if ((isMemberSelected != null) && (isMemberSelected.length > idx)) {
|
||||
return isMemberSelected[idx];
|
||||
}
|
||||
else {
|
||||
return false;
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Selects the i-th member for read/write.
|
||||
*
|
||||
* @param idx
|
||||
* the index of compound member.
|
||||
*/
|
||||
@Override
|
||||
public final void selectMember(int idx) {
|
||||
if ((isMemberSelected != null) && (isMemberSelected.length > idx)) {
|
||||
isMemberSelected[idx] = true;
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Selects/deselects all members.
|
||||
*
|
||||
* @param selectAll
|
||||
* The indicator to select or deselect all members. If true, all
|
||||
* members are selected for read/write. If false, no member is
|
||||
* selected for read/write.
|
||||
*/
|
||||
@Override
|
||||
public final void setAllMemberSelection(boolean selectAll) {
|
||||
if (isMemberSelected == null) {
|
||||
return;
|
||||
}
|
||||
|
||||
for (int i = 0; i < isMemberSelected.length; i++) {
|
||||
isMemberSelected[i] = selectAll;
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns array containing the total number of elements of the members of
|
||||
* the compound dataset.
|
||||
* <p>
|
||||
* For example, a compound dataset COMP has members of A, B and C as
|
||||
*
|
||||
* <pre>
|
||||
* COMP {
|
||||
* int A;
|
||||
* float B[5];
|
||||
* double C[2][3];
|
||||
* }
|
||||
* </pre>
|
||||
*
|
||||
* getMemberOrders() will return an integer array of {1, 5, 6} to indicate
|
||||
* that member A has one element, member B has 5 elements, and member C has
|
||||
* 6 elements.
|
||||
*
|
||||
* @return the array containing the total number of elements of the members
|
||||
* of compound.
|
||||
*/
|
||||
@Override
|
||||
public final int[] getMemberOrders() {
|
||||
return memberOrders;
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns array containing the total number of elements of the selected
|
||||
* members of the compound dataset.
|
||||
*
|
||||
* <p>
|
||||
* For example, a compound dataset COMP has members of A, B and C as
|
||||
*
|
||||
* <pre>
|
||||
* COMP {
|
||||
* int A;
|
||||
* float B[5];
|
||||
* double C[2][3];
|
||||
* }
|
||||
* </pre>
|
||||
*
|
||||
* If A and B are selected, getSelectedMemberOrders() returns an array of
|
||||
* {1, 5}
|
||||
*
|
||||
* @return array containing the total number of elements of the selected
|
||||
* members of compound.
|
||||
*/
|
||||
@Override
|
||||
public final int[] getSelectedMemberOrders() {
|
||||
log.trace("getSelectedMemberOrders(): start");
|
||||
|
||||
if (isMemberSelected == null) {
|
||||
log.debug("getSelectedMemberOrders(): isMemberSelected array is null");
|
||||
log.trace("getSelectedMemberOrders(): finish");
|
||||
return memberOrders;
|
||||
}
|
||||
|
||||
int idx = 0;
|
||||
int[] orders = new int[getSelectedMemberCount()];
|
||||
for (int i = 0; i < isMemberSelected.length; i++) {
|
||||
if (isMemberSelected[i]) {
|
||||
orders[idx++] = memberOrders[i];
|
||||
}
|
||||
}
|
||||
|
||||
log.trace("getSelectedMemberOrders(): finish");
|
||||
|
||||
return orders;
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns the dimension sizes of the i-th member.
|
||||
* <p>
|
||||
* For example, a compound dataset COMP has members of A, B and C as
|
||||
*
|
||||
* <pre>
|
||||
* COMP {
|
||||
* int A;
|
||||
* float B[5];
|
||||
* double C[2][3];
|
||||
* }
|
||||
* </pre>
|
||||
*
|
||||
* getMemberDims(2) returns an array of {2, 3}, while getMemberDims(1)
|
||||
* returns an array of {5}, and getMemberDims(0) returns null.
|
||||
*
|
||||
* @param i the i-th member
|
||||
*
|
||||
* @return the dimension sizes of the i-th member, null if the compound
|
||||
* member is not an array.
|
||||
*/
|
||||
@Override
|
||||
public final int[] getMemberDims(int i) {
|
||||
if (memberDims == null) {
|
||||
return null;
|
||||
}
|
||||
return (int[]) memberDims[i];
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns an array of datatype objects of compound members.
|
||||
* <p>
|
||||
* Each member of a compound dataset has its own datatype. The datatype of a
|
||||
* member can be atomic or other compound datatype (nested compound).
|
||||
* Sub-classes set up the datatype objects at init().
|
||||
* <p>
|
||||
*
|
||||
* @return the array of datatype objects of the compound members.
|
||||
*/
|
||||
@Override
|
||||
public final Datatype[] getMemberTypes() {
|
||||
return memberTypes;
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns an array of datatype objects of selected compound members.
|
||||
*
|
||||
* @return an array of datatype objects of selected compound members.
|
||||
*/
|
||||
@Override
|
||||
public final Datatype[] getSelectedMemberTypes() {
|
||||
log.trace("getSelectedMemberTypes(): start");
|
||||
|
||||
if (isMemberSelected == null) {
|
||||
log.debug("getSelectedMemberTypes(): isMemberSelected array is null");
|
||||
log.trace("getSelectedMemberTypes(): finish");
|
||||
return memberTypes;
|
||||
}
|
||||
|
||||
int idx = 0;
|
||||
Datatype[] types = new Datatype[getSelectedMemberCount()];
|
||||
for (int i = 0; i < isMemberSelected.length; i++) {
|
||||
if (isMemberSelected[i]) {
|
||||
types[idx++] = memberTypes[i];
|
||||
}
|
||||
}
|
||||
|
||||
log.trace("getSelectedMemberTypes(): finish");
|
||||
|
||||
return types;
|
||||
}
|
||||
|
||||
/**
|
||||
* @deprecated Not implemented for compound dataset.
|
||||
*/
|
||||
@Deprecated
|
||||
@Override
|
||||
public Dataset copy(Group pgroup, String name, long[] dims, Object data)
|
||||
throws Exception {
|
||||
throw new UnsupportedOperationException(
|
||||
"Writing a subset of a compound dataset to a new dataset is not implemented.");
|
||||
}
|
||||
}
|
184
src/main/java/hdf/object/CompoundDataFormat.java
Normal file
184
src/main/java/hdf/object/CompoundDataFormat.java
Normal file
@ -0,0 +1,184 @@
|
||||
/*****************************************************************************
|
||||
* Copyright by The HDF Group. *
|
||||
* Copyright by the Board of Trustees of the University of Illinois. *
|
||||
* All rights reserved. *
|
||||
* *
|
||||
* This file is part of the HDF Java Products distribution. *
|
||||
* The full copyright notice, including terms governing use, modification, *
|
||||
* and redistribution, is contained in the files COPYING and Copyright.html. *
|
||||
* COPYING can be found at the root of the source code distribution tree. *
|
||||
* Or, see https://support.hdfgroup.org/products/licenses.html *
|
||||
* If you do not have access to either file, you may request a copy from *
|
||||
* help@hdfgroup.org. *
|
||||
****************************************************************************/
|
||||
|
||||
package hdf.object;
|
||||
|
||||
/**
|
||||
* An interface that provides general operations for data with a Compound
|
||||
* datatype. For example, getting the names, dataspaces or datatypes of the
|
||||
* members of the Compound datatype.
|
||||
* <p>
|
||||
*
|
||||
* @see hdf.object.HObject
|
||||
*
|
||||
* @version 1.0 5/3/2018
|
||||
* @author Jordan T. Henderson
|
||||
*/
|
||||
public interface CompoundDataFormat extends DataFormat {
|
||||
|
||||
/**
|
||||
* Returns the number of members of the compound data object.
|
||||
*
|
||||
* @return the number of members of the compound data object.
|
||||
*/
|
||||
public abstract int getMemberCount();
|
||||
|
||||
/**
|
||||
* Returns the number of selected members of the compound data object.
|
||||
*
|
||||
* Selected members are the compound fields which are selected for read/write.
|
||||
* <p>
|
||||
* For example, in a compound datatype of {int A, float B, char[] C}, users can
|
||||
* choose to retrieve only {A, C} from the data object. In this case,
|
||||
* getSelectedMemberCount() returns two.
|
||||
*
|
||||
* @return the number of selected members.
|
||||
*/
|
||||
public abstract int getSelectedMemberCount();
|
||||
|
||||
/**
|
||||
* Returns the names of the members of the compound data object. The names of
|
||||
* compound members are stored in an array of Strings.
|
||||
* <p>
|
||||
* For example, for a compound datatype of {int A, float B, char[] C}
|
||||
* getMemberNames() returns ["A", "B", "C"}.
|
||||
*
|
||||
* @return the names of compound members.
|
||||
*/
|
||||
public abstract String[] getMemberNames();
|
||||
|
||||
/**
|
||||
* Returns an array of the names of the selected compound members.
|
||||
*
|
||||
* @return an array of the names of the selected compound members.
|
||||
*/
|
||||
public abstract String[] getSelectedMemberNames();
|
||||
|
||||
/**
|
||||
* Checks if a member of the compound data object is selected for read/write.
|
||||
*
|
||||
* @param idx
|
||||
* the index of compound member.
|
||||
*
|
||||
* @return true if the i-th memeber is selected; otherwise returns false.
|
||||
*/
|
||||
public abstract boolean isMemberSelected(int idx);
|
||||
|
||||
/**
|
||||
* Selects the i-th member for read/write.
|
||||
*
|
||||
* @param idx
|
||||
* the index of compound member.
|
||||
*/
|
||||
public abstract void selectMember(int idx);
|
||||
|
||||
/**
|
||||
* Selects/deselects all members.
|
||||
*
|
||||
* @param selectAll
|
||||
* The indicator to select or deselect all members. If true, all
|
||||
* members are selected for read/write. If false, no member is
|
||||
* selected for read/write.
|
||||
*/
|
||||
public abstract void setAllMemberSelection(boolean selectAll);
|
||||
|
||||
/**
|
||||
* Returns array containing the total number of elements of the members of the
|
||||
* compound data object.
|
||||
* <p>
|
||||
* For example, a compound dataset COMP has members of A, B and C as
|
||||
*
|
||||
* <pre>
|
||||
* COMP {
|
||||
* int A;
|
||||
* float B[5];
|
||||
* double C[2][3];
|
||||
* }
|
||||
* </pre>
|
||||
*
|
||||
* getMemberOrders() will return an integer array of {1, 5, 6} to indicate that
|
||||
* member A has one element, member B has 5 elements, and member C has 6
|
||||
* elements.
|
||||
*
|
||||
* @return the array containing the total number of elements of the members of
|
||||
* the compound data object.
|
||||
*/
|
||||
public abstract int[] getMemberOrders();
|
||||
|
||||
/**
|
||||
* Returns array containing the total number of elements of the selected members
|
||||
* of the compound data object.
|
||||
*
|
||||
* <p>
|
||||
* For example, a compound dataset COMP has members of A, B and C as
|
||||
*
|
||||
* <pre>
|
||||
* COMP {
|
||||
* int A;
|
||||
* float B[5];
|
||||
* double C[2][3];
|
||||
* }
|
||||
* </pre>
|
||||
*
|
||||
* If A and B are selected, getSelectedMemberOrders() returns an array of {1, 5}
|
||||
*
|
||||
* @return array containing the total number of elements of the selected members
|
||||
* of the compound data object.
|
||||
*/
|
||||
public abstract int[] getSelectedMemberOrders();
|
||||
|
||||
/**
|
||||
* Returns the dimension sizes of the i-th member.
|
||||
* <p>
|
||||
* For example, a compound dataset COMP has members of A, B and C as
|
||||
*
|
||||
* <pre>
|
||||
* COMP {
|
||||
* int A;
|
||||
* float B[5];
|
||||
* double C[2][3];
|
||||
* }
|
||||
* </pre>
|
||||
*
|
||||
* getMemberDims(2) returns an array of {2, 3}, while getMemberDims(1) returns
|
||||
* an array of {5}, and getMemberDims(0) returns null.
|
||||
*
|
||||
* @param i
|
||||
* the i-th member
|
||||
*
|
||||
* @return the dimension sizes of the i-th member, null if the compound member
|
||||
* is not an array.
|
||||
*/
|
||||
public abstract int[] getMemberDims(int i);
|
||||
|
||||
/**
|
||||
* Returns an array of datatype objects of the compound members.
|
||||
* <p>
|
||||
* Each member of a compound data object has its own datatype. The datatype of a
|
||||
* member can be atomic or other compound datatype (nested compound). The
|
||||
* datatype objects are setup at init().
|
||||
* <p>
|
||||
*
|
||||
* @return the array of datatype objects of the compound members.
|
||||
*/
|
||||
public abstract Datatype[] getMemberTypes();
|
||||
|
||||
/**
|
||||
* Returns an array of datatype objects of the selected compound members.
|
||||
*
|
||||
* @return an array of datatype objects of the selected compound members.
|
||||
*/
|
||||
public abstract Datatype[] getSelectedMemberTypes();
|
||||
|
||||
}
|
366
src/main/java/hdf/object/DataFormat.java
Normal file
366
src/main/java/hdf/object/DataFormat.java
Normal file
@ -0,0 +1,366 @@
|
||||
/*****************************************************************************
|
||||
* Copyright by The HDF Group. *
|
||||
* Copyright by the Board of Trustees of the University of Illinois. *
|
||||
* All rights reserved. *
|
||||
* *
|
||||
* This file is part of the HDF Java Products distribution. *
|
||||
* The full copyright notice, including terms governing use, modification, *
|
||||
* and redistribution, is contained in the files COPYING and Copyright.html. *
|
||||
* COPYING can be found at the root of the source code distribution tree. *
|
||||
* Or, see https://support.hdfgroup.org/products/licenses.html *
|
||||
* If you do not have access to either file, you may request a copy from *
|
||||
* help@hdfgroup.org. *
|
||||
****************************************************************************/
|
||||
|
||||
package hdf.object;
|
||||
|
||||
/**
|
||||
* An interface that provides general I/O operations for object data. For
|
||||
* example, reading data content from the file into memory or writing data
|
||||
* content from memory into the file.
|
||||
* <p>
|
||||
*
|
||||
* @see hdf.object.HObject
|
||||
*
|
||||
* @version 1.0 4/2/2018
|
||||
* @author Jordan T. Henderson
|
||||
*/
|
||||
public interface DataFormat {
|
||||
public abstract boolean isInited();
|
||||
|
||||
public abstract void init();
|
||||
|
||||
/**
|
||||
* Retrieves the object's data from the file.
|
||||
*
|
||||
* @return the object's data.
|
||||
*
|
||||
* @throws Exception
|
||||
* if the data can not be retrieved
|
||||
*/
|
||||
public abstract Object getData() throws Exception, OutOfMemoryError;
|
||||
|
||||
/**
|
||||
*
|
||||
*
|
||||
* @param data
|
||||
* the data to write.
|
||||
*/
|
||||
public abstract void setData(Object data);
|
||||
|
||||
/**
|
||||
* Clears the current data buffer in memory and forces the next read() to load
|
||||
* the data from file.
|
||||
* <p>
|
||||
* The function read() loads data from file into memory only if the data is not
|
||||
* read. If data is already in memory, read() just returns the memory buffer.
|
||||
* Sometimes we want to force read() to re-read data from file. For example,
|
||||
* when the selection is changed, we need to re-read the data.
|
||||
*
|
||||
* @see #getData()
|
||||
* @see #read()
|
||||
*/
|
||||
public abstract void clearData();
|
||||
|
||||
/**
|
||||
* Reads the data from file.
|
||||
* <p>
|
||||
* read() reads the data from file to a memory buffer and returns the memory
|
||||
* buffer. The dataset object does not hold the memory buffer. To store the
|
||||
* memory buffer in the dataset object, one must call getData().
|
||||
* <p>
|
||||
* By default, the whole dataset is read into memory. Users can also select
|
||||
* a subset to read. Subsetting is done in an implicit way.
|
||||
*
|
||||
* @return the data read from file.
|
||||
*
|
||||
* @see #getData()
|
||||
*
|
||||
* @throws Exception
|
||||
* if object can not be read
|
||||
* @throws OutOfMemoryError
|
||||
* if memory is exhausted
|
||||
*/
|
||||
public abstract Object read() throws Exception, OutOfMemoryError;
|
||||
|
||||
/**
|
||||
* Writes a memory buffer to the object in the file.
|
||||
*
|
||||
* @param buf
|
||||
* the data to write
|
||||
*
|
||||
* @throws Exception
|
||||
* if data can not be written
|
||||
*/
|
||||
public abstract void write(Object buf) throws Exception;
|
||||
|
||||
/**
|
||||
* Writes the current memory buffer to the object in the file.
|
||||
*
|
||||
* @throws Exception
|
||||
* if data can not be written
|
||||
*/
|
||||
public abstract void write() throws Exception;
|
||||
|
||||
/**
|
||||
* Converts the data values of this data object to appropriate Java integers if
|
||||
* they are unsigned integers.
|
||||
*
|
||||
* @see hdf.object.Dataset#convertToUnsignedC(Object)
|
||||
* @see hdf.object.Dataset#convertFromUnsignedC(Object, Object)
|
||||
*
|
||||
* @return the converted data buffer.
|
||||
*/
|
||||
public Object convertFromUnsignedC();
|
||||
|
||||
/**
|
||||
* Converts Java integer data values of this data object back to unsigned C-type
|
||||
* integer data if they are unsigned integers.
|
||||
*
|
||||
* @see hdf.object.Dataset#convertToUnsignedC(Object)
|
||||
* @see hdf.object.Dataset#convertToUnsignedC(Object, Object)
|
||||
*
|
||||
* @return the converted data buffer.
|
||||
*/
|
||||
public Object convertToUnsignedC();
|
||||
|
||||
/**
|
||||
* Returns the fill values for the data object.
|
||||
*
|
||||
* @return the fill values for the data object.
|
||||
*/
|
||||
public abstract Object getFillValue();
|
||||
|
||||
/**
|
||||
* Returns the datatype of the data object.
|
||||
*
|
||||
* @return the datatype of the data object.
|
||||
*/
|
||||
public abstract Datatype getDatatype();
|
||||
|
||||
/**
|
||||
* Returns the rank (number of dimensions) of the data object. It returns a
|
||||
* negative number if it failed to retrieve the dimension information from
|
||||
* the file.
|
||||
*
|
||||
* @return the number of dimensions of the data object.
|
||||
*/
|
||||
public abstract int getRank();
|
||||
|
||||
/**
|
||||
* Returns the array that contains the dimension sizes of the data value of
|
||||
* the data object. It returns null if it failed to retrieve the dimension
|
||||
* information from the file.
|
||||
*
|
||||
* @return the dimension sizes of the data object.
|
||||
*/
|
||||
public abstract long[] getDims();
|
||||
|
||||
|
||||
/****************************************************************
|
||||
* * The following four definitions are used for data subsetting. * *
|
||||
****************************************************************/
|
||||
|
||||
/**
|
||||
* Returns the dimension sizes of the selected subset.
|
||||
* <p>
|
||||
* The SelectedDims is the number of data points of the selected subset.
|
||||
* Applications can use this array to change the size of selected subset.
|
||||
*
|
||||
* The selected size must be less than or equal to the current dimension size.
|
||||
* Combined with the starting position, selected sizes and stride, the subset of
|
||||
* a rectangle selection is fully defined.
|
||||
* <p>
|
||||
* For example, if a 4 X 5 dataset is as follows:
|
||||
*
|
||||
* <pre>
|
||||
* 0, 1, 2, 3, 4
|
||||
* 10, 11, 12, 13, 14
|
||||
* 20, 21, 22, 23, 24
|
||||
* 30, 31, 32, 33, 34
|
||||
* long[] dims = {4, 5};
|
||||
* long[] startDims = {1, 2};
|
||||
* long[] selectedDims = {3, 3};
|
||||
* long[] selectedStride = {1, 1};
|
||||
* then the following subset is selected by the startDims and selectedDims
|
||||
* 12, 13, 14
|
||||
* 22, 23, 24
|
||||
* 32, 33, 34
|
||||
* </pre>
|
||||
*
|
||||
* @return the dimension sizes of the selected subset.
|
||||
*/
|
||||
public abstract long[] getSelectedDims();
|
||||
|
||||
/**
|
||||
* Returns the starting position of a selected subset.
|
||||
* <p>
|
||||
* Applications can use this array to change the starting position of a
|
||||
* selection. Combined with the selected dimensions, selected sizes and stride,
|
||||
* the subset of a rectangle selection is fully defined.
|
||||
* <p>
|
||||
* For example, if a 4 X 5 dataset is as follows:
|
||||
*
|
||||
* <pre>
|
||||
* 0, 1, 2, 3, 4
|
||||
* 10, 11, 12, 13, 14
|
||||
* 20, 21, 22, 23, 24
|
||||
* 30, 31, 32, 33, 34
|
||||
* long[] dims = {4, 5};
|
||||
* long[] startDims = {1, 2};
|
||||
* long[] selectedDims = {3, 3};
|
||||
* long[] selectedStride = {1, 1};
|
||||
* then the following subset is selected by the startDims and selectedDims
|
||||
* 12, 13, 14
|
||||
* 22, 23, 24
|
||||
* 32, 33, 34
|
||||
* </pre>
|
||||
*
|
||||
* @return the starting position of a selected subset.
|
||||
*/
|
||||
public abstract long[] getStartDims();
|
||||
|
||||
/**
|
||||
* Returns the selectedStride of the selected dataset.
|
||||
* <p>
|
||||
* Applications can use this array to change how many elements to move in each
|
||||
* dimension.
|
||||
*
|
||||
* Combined with the starting position and selected sizes, the subset of a
|
||||
* rectangle selection is defined.
|
||||
* <p>
|
||||
* For example, if a 4 X 5 dataset is as follows:
|
||||
*
|
||||
* <pre>
|
||||
* 0, 1, 2, 3, 4
|
||||
* 10, 11, 12, 13, 14
|
||||
* 20, 21, 22, 23, 24
|
||||
* 30, 31, 32, 33, 34
|
||||
* long[] dims = {4, 5};
|
||||
* long[] startDims = {0, 0};
|
||||
* long[] selectedDims = {2, 2};
|
||||
* long[] selectedStride = {2, 3};
|
||||
* then the following subset is selected by the startDims and selectedDims
|
||||
* 0, 3
|
||||
* 20, 23
|
||||
* </pre>
|
||||
*
|
||||
* @return the selectedStride of the selected dataset.
|
||||
*/
|
||||
public abstract long[] getStride();
|
||||
|
||||
/**
|
||||
* Returns the indices of display order.
|
||||
* <p>
|
||||
*
|
||||
* selectedIndex[] is provided for two purposes:
|
||||
* <OL>
|
||||
* <LI>selectedIndex[] is used to indicate the order of dimensions for display.
|
||||
* selectedIndex[0] is for the row, selectedIndex[1] is for the column and
|
||||
* selectedIndex[2] for the depth.
|
||||
* <p>
|
||||
* For example, for a four dimension dataset, if selectedIndex[] = {1, 2, 3},
|
||||
* then dim[1] is selected as row index, dim[2] is selected as column index and
|
||||
* dim[3] is selected as depth index.
|
||||
* <LI>selectedIndex[] is also used to select dimensions for display for
|
||||
* datasets with three or more dimensions. We assume that applications such as
|
||||
* HDFView can only display data values up to three dimensions (2D
|
||||
* spreadsheet/image with a third dimension which the 2D spreadsheet/image is
|
||||
* selected from). For datasets with more than three dimensions, we need
|
||||
* selectedIndex[] to tell applications which three dimensions are chosen for
|
||||
* display. <br>
|
||||
* For example, for a four dimension dataset, if selectedIndex[] = {1, 2, 3},
|
||||
* then dim[1] is selected as row index, dim[2] is selected as column index and
|
||||
* dim[3] is selected as depth index. dim[0] is not selected. Its location is
|
||||
* fixed at 0 by default.
|
||||
* </OL>
|
||||
*
|
||||
* @return the array of the indices of display order.
|
||||
*/
|
||||
public int[] getSelectedIndex();
|
||||
|
||||
/**************************************************************************
|
||||
* * The following two definitions are used primarily for GUI applications. * *
|
||||
**************************************************************************/
|
||||
|
||||
/**
|
||||
* Returns the dimension size of the vertical axis.
|
||||
*
|
||||
* <p>
|
||||
* This function is used by GUI applications such as HDFView. GUI applications
|
||||
* display a dataset in a 2D table or 2D image. The display order is specified
|
||||
* by the index array of selectedIndex as follow:
|
||||
* <dl>
|
||||
* <dt>selectedIndex[0] -- height</dt>
|
||||
* <dd>The vertical axis</dd>
|
||||
* <dt>selectedIndex[1] -- width</dt>
|
||||
* <dd>The horizontal axis</dd>
|
||||
* <dt>selectedIndex[2] -- depth</dt>
|
||||
* <dd>The depth axis is used for 3 or more dimensional datasets.</dd>
|
||||
* </dl>
|
||||
* Applications can use getSelectedIndex() to access and change the display
|
||||
* order. For example, in a 2D dataset of 200x50 (dim0=200, dim1=50), the
|
||||
* following code will set the height=200 and width=50.
|
||||
*
|
||||
* <pre>
|
||||
* int[] selectedIndex = dataset.getSelectedIndex();
|
||||
* selectedIndex[0] = 0;
|
||||
* selectedIndex[1] = 1;
|
||||
* </pre>
|
||||
*
|
||||
* @see #getSelectedIndex()
|
||||
* @see #getWidth()
|
||||
*
|
||||
* @return the size of dimension of the vertical axis.
|
||||
*/
|
||||
public long getHeight();
|
||||
|
||||
/**
|
||||
* Returns the dimension size of the horizontal axis.
|
||||
*
|
||||
* <p>
|
||||
* This function is used by GUI applications such as HDFView. GUI applications
|
||||
* display a dataset in 2D Table or 2D Image. The display order is specified by
|
||||
* the index array of selectedIndex as follow:
|
||||
* <dl>
|
||||
* <dt>selectedIndex[0] -- height</dt>
|
||||
* <dd>The vertical axis</dd>
|
||||
* <dt>selectedIndex[1] -- width</dt>
|
||||
* <dd>The horizontal axis</dd>
|
||||
* <dt>selectedIndex[2] -- depth</dt>
|
||||
* <dd>The depth axis, which is used for 3 or more dimension datasets.</dd>
|
||||
* </dl>
|
||||
* Applications can use getSelectedIndex() to access and change the display
|
||||
* order. For example, in a 2D dataset of 200x50 (dim0=200, dim1=50), the
|
||||
* following code will set the height=200 and width=100.
|
||||
*
|
||||
* <pre>
|
||||
* int[] selectedIndex = dataset.getSelectedIndex();
|
||||
* selectedIndex[0] = 0;
|
||||
* selectedIndex[1] = 1;
|
||||
* </pre>
|
||||
*
|
||||
* @see #getSelectedIndex()
|
||||
* @see #getHeight()
|
||||
*
|
||||
* @return the size of dimension of the horizontal axis.
|
||||
*/
|
||||
public long getWidth();
|
||||
|
||||
/**
|
||||
* Returns the string representation of compression information.
|
||||
* <p>
|
||||
* For example, "SZIP: Pixels per block = 8: H5Z_FILTER_CONFIG_DECODE_ENABLED".
|
||||
*
|
||||
* @return the string representation of compression information.
|
||||
*/
|
||||
public abstract String getCompression();
|
||||
|
||||
/**
|
||||
* Get runtime Class of the original data buffer if converted.
|
||||
*
|
||||
* @return the Class of the original data buffer
|
||||
*/
|
||||
@SuppressWarnings("rawtypes")
|
||||
public abstract Class getOriginalClass();
|
||||
}
|
1297
src/main/java/hdf/object/Dataset.java
Normal file
1297
src/main/java/hdf/object/Dataset.java
Normal file
File diff suppressed because it is too large
Load Diff
933
src/main/java/hdf/object/Datatype.java
Normal file
933
src/main/java/hdf/object/Datatype.java
Normal file
@ -0,0 +1,933 @@
|
||||
/*****************************************************************************
|
||||
* Copyright by The HDF Group. *
|
||||
* Copyright by the Board of Trustees of the University of Illinois. *
|
||||
* All rights reserved. *
|
||||
* *
|
||||
* This file is part of the HDF Java Products distribution. *
|
||||
* The full copyright notice, including terms governing use, modification, *
|
||||
* and redistribution, is contained in the files COPYING and Copyright.html. *
|
||||
* COPYING can be found at the root of the source code distribution tree. *
|
||||
* Or, see https://support.hdfgroup.org/products/licenses.html *
|
||||
* If you do not have access to either file, you may request a copy from *
|
||||
* help@hdfgroup.org. *
|
||||
****************************************************************************/
|
||||
|
||||
package hdf.object;
|
||||
|
||||
import java.util.ArrayList;
|
||||
import java.util.HashMap;
|
||||
import java.util.Iterator;
|
||||
import java.util.List;
|
||||
import java.util.Map;
|
||||
import java.util.Map.Entry;
|
||||
|
||||
/**
|
||||
* Datatype is an abstract class that defines datatype characteristics and APIs for a data type.
|
||||
* <p>
|
||||
* A datatype has four basic characteristics: class, size, byte order and sign. These
|
||||
* characteristics are defined in the
|
||||
* <a href="https://support.hdfgroup.org/HDF5/doc/UG/HDF5_Users_Guide-Responsive%20HTML5/index.html">HDF5 User's Guide</a>.
|
||||
* <p>
|
||||
* These characteristics apply to all the sub-classes. The sub-classes may have different ways to
|
||||
* describe a datatype. We here define the <strong> native datatype</strong> to the datatype used by
|
||||
* the sub-class. For example, H5Datatype uses a datatype identifier (hid_t) to specify a datatype.
|
||||
* NC2Datatype uses ucar.nc2.DataType object to describe its datatype. "Native" here is different
|
||||
* from the "native" definition in the HDF5 library.
|
||||
* <p>
|
||||
* Two functions, createNative() and fromNative(), are defined to convert the general
|
||||
* characteristics to/from the native datatype. Sub-classes must implement these functions so that
|
||||
* the conversion will be done correctly. The values of the CLASS member are not identical to HDF5
|
||||
* values for a datatype class.
|
||||
* <p>
|
||||
*
|
||||
* @version 1.1 9/4/2007
|
||||
* @author Peter X. Cao
|
||||
*/
|
||||
public abstract class Datatype extends HObject implements MetaDataContainer {
|
||||
|
||||
private static final long serialVersionUID = -581324710549963177L;
|
||||
|
||||
private static final org.slf4j.Logger log = org.slf4j.LoggerFactory.getLogger(Datatype.class);
|
||||
|
||||
/**
|
||||
* The default definition for datatype size, order, and sign.
|
||||
*/
|
||||
public static final int NATIVE = -1;
|
||||
|
||||
/**
|
||||
* See <a href="https://support.hdfgroup.org/HDF5/doc/UG/HDF5_Users_Guide-Responsive%20HTML5/index.html">HDF5 User's Guide</a>
|
||||
*/
|
||||
public static final int CLASS_NO_CLASS = -1;
|
||||
|
||||
/**
|
||||
* See <a href="https://support.hdfgroup.org/HDF5/doc/UG/HDF5_Users_Guide-Responsive%20HTML5/index.html">HDF5 User's Guide</a>
|
||||
*/
|
||||
public static final int CLASS_INTEGER = 0;
|
||||
|
||||
/**
|
||||
* See <a href="https://support.hdfgroup.org/HDF5/doc/UG/HDF5_Users_Guide-Responsive%20HTML5/index.html">HDF5 User's Guide</a>
|
||||
*/
|
||||
public static final int CLASS_FLOAT = 1;
|
||||
|
||||
/**
|
||||
* See <a href="https://support.hdfgroup.org/HDF5/doc/UG/HDF5_Users_Guide-Responsive%20HTML5/index.html">HDF5 User's Guide</a>
|
||||
*/
|
||||
public static final int CLASS_CHAR = 2;
|
||||
|
||||
/**
|
||||
* See <a href="https://support.hdfgroup.org/HDF5/doc/UG/HDF5_Users_Guide-Responsive%20HTML5/index.html">HDF5 User's Guide</a>
|
||||
*/
|
||||
public static final int CLASS_STRING = 3;
|
||||
|
||||
/**
|
||||
* See <a href="https://support.hdfgroup.org/HDF5/doc/UG/HDF5_Users_Guide-Responsive%20HTML5/index.html">HDF5 User's Guide</a>
|
||||
*/
|
||||
public static final int CLASS_BITFIELD = 4;
|
||||
|
||||
/**
|
||||
* See <a href="https://support.hdfgroup.org/HDF5/doc/UG/HDF5_Users_Guide-Responsive%20HTML5/index.html">HDF5 User's Guide</a>
|
||||
*/
|
||||
public static final int CLASS_OPAQUE = 5;
|
||||
|
||||
/**
|
||||
* See <a href="https://support.hdfgroup.org/HDF5/doc/UG/HDF5_Users_Guide-Responsive%20HTML5/index.html">HDF5 User's Guide</a>
|
||||
*/
|
||||
public static final int CLASS_COMPOUND = 6;
|
||||
|
||||
/**
|
||||
* See <a href="https://support.hdfgroup.org/HDF5/doc/UG/HDF5_Users_Guide-Responsive%20HTML5/index.html">HDF5 User's Guide</a>
|
||||
*/
|
||||
public static final int CLASS_REFERENCE = 7;
|
||||
|
||||
/**
|
||||
* See <a href="https://support.hdfgroup.org/HDF5/doc/UG/HDF5_Users_Guide-Responsive%20HTML5/index.html">HDF5 User's Guide</a>
|
||||
*/
|
||||
public static final int CLASS_ENUM = 8;
|
||||
|
||||
/**
|
||||
* See <a href="https://support.hdfgroup.org/HDF5/doc/UG/HDF5_Users_Guide-Responsive%20HTML5/index.html">HDF5 User's Guide</a>
|
||||
*/
|
||||
public static final int CLASS_VLEN = 9;
|
||||
|
||||
/**
|
||||
* See <a href="https://support.hdfgroup.org/HDF5/doc/UG/HDF5_Users_Guide-Responsive%20HTML5/index.html">HDF5 User's Guide</a>
|
||||
*/
|
||||
public static final int CLASS_ARRAY = 10;
|
||||
|
||||
/**
|
||||
* See <a href="https://support.hdfgroup.org/HDF5/doc/UG/HDF5_Users_Guide-Responsive%20HTML5/index.html">HDF5 User's Guide</a>
|
||||
*/
|
||||
public static final int CLASS_TIME = 11;
|
||||
|
||||
/**
|
||||
* See <a href="https://support.hdfgroup.org/HDF5/doc/UG/HDF5_Users_Guide-Responsive%20HTML5/index.html">HDF5 User's Guide</a>
|
||||
*/
|
||||
public static final int ORDER_LE = 0;
|
||||
|
||||
/**
|
||||
* See <a href="https://support.hdfgroup.org/HDF5/doc/UG/HDF5_Users_Guide-Responsive%20HTML5/index.html">HDF5 User's Guide</a>
|
||||
*/
|
||||
public static final int ORDER_BE = 1;
|
||||
|
||||
/**
|
||||
* See <a href="https://support.hdfgroup.org/HDF5/doc/UG/HDF5_Users_Guide-Responsive%20HTML5/index.html">HDF5 User's Guide</a>
|
||||
*/
|
||||
public static final int ORDER_VAX = 2;
|
||||
|
||||
/**
|
||||
* See <a href="https://support.hdfgroup.org/HDF5/doc/UG/HDF5_Users_Guide-Responsive%20HTML5/index.html">HDF5 User's Guide</a>
|
||||
*/
|
||||
public static final int ORDER_NONE = 3;
|
||||
|
||||
/**
|
||||
* See <a href="https://support.hdfgroup.org/HDF5/doc/UG/HDF5_Users_Guide-Responsive%20HTML5/index.html">HDF5 User's Guide</a>
|
||||
*/
|
||||
public static final int SIGN_NONE = 0;
|
||||
|
||||
/**
|
||||
* See <a href="https://support.hdfgroup.org/HDF5/doc/UG/HDF5_Users_Guide-Responsive%20HTML5/index.html">HDF5 User's Guide</a>
|
||||
*/
|
||||
public static final int SIGN_2 = 1;
|
||||
|
||||
/**
|
||||
* See <a href="https://support.hdfgroup.org/HDF5/doc/UG/HDF5_Users_Guide-Responsive%20HTML5/index.html">HDF5 User's Guide</a>
|
||||
*/
|
||||
public static final int NSGN = 2;
|
||||
|
||||
protected String datatypeDescription = null;
|
||||
|
||||
/**
|
||||
* The class of the datatype.
|
||||
*/
|
||||
protected int datatypeClass;
|
||||
|
||||
/**
|
||||
* The size (in bytes) of the datatype.
|
||||
*/
|
||||
protected long datatypeSize;
|
||||
|
||||
/**
|
||||
* The byte order of the datatype. Valid values are ORDER_LE, ORDER_BE, and
|
||||
* ORDER_VAX.
|
||||
*/
|
||||
protected int datatypeOrder;
|
||||
|
||||
/**
|
||||
* The sign of the datatype.
|
||||
*/
|
||||
protected int datatypeSign;
|
||||
|
||||
/**
|
||||
* The base datatype of this datatype (null if this datatype is atomic).
|
||||
*/
|
||||
protected Datatype baseType;
|
||||
|
||||
/**
|
||||
* The dimensions of the ARRAY element of an ARRAY datatype.
|
||||
*/
|
||||
protected long[] arrayDims;
|
||||
|
||||
/**
|
||||
* Determines whether this datatype is a variable-length type.
|
||||
*/
|
||||
protected boolean isVLEN = false;
|
||||
protected boolean isVariableStr = false;
|
||||
|
||||
/**
|
||||
* The (name, value) pairs of enum members.
|
||||
*/
|
||||
protected Map<String, String> enumMembers;
|
||||
|
||||
/**
|
||||
* The list of names of members of a compound Datatype.
|
||||
*/
|
||||
protected List<String> compoundMemberNames;
|
||||
|
||||
/**
|
||||
* The list of types of members of a compound Datatype.
|
||||
*/
|
||||
protected List<Datatype> compoundMemberTypes;
|
||||
|
||||
/**
|
||||
* The list of offsets of members of a compound Datatype.
|
||||
*/
|
||||
protected List<Long> compoundMemberOffsets;
|
||||
|
||||
/**
|
||||
* Constructs a named datatype with a given file, name and path.
|
||||
*
|
||||
* @param theFile
|
||||
* the HDF file.
|
||||
* @param typeName
|
||||
* the name of the datatype, e.g "12-bit Integer".
|
||||
* @param typePath
|
||||
* the full group path of the datatype, e.g. "/datatypes/".
|
||||
*/
|
||||
public Datatype(FileFormat theFile, String typeName, String typePath) {
|
||||
this(theFile, typeName, typePath, null);
|
||||
}
|
||||
|
||||
/**
|
||||
* @deprecated Not for public use in the future.<br>
|
||||
* Using {@link #Datatype(FileFormat, String, String)}
|
||||
*
|
||||
* @param theFile
|
||||
* the HDF file.
|
||||
* @param typeName
|
||||
* the name of the datatype, e.g "12-bit Integer".
|
||||
* @param typePath
|
||||
* the full group path of the datatype, e.g. "/datatypes/".
|
||||
* @param oid
|
||||
* the oidof the datatype.
|
||||
*/
|
||||
@Deprecated
|
||||
public Datatype(FileFormat theFile, String typeName, String typePath, long[] oid) {
|
||||
super(theFile, typeName, typePath, oid);
|
||||
}
|
||||
|
||||
/**
|
||||
* Constructs a Datatype with specified class, size, byte order and sign.
|
||||
* <p>
|
||||
* The following is a list of a few examples of Datatype.
|
||||
* <ol>
|
||||
* <li>to create unsigned native integer<br>
|
||||
* Datatype type = new Dataype(Datatype.CLASS_INTEGER, Datatype.NATIVE, Datatype.NATIVE, Datatype.SIGN_NONE);
|
||||
* <li>to create 16-bit signed integer with big endian<br>
|
||||
* Datatype type = new Dataype(Datatype.CLASS_INTEGER, 2, Datatype.ORDER_BE, Datatype.NATIVE);
|
||||
* <li>to create native float<br>
|
||||
* Datatype type = new Dataype(Datatype.CLASS_FLOAT, Datatype.NATIVE, Datatype.NATIVE, Datatype.NATIVE);
|
||||
* <li>to create 64-bit double<br>
|
||||
* Datatype type = new Dataype(Datatype.CLASS_FLOAT, 8, Datatype.NATIVE, Datatype.NATIVE);
|
||||
* </ol>
|
||||
*
|
||||
* @param tclass
|
||||
* the class of the datatype, e.g. CLASS_INTEGER, CLASS_FLOAT and etc.
|
||||
* @param tsize
|
||||
* the size of the datatype in bytes, e.g. for a 32-bit integer, the size is 4.
|
||||
* Valid values are NATIVE or a positive value.
|
||||
* @param torder
|
||||
* the byte order of the datatype. Valid values are ORDER_LE, ORDER_BE, ORDER_VAX,
|
||||
* ORDER_NONE and NATIVE.
|
||||
* @param tsign
|
||||
* the sign of the datatype. Valid values are SIGN_NONE, SIGN_2 and NATIVE.
|
||||
*
|
||||
* @throws Exception
|
||||
* if there is an error
|
||||
*/
|
||||
public Datatype(int tclass, int tsize, int torder, int tsign) throws Exception {
|
||||
this(tclass, tsize, torder, tsign, null);
|
||||
}
|
||||
|
||||
/**
|
||||
* Constructs a Datatype with specified class, size, byte order and sign.
|
||||
* <p>
|
||||
* The following is a list of a few examples of Datatype.
|
||||
* <ol>
|
||||
* <li>to create unsigned native integer<br>
|
||||
* Datatype type = new Dataype(Datatype.CLASS_INTEGER, Datatype.NATIVE, Datatype.NATIVE, Datatype.SIGN_NONE);
|
||||
* <li>to create 16-bit signed integer with big endian<br>
|
||||
* Datatype type = new Dataype(Datatype.CLASS_INTEGER, 2, Datatype.ORDER_BE, Datatype.NATIVE);
|
||||
* <li>to create native float<br>
|
||||
* Datatype type = new Dataype(Datatype.CLASS_FLOAT, Datatype.NATIVE, Datatype.NATIVE, Datatype.NATIVE);
|
||||
* <li>to create 64-bit double<br>
|
||||
* Datatype type = new Dataype(Datatype.CLASS_FLOAT, 8, Datatype.NATIVE, Datatype.NATIVE);
|
||||
* </ol>
|
||||
*
|
||||
* @param tclass
|
||||
* the class of the datatype, e.g. CLASS_INTEGER, CLASS_FLOAT and
|
||||
* etc.
|
||||
* @param tsize
|
||||
* the size of the datatype in bytes, e.g. for a 32-bit integer,
|
||||
* the size is 4.
|
||||
* Valid values are NATIVE or a positive value.
|
||||
* @param torder
|
||||
* the byte order of the datatype. Valid values are ORDER_LE,
|
||||
* ORDER_BE, ORDER_VAX, ORDER_NONE and NATIVE.
|
||||
* @param tsign
|
||||
* the sign of the datatype. Valid values are SIGN_NONE, SIGN_2 and NATIVE.
|
||||
* @param tbase
|
||||
* the base datatype of the new datatype
|
||||
*
|
||||
* @throws Exception
|
||||
* if there is an error
|
||||
*/
|
||||
public Datatype(int tclass, int tsize, int torder, int tsign, Datatype tbase) throws Exception {
|
||||
this(tclass, tsize, torder, tsign, tbase, null);
|
||||
}
|
||||
|
||||
/**
|
||||
* Constructs a Datatype with specified class, size, byte order and sign.
|
||||
* <p>
|
||||
* The following is a list of a few examples of Datatype.
|
||||
* <ol>
|
||||
* <li>to create unsigned native integer<br>
|
||||
* Datatype type = new Dataype(Datatype.CLASS_INTEGER, Datatype.NATIVE, Datatype.NATIVE, Datatype.SIGN_NONE);
|
||||
* <li>to create 16-bit signed integer with big endian<br>
|
||||
* Datatype type = new Dataype(Datatype.CLASS_INTEGER, 2, Datatype.ORDER_BE, Datatype.NATIVE);
|
||||
* <li>to create native float<br>
|
||||
* Datatype type = new Dataype(Datatype.CLASS_FLOAT, Datatype.NATIVE, Datatype.NATIVE, Datatype.NATIVE);
|
||||
* <li>to create 64-bit double<br>
|
||||
* Datatype type = new Dataype(Datatype.CLASS_FLOAT, 8, Datatype.NATIVE, Datatype.NATIVE);
|
||||
* </ol>
|
||||
*
|
||||
* @param tclass
|
||||
* the class of the datatype, e.g. CLASS_INTEGER, CLASS_FLOAT and etc.
|
||||
* @param tsize
|
||||
* the size of the datatype in bytes, e.g. for a 32-bit integer, the size is 4.
|
||||
* Valid values are NATIVE or a positive value.
|
||||
* @param torder
|
||||
* the byte order of the datatype. Valid values are ORDER_LE, ORDER_BE, ORDER_VAX,
|
||||
* ORDER_NONE and NATIVE.
|
||||
* @param tsign
|
||||
* the sign of the datatype. Valid values are SIGN_NONE, SIGN_2 and NATIVE.
|
||||
* @param tbase
|
||||
* the base datatype of the new datatype
|
||||
* @param pbase
|
||||
* the parent datatype of the new datatype
|
||||
*
|
||||
* @throws Exception
|
||||
* if there is an error
|
||||
*/
|
||||
public Datatype(int tclass, int tsize, int torder, int tsign, Datatype tbase, Datatype pbase) throws Exception {
|
||||
if ((tsize == 0) || (tsize < 0 && tsize != NATIVE))
|
||||
throw new Exception("invalid datatype size - " + tsize);
|
||||
if ((torder != ORDER_LE) && (torder != ORDER_BE) && (torder != ORDER_VAX)
|
||||
&& (torder != ORDER_NONE) && (torder != NATIVE))
|
||||
throw new Exception("invalid datatype order - " + torder);
|
||||
if ((tsign != SIGN_NONE) && (tsign != SIGN_2) && (tsign != NATIVE))
|
||||
throw new Exception("invalid datatype sign - " + tsign);
|
||||
|
||||
datatypeClass = tclass;
|
||||
datatypeSize = tsize;
|
||||
datatypeOrder = torder;
|
||||
datatypeSign = tsign;
|
||||
enumMembers = null;
|
||||
baseType = tbase;
|
||||
arrayDims = null;
|
||||
isVariableStr = (datatypeClass == Datatype.CLASS_STRING) && (tsize < 0);
|
||||
isVLEN = (datatypeClass == Datatype.CLASS_VLEN) || isVariableStr;
|
||||
|
||||
compoundMemberNames = new ArrayList<>();
|
||||
compoundMemberTypes = new ArrayList<>();
|
||||
compoundMemberOffsets = new ArrayList<>();
|
||||
|
||||
log.trace("datatypeClass={} datatypeSize={} datatypeOrder={} datatypeSign={} baseType={}",
|
||||
datatypeClass, datatypeSize, datatypeOrder, datatypeSign, baseType);
|
||||
}
|
||||
|
||||
/**
|
||||
* Constructs a Datatype with a given native datatype identifier.
|
||||
* <p>
|
||||
* For example, if the datatype identifier is a 32-bit unsigned integer created from HDF5,
|
||||
*
|
||||
* <pre>
|
||||
* long tid = H5.H5Tcopy(HDF5Constants.H5T_NATIVE_UNINT32);
|
||||
* Datatype dtype = new Datatype(tid);
|
||||
* </pre>
|
||||
*
|
||||
* will construct a datatype equivalent to new Datatype(CLASS_INTEGER, 4, NATIVE, SIGN_NONE);
|
||||
*
|
||||
* @see #fromNative(long tid)
|
||||
* @param tid
|
||||
* the native datatype identifier.
|
||||
*
|
||||
* @throws Exception
|
||||
* if there is an error
|
||||
*/
|
||||
public Datatype(long tid) throws Exception {
|
||||
this(tid, null);
|
||||
}
|
||||
|
||||
/**
|
||||
* Constructs a Datatype with a given native datatype identifier.
|
||||
* <p>
|
||||
* For example, if the datatype identifier is a 32-bit unsigned integer created from HDF5,
|
||||
*
|
||||
* <pre>
|
||||
* long tid = H5.H5Tcopy(HDF5Constants.H5T_NATIVE_UNINT32);
|
||||
* Datatype dtype = new Datatype(tid);
|
||||
* </pre>
|
||||
*
|
||||
* will construct a datatype equivalent to new Datatype(CLASS_INTEGER, 4, NATIVE, SIGN_NONE);
|
||||
*
|
||||
* @see #fromNative(long tid)
|
||||
* @param tid
|
||||
* the native datatype identifier.
|
||||
* @param pbase
|
||||
* the parent datatype of the new datatype
|
||||
*
|
||||
* @throws Exception
|
||||
* if there is an error
|
||||
*/
|
||||
public Datatype(long tid, Datatype pbase) throws Exception {
|
||||
this(CLASS_NO_CLASS, NATIVE, NATIVE, NATIVE, null, pbase);
|
||||
}
|
||||
|
||||
/**
|
||||
* Opens access to this named datatype. Sub-classes must replace this default implementation. For
|
||||
* example, in H5Datatype, open() function H5.H5Topen(loc_id, name) to get the datatype identifier.
|
||||
*
|
||||
* @return the datatype identifier if successful; otherwise returns negative value.
|
||||
*/
|
||||
@Override
|
||||
public long open() {
|
||||
return -1;
|
||||
}
|
||||
|
||||
/**
|
||||
* Closes a datatype identifier.
|
||||
* <p>
|
||||
* Sub-classes must replace this default implementation.
|
||||
*
|
||||
* @param id
|
||||
* the datatype identifier to close.
|
||||
*/
|
||||
@Override
|
||||
public abstract void close(long id);
|
||||
|
||||
/**
|
||||
* Returns the class of the datatype. Valid values are:
|
||||
* <ul>
|
||||
* <li>CLASS_NO_CLASS
|
||||
* <li>CLASS_INTEGER
|
||||
* <li>CLASS_FLOAT
|
||||
* <li>CLASS_CHAR
|
||||
* <li>CLASS_STRING
|
||||
* <li>CLASS_BITFIELD
|
||||
* <li>CLASS_OPAQUE
|
||||
* <li>CLASS_COMPOUND
|
||||
* <li>CLASS_REFERENCE
|
||||
* <li>CLASS_ENUM
|
||||
* <li>CLASS_VLEN
|
||||
* <li>CLASS_ARRAY
|
||||
* </ul>
|
||||
*
|
||||
* @return the class of the datatype.
|
||||
*/
|
||||
public int getDatatypeClass() {
|
||||
return datatypeClass;
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns the size of the datatype in bytes. For example, for a 32-bit
|
||||
* integer, the size is 4 (bytes).
|
||||
*
|
||||
* @return the size of the datatype.
|
||||
*/
|
||||
public long getDatatypeSize() {
|
||||
return datatypeSize;
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns the byte order of the datatype. Valid values are
|
||||
* <ul>
|
||||
* <li>ORDER_LE
|
||||
* <li>ORDER_BE
|
||||
* <li>ORDER_VAX
|
||||
* <li>ORDER_NONE
|
||||
* </ul>
|
||||
*
|
||||
* @return the byte order of the datatype.
|
||||
*/
|
||||
public int getDatatypeOrder() {
|
||||
return datatypeOrder;
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns the sign (SIGN_NONE, SIGN_2) of an integer datatype.
|
||||
*
|
||||
* @return the sign of the datatype.
|
||||
*/
|
||||
public int getDatatypeSign() {
|
||||
return datatypeSign;
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns the base datatype for this datatype.
|
||||
* <p>
|
||||
* For example, in a dataset of type ARRAY of integer, the datatype of the dataset is ARRAY. The
|
||||
* datatype of the base type is integer.
|
||||
*
|
||||
* @return the datatype of the contained basetype.
|
||||
*/
|
||||
public Datatype getDatatypeBase() {
|
||||
return baseType;
|
||||
}
|
||||
|
||||
/**
|
||||
* Sets the (key, value) pairs of enum members for enum datatype.
|
||||
* <p>
|
||||
* For Example,
|
||||
* <dl>
|
||||
* <dt>setEnumMembers("-40=lowTemp, 90=highTemp")</dt>
|
||||
* <dd>sets the key of enum member lowTemp to -40 and highTemp to 90.</dd>
|
||||
* <dt>setEnumMembers("lowTemp, highTemp")</dt>
|
||||
* <dd>sets enum members to defaults, i.e. 0=lowTemp and 1=highTemp</dd>
|
||||
* <dt>setEnumMembers("10=lowTemp, highTemp")</dt>
|
||||
* <dd>sets enum member lowTemp to 10 and highTemp to 11.</dd>
|
||||
* </dl>
|
||||
*
|
||||
* @param enumStr
|
||||
* the (key, value) pairs of enum members
|
||||
*/
|
||||
public final void setEnumMembers(String enumStr) {
|
||||
log.trace("setEnumMembers: is_enum enum_members={}", enumStr);
|
||||
enumMembers = new HashMap<>();
|
||||
String[] entries = enumStr.split(",");
|
||||
for (String entry : entries) {
|
||||
String[] keyValue = entry.split("=");
|
||||
enumMembers.put(keyValue[0].trim(), keyValue[1].trim());
|
||||
if (log.isTraceEnabled())
|
||||
log.trace("setEnumMembers: is_enum value={} name={}", keyValue[0].trim(), keyValue[1].trim());
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns the Map<String,String> pairs of enum members for enum datatype.
|
||||
*
|
||||
* @return enumStr Map<String,String%gt; pairs of enum members
|
||||
*/
|
||||
public final Map<String, String> getEnumMembers() {
|
||||
if (enumMembers == null) {
|
||||
enumMembers = new HashMap<>();
|
||||
enumMembers.put("1", "0");
|
||||
enumMembers.put("2", "1");
|
||||
}
|
||||
|
||||
return enumMembers;
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns the HashMap pairs of enum members for enum datatype.
|
||||
* <p>
|
||||
* For Example,
|
||||
* <dl>
|
||||
* <dt>getEnumMembersAsString()</dt>
|
||||
* <dd>returns "10=lowTemp, 40=highTemp"</dd>
|
||||
* </dl>
|
||||
*
|
||||
* @return enumStr the (key, value) pairs of enum members
|
||||
*/
|
||||
@SuppressWarnings("rawtypes")
|
||||
public final String getEnumMembersAsString() {
|
||||
if (enumMembers == null) {
|
||||
enumMembers = new HashMap<>();
|
||||
enumMembers.put("1", "0");
|
||||
enumMembers.put("2", "1");
|
||||
}
|
||||
|
||||
StringBuilder enumStr = new StringBuilder();
|
||||
Iterator<Entry<String, String>> entries = enumMembers.entrySet().iterator();
|
||||
int i = enumMembers.size();
|
||||
while (entries.hasNext()) {
|
||||
Entry thisEntry = entries.next();
|
||||
enumStr.append((String) thisEntry.getKey())
|
||||
.append("=")
|
||||
.append((String) thisEntry.getValue());
|
||||
|
||||
i--;
|
||||
if (i > 0)
|
||||
enumStr.append(", ");
|
||||
}
|
||||
return enumStr.toString();
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns the dimensions of an Array Datatype.
|
||||
*
|
||||
* @return dims the dimensions of the Array Datatype
|
||||
*/
|
||||
public final long[] getArrayDims() {
|
||||
return arrayDims;
|
||||
}
|
||||
|
||||
public final List<String> getCompoundMemberNames() {
|
||||
return compoundMemberNames;
|
||||
}
|
||||
|
||||
public final List<Datatype> getCompoundMemberTypes() {
|
||||
return compoundMemberTypes;
|
||||
}
|
||||
|
||||
/**
|
||||
* Converts the datatype object to a native datatype.
|
||||
*
|
||||
* Subclasses must implement it so that this datatype will be converted accordingly. Use close() to
|
||||
* close the native identifier; otherwise, the datatype will be left open.
|
||||
* <p>
|
||||
* For example, a HDF5 datatype created from<br>
|
||||
*
|
||||
* <pre>
|
||||
* H5Dataype dtype = new H5Datatype(CLASS_INTEGER, 4, NATIVE, SIGN_NONE);
|
||||
* int tid = dtype.createNative();
|
||||
* </pre>
|
||||
*
|
||||
* The "tid" will be the HDF5 datatype id of a 64-bit unsigned integer, which is equivalent to
|
||||
*
|
||||
* <pre>
|
||||
* int tid = H5.H5Tcopy(HDF5Constants.H5T_NATIVE_UNINT32);
|
||||
* </pre>
|
||||
*
|
||||
* @return the identifier of the native datatype.
|
||||
*/
|
||||
public abstract long createNative();
|
||||
|
||||
/**
|
||||
* Set datatype characteristics (class, size, byte order and sign) from a given datatype identifier.
|
||||
* <p>
|
||||
* Sub-classes must implement it so that this datatype will be converted accordingly.
|
||||
* <p>
|
||||
* For example, if the type identifier is a 64-bit unsigned integer created from HDF5,
|
||||
*
|
||||
* <pre>
|
||||
* H5Datatype dtype = new H5Datatype();
|
||||
* dtype.fromNative(HDF5Constants.H5T_NATIVE_UNINT32);
|
||||
* </pre>
|
||||
*
|
||||
* Where dtype is equivalent to <br>
|
||||
* new H5Datatype(CLASS_INTEGER, 4, NATIVE, SIGN_NONE);
|
||||
*
|
||||
* @param nativeID
|
||||
* the datatype identifier.
|
||||
*/
|
||||
public abstract void fromNative(long nativeID);
|
||||
|
||||
/**
|
||||
* Returns a short text description of this datatype.
|
||||
*
|
||||
* @return a short text description of this datatype
|
||||
*/
|
||||
public String getDescription() {
|
||||
log.trace("getDescription(): start");
|
||||
|
||||
if (datatypeDescription != null) {
|
||||
log.trace("getDescription(): finish");
|
||||
return datatypeDescription;
|
||||
}
|
||||
|
||||
StringBuilder description = new StringBuilder();
|
||||
|
||||
switch (datatypeClass) {
|
||||
case CLASS_CHAR:
|
||||
description.append("8-bit ").append((isUnsigned() ? "unsigned " : "")).append("integer");
|
||||
break;
|
||||
case CLASS_INTEGER:
|
||||
if (datatypeSize == NATIVE)
|
||||
description.append("native ").append((isUnsigned() ? "unsigned " : "")).append("integer");
|
||||
else
|
||||
description.append(String.valueOf(datatypeSize * 8)).append("-bit ")
|
||||
.append((isUnsigned() ? "unsigned " : "")).append("integer");
|
||||
break;
|
||||
case CLASS_FLOAT:
|
||||
if (datatypeSize == NATIVE)
|
||||
description.append("native floating-point");
|
||||
else
|
||||
description.append(String.valueOf(datatypeSize * 8)).append("-bit floating-point");
|
||||
break;
|
||||
case CLASS_STRING:
|
||||
description.append("String");
|
||||
break;
|
||||
case CLASS_REFERENCE:
|
||||
description.append("Object reference");
|
||||
break;
|
||||
case CLASS_OPAQUE:
|
||||
if (datatypeSize == NATIVE)
|
||||
description.append("native opaque");
|
||||
else
|
||||
description.append(String.valueOf(datatypeSize * 8)).append("-bit opaque");
|
||||
break;
|
||||
case CLASS_BITFIELD:
|
||||
if (datatypeSize == NATIVE)
|
||||
description.append("native bitfield");
|
||||
else
|
||||
description.append(String.valueOf(datatypeSize * 8)).append("-bit bitfield");
|
||||
break;
|
||||
case CLASS_ENUM:
|
||||
if (datatypeSize == NATIVE)
|
||||
description.append("native enum");
|
||||
else
|
||||
description.append(String.valueOf(datatypeSize * 8)).append("-bit enum");
|
||||
break;
|
||||
case CLASS_ARRAY:
|
||||
description.append("Array");
|
||||
|
||||
if (arrayDims != null) {
|
||||
description.append(" [");
|
||||
for (int i = 0; i < arrayDims.length; i++) {
|
||||
description.append(arrayDims[i]);
|
||||
if (i < arrayDims.length - 1)
|
||||
description.append(" x ");
|
||||
}
|
||||
description.append("]");
|
||||
}
|
||||
|
||||
break;
|
||||
case CLASS_COMPOUND:
|
||||
description.append("Compound");
|
||||
break;
|
||||
case CLASS_VLEN:
|
||||
description.append("Variable-length");
|
||||
break;
|
||||
default:
|
||||
description.append("Unknown");
|
||||
break;
|
||||
}
|
||||
|
||||
if (baseType != null) {
|
||||
description.append(" of " + baseType.getDescription());
|
||||
}
|
||||
|
||||
log.trace("getDescription(): finish");
|
||||
return description.toString();
|
||||
}
|
||||
|
||||
/**
|
||||
* Checks if this datatype is unsigned.
|
||||
*
|
||||
* @return true if the datatype is unsigned;
|
||||
* otherwise, returns false.
|
||||
*/
|
||||
public boolean isUnsigned() {
|
||||
if (baseType != null)
|
||||
return baseType.isUnsigned();
|
||||
else {
|
||||
if (isCompound()) {
|
||||
if ((compoundMemberTypes != null) && !compoundMemberTypes.isEmpty()) {
|
||||
boolean allMembersUnsigned = true;
|
||||
|
||||
Iterator<Datatype> cmpdTypeListIT = compoundMemberTypes.iterator();
|
||||
while (cmpdTypeListIT.hasNext()) {
|
||||
Datatype next = cmpdTypeListIT.next();
|
||||
|
||||
allMembersUnsigned = allMembersUnsigned && next.isUnsigned();
|
||||
}
|
||||
|
||||
return allMembersUnsigned;
|
||||
}
|
||||
else {
|
||||
log.debug("isUnsigned(): compoundMemberTypes is null");
|
||||
return false;
|
||||
}
|
||||
}
|
||||
else {
|
||||
return (datatypeSign == Datatype.SIGN_NONE);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
public abstract boolean isText();
|
||||
|
||||
/**
|
||||
* Checks if this datatype is an integer type.
|
||||
*
|
||||
* @return true if the datatype is integer; false otherwise
|
||||
*/
|
||||
public boolean isInteger() {
|
||||
return (datatypeClass == Datatype.CLASS_INTEGER);
|
||||
}
|
||||
|
||||
/**
|
||||
* Checks if this datatype is a floating-point type.
|
||||
*
|
||||
* @return true if the datatype is floating-point; false otherwise
|
||||
*/
|
||||
public boolean isFloat() {
|
||||
return (datatypeClass == Datatype.CLASS_FLOAT);
|
||||
}
|
||||
|
||||
/**
|
||||
* Checks if this datatype is a variable-length string type.
|
||||
*
|
||||
* @return true if the datatype is variable-length string; false otherwise
|
||||
*/
|
||||
public boolean isVarStr() {
|
||||
return isVariableStr;
|
||||
}
|
||||
|
||||
/**
|
||||
* Checks if this datatype is a variable-length type.
|
||||
*
|
||||
* @return true if the datatype is variable-length; false otherwise
|
||||
*/
|
||||
public boolean isVLEN() {
|
||||
return isVLEN;
|
||||
}
|
||||
|
||||
/**
|
||||
* Checks if this datatype is an compound type.
|
||||
*
|
||||
* @return true if the datatype is compound; false otherwise
|
||||
*/
|
||||
public boolean isCompound() {
|
||||
return (datatypeClass == Datatype.CLASS_COMPOUND);
|
||||
}
|
||||
|
||||
/**
|
||||
* Checks if this datatype is an array type.
|
||||
*
|
||||
* @return true if the datatype is array; false otherwise
|
||||
*/
|
||||
public boolean isArray() {
|
||||
return (datatypeClass == Datatype.CLASS_ARRAY);
|
||||
}
|
||||
|
||||
/**
|
||||
* Checks if this datatype is a string type.
|
||||
*
|
||||
* @return true if the datatype is string; false otherwise
|
||||
*/
|
||||
public boolean isString() {
|
||||
return (datatypeClass == Datatype.CLASS_STRING);
|
||||
}
|
||||
|
||||
/**
|
||||
* Checks if this datatype is a character type.
|
||||
*
|
||||
* @return true if the datatype is character; false otherwise
|
||||
*/
|
||||
public boolean isChar() {
|
||||
return (datatypeClass == Datatype.CLASS_CHAR);
|
||||
}
|
||||
|
||||
/**
|
||||
* Checks if this datatype is a reference type.
|
||||
*
|
||||
* @return true if the datatype is reference; false otherwise
|
||||
*/
|
||||
public boolean isRef() {
|
||||
return (datatypeClass == Datatype.CLASS_REFERENCE);
|
||||
}
|
||||
|
||||
/**
|
||||
* Checks if this datatype is a enum type.
|
||||
*
|
||||
* @return true if the datatype is enum; false otherwise
|
||||
*/
|
||||
public boolean isEnum() {
|
||||
return (datatypeClass == Datatype.CLASS_ENUM);
|
||||
}
|
||||
|
||||
/**
|
||||
* Checks if this datatype is a opaque type.
|
||||
*
|
||||
* @return true if the datatype is opaque; false otherwise
|
||||
*/
|
||||
public boolean isOpaque() {
|
||||
return (datatypeClass == Datatype.CLASS_OPAQUE);
|
||||
}
|
||||
|
||||
/**
|
||||
* Checks if this datatype is a bitfield type.
|
||||
*
|
||||
* @return true if the datatype is bitfield; false otherwise
|
||||
*/
|
||||
public boolean isBitField() {
|
||||
return (datatypeClass == Datatype.CLASS_BITFIELD);
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
*
|
||||
* @see hdf.object.DataFormat#getMetadata()
|
||||
*/
|
||||
@Override
|
||||
@SuppressWarnings("rawtypes")
|
||||
public List getMetadata() throws Exception {
|
||||
return null;
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
*
|
||||
* @see hdf.object.DataFormat#writeMetadata(java.lang.Object)
|
||||
*/
|
||||
@Override
|
||||
public void writeMetadata(Object info) throws Exception {
|
||||
throw new UnsupportedOperationException("Unsupported operation. Subclasses must implement Datatype:writeMetadata.");
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
*
|
||||
* @see hdf.object.DataFormat#removeMetadata(java.lang.Object)
|
||||
*/
|
||||
@Override
|
||||
public void removeMetadata(Object info) throws Exception {
|
||||
throw new UnsupportedOperationException("Unsupported operation. Subclasses must implement Datatype:removeMetadata.");
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
*
|
||||
* @see hdf.object.DataFormat#updateMetadata(java.lang.Object)
|
||||
*/
|
||||
@Override
|
||||
public void updateMetadata(Object info) throws Exception {
|
||||
throw new UnsupportedOperationException("Unsupported operation. Subclasses must implement Datatype:updateMetadata.");
|
||||
}
|
||||
|
||||
@Override
|
||||
public String toString() {
|
||||
return getDescription();
|
||||
}
|
||||
}
|
2083
src/main/java/hdf/object/FileFormat.java
Normal file
2083
src/main/java/hdf/object/FileFormat.java
Normal file
File diff suppressed because it is too large
Load Diff
326
src/main/java/hdf/object/Group.java
Normal file
326
src/main/java/hdf/object/Group.java
Normal file
@ -0,0 +1,326 @@
|
||||
/*****************************************************************************
|
||||
* Copyright by The HDF Group. *
|
||||
* Copyright by the Board of Trustees of the University of Illinois. *
|
||||
* All rights reserved. *
|
||||
* *
|
||||
* This file is part of the HDF Java Products distribution. *
|
||||
* The full copyright notice, including terms governing use, modification, *
|
||||
* and redistribution, is contained in the files COPYING and Copyright.html. *
|
||||
* COPYING can be found at the root of the source code distribution tree. *
|
||||
* Or, see https://support.hdfgroup.org/products/licenses.html *
|
||||
* If you do not have access to either file, you may request a copy from *
|
||||
* help@hdfgroup.org. *
|
||||
****************************************************************************/
|
||||
|
||||
package hdf.object;
|
||||
|
||||
import java.util.Iterator;
|
||||
import java.util.LinkedList;
|
||||
import java.util.List;
|
||||
import java.util.Queue;
|
||||
import java.util.Stack;
|
||||
import java.util.Vector;
|
||||
|
||||
/**
|
||||
* Group is an abstract class. Current implementing classes are the H4Group and
|
||||
* H5Group. This class includes general information of a group object such as
|
||||
* members of a group and common operations on groups.
|
||||
* <p>
|
||||
* Members of a group may include other groups, datasets or links.
|
||||
*
|
||||
* @version 1.1 9/4/2007
|
||||
* @author Peter X. Cao
|
||||
*/
|
||||
public abstract class Group extends HObject implements MetaDataContainer {
|
||||
|
||||
private static final long serialVersionUID = 3913174542591568052L;
|
||||
|
||||
/**
|
||||
* The list of members (Groups and Datasets) of this group in memory.
|
||||
*/
|
||||
private List<HObject> memberList;
|
||||
|
||||
/**
|
||||
* The parent group where this group is located. The parent of the root
|
||||
* group is null.
|
||||
*/
|
||||
protected Group parent;
|
||||
|
||||
/**
|
||||
* Total number of members of this group in file.
|
||||
*/
|
||||
protected int nMembersInFile;
|
||||
|
||||
public static final int LINK_TYPE_HARD = 0;
|
||||
|
||||
public static final int LINK_TYPE_SOFT = 1;
|
||||
|
||||
public static final int LINK_TYPE_EXTERNAL = 64;
|
||||
|
||||
public static final int CRT_ORDER_TRACKED = 1;
|
||||
|
||||
public static final int CRT_ORDER_INDEXED = 2;
|
||||
|
||||
|
||||
/**
|
||||
* Constructs an instance of the group with specific name, path and parent
|
||||
* group. An HDF data object must have a name. The path is the group path
|
||||
* starting from the root. The parent group is the group where this group is
|
||||
* located.
|
||||
* <p>
|
||||
* For example, in H5Group(h5file, "grp", "/groups/", pgroup), "grp" is the
|
||||
* name of the group, "/groups/" is the group path of the group, and pgroup
|
||||
* is the group where "grp" is located.
|
||||
*
|
||||
* @param theFile
|
||||
* the file containing the group.
|
||||
* @param grpName
|
||||
* the name of this group, e.g. "grp01".
|
||||
* @param grpPath
|
||||
* the full path of this group, e.g. "/groups/".
|
||||
* @param grpParent
|
||||
* the parent of this group.
|
||||
*/
|
||||
public Group(FileFormat theFile, String grpName, String grpPath, Group grpParent) {
|
||||
this(theFile, grpName, grpPath, grpParent, null);
|
||||
}
|
||||
|
||||
/**
|
||||
* @deprecated Not for public use in the future.<br>
|
||||
* Using {@link #Group(FileFormat, String, String, Group)}
|
||||
*
|
||||
* @param theFile
|
||||
* the file containing the group.
|
||||
* @param grpName
|
||||
* the name of this group, e.g. "grp01".
|
||||
* @param grpPath
|
||||
* the full path of this group, e.g. "/groups/".
|
||||
* @param grpParent
|
||||
* the parent of this group.
|
||||
* @param oid
|
||||
* the oid of this group.
|
||||
*/
|
||||
@Deprecated
|
||||
public Group(FileFormat theFile, String grpName, String grpPath, Group grpParent, long[] oid) {
|
||||
super(theFile, grpName, grpPath, oid);
|
||||
|
||||
this.parent = grpParent;
|
||||
}
|
||||
|
||||
/**
|
||||
* Clears up member list and other resources in memory for the group. Since
|
||||
* the destructor will clear memory space, the function is usually not
|
||||
* needed.
|
||||
*/
|
||||
public void clear() {
|
||||
if (memberList != null) {
|
||||
((Vector<HObject>) memberList).setSize(0);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Adds an object to the member list of this group in memory.
|
||||
*
|
||||
* @param object
|
||||
* the HObject to be added to the member list.
|
||||
*/
|
||||
public void addToMemberList(HObject object) {
|
||||
if (memberList == null) {
|
||||
int size = Math.min(getNumberOfMembersInFile(), this
|
||||
.getFileFormat().getMaxMembers());
|
||||
memberList = new Vector<>(size + 5);
|
||||
}
|
||||
|
||||
if ((object != null) && !memberList.contains(object)) {
|
||||
memberList.add(object);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Removes an object from the member list of this group in memory.
|
||||
*
|
||||
* @param object
|
||||
* the HObject (Group or Dataset) to be removed from the member
|
||||
* list.
|
||||
*/
|
||||
public void removeFromMemberList(HObject object) {
|
||||
if (memberList != null) {
|
||||
memberList.remove(object);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns the list of members of this group. The list is an java.util.List
|
||||
* containing HObjects.
|
||||
*
|
||||
* @return the list of members of this group.
|
||||
*/
|
||||
public List<HObject> getMemberList() {
|
||||
FileFormat theFile = this.getFileFormat();
|
||||
|
||||
if ((memberList == null) && (theFile != null)) {
|
||||
int size = Math.min(getNumberOfMembersInFile(), this.getFileFormat().getMaxMembers());
|
||||
memberList = new Vector<>(size + 5); // avoid infinite loop search for groups without members
|
||||
|
||||
// find the memberList from the file by checking the group path and
|
||||
// name. group may be created out of the structure tree
|
||||
// (H4/5File.loadTree()).
|
||||
if (theFile.getFID() < 0) {
|
||||
try {
|
||||
theFile.open();
|
||||
} // load the file structure;
|
||||
catch (Exception ex) {
|
||||
;
|
||||
}
|
||||
}
|
||||
|
||||
HObject root = theFile.getRootObject();
|
||||
if (root == null) return memberList;
|
||||
|
||||
Iterator<HObject> it = ((Group) root).depthFirstMemberList().iterator();
|
||||
Group g = null;
|
||||
Object uObj = null;
|
||||
while (it.hasNext()) {
|
||||
uObj = it.next();
|
||||
|
||||
if (uObj instanceof Group) {
|
||||
g = (Group) uObj;
|
||||
if (g.getPath() != null) // add this check to get rid of null exception
|
||||
{
|
||||
if ((this.isRoot() && g.isRoot())
|
||||
|| (this.getPath().equals(g.getPath()) &&
|
||||
g.getName().endsWith(this.getName()))) {
|
||||
memberList = g.getMemberList();
|
||||
break;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return memberList;
|
||||
}
|
||||
|
||||
/**
|
||||
* @return the members of this Group in breadth-first order.
|
||||
*/
|
||||
public List<HObject> breadthFirstMemberList() {
|
||||
Vector<HObject> members = new Vector<>();
|
||||
Queue<HObject> queue = new LinkedList<>();
|
||||
HObject currentObj = this;
|
||||
|
||||
queue.addAll(((Group) currentObj).getMemberList());
|
||||
|
||||
while(!queue.isEmpty()) {
|
||||
currentObj = queue.remove();
|
||||
members.add(currentObj);
|
||||
|
||||
if(currentObj instanceof Group && ((Group) currentObj).getNumberOfMembersInFile() > 0) {
|
||||
queue.addAll(((Group) currentObj).getMemberList());
|
||||
}
|
||||
}
|
||||
|
||||
return members;
|
||||
}
|
||||
|
||||
/**
|
||||
* @return the members of this Group in depth-first order.
|
||||
*/
|
||||
public List<HObject> depthFirstMemberList() {
|
||||
Vector<HObject> members = new Vector<>();
|
||||
Stack<HObject> stack = new Stack<>();
|
||||
HObject currentObj = this;
|
||||
|
||||
// Push elements onto the stack in reverse order
|
||||
List<HObject> list = ((Group) currentObj).getMemberList();
|
||||
for(int i = list.size() - 1; i >= 0; i--) {
|
||||
stack.push(list.get(i));
|
||||
}
|
||||
|
||||
while(!stack.empty()) {
|
||||
currentObj = stack.pop();
|
||||
members.add(currentObj);
|
||||
|
||||
if(currentObj instanceof Group && ((Group) currentObj).getNumberOfMembersInFile() > 0) {
|
||||
list = ((Group) currentObj).getMemberList();
|
||||
for(int i = list.size() - 1; i >= 0; i--) {
|
||||
stack.push(list.get(i));
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return members;
|
||||
}
|
||||
|
||||
/**
|
||||
* Sets the name of the group.
|
||||
* <p>
|
||||
* setName (String newName) changes the name of the group in memory and
|
||||
* file.
|
||||
* <p>
|
||||
* setName() updates the path in memory for all the objects that are under
|
||||
* the group with the new name.
|
||||
*
|
||||
* @param newName
|
||||
* The new name of the group.
|
||||
*
|
||||
* @throws Exception if the name can not be set
|
||||
*/
|
||||
@Override
|
||||
public void setName(String newName) throws Exception {
|
||||
super.setName(newName);
|
||||
|
||||
if (memberList != null) {
|
||||
int n = memberList.size();
|
||||
HObject theObj = null;
|
||||
for (int i = 0; i < n; i++) {
|
||||
theObj = memberList.get(i);
|
||||
theObj.setPath(this.getPath() + newName + HObject.SEPARATOR);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/** @return the parent group. */
|
||||
public final Group getParent() {
|
||||
return parent;
|
||||
}
|
||||
|
||||
/**
|
||||
* Checks if it is a root group.
|
||||
*
|
||||
* @return true if the group is a root group; otherwise, returns false.
|
||||
*/
|
||||
public final boolean isRoot() {
|
||||
return (parent == null);
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns the total number of members of this group in file.
|
||||
*
|
||||
* Current Java applications such as HDFView cannot handle files with large
|
||||
* numbers of objects (1,000,000 or more objects) due to JVM memory
|
||||
* limitation. The max_members is used so that applications such as HDFView
|
||||
* will load up to <i>max_members</i> number of objects. If the number of
|
||||
* objects in file is larger than <i>max_members</i>, only
|
||||
* <i>max_members</i> are loaded in memory.
|
||||
* <p>
|
||||
* getNumberOfMembersInFile() returns the number of objects in this group.
|
||||
* The number of objects in memory is obtained by getMemberList().size().
|
||||
*
|
||||
* @return Total number of members of this group in the file.
|
||||
*/
|
||||
public int getNumberOfMembersInFile() {
|
||||
return nMembersInFile;
|
||||
}
|
||||
|
||||
/**
|
||||
* Get the HObject at the specified index in this Group's member list.
|
||||
* @param idx The index of the HObject to get.
|
||||
* @return The HObject at the specified index.
|
||||
*/
|
||||
public HObject getMember(int idx) {
|
||||
if(memberList.size() <= 0 || idx >= memberList.size()) return null;
|
||||
|
||||
return memberList.get(idx);
|
||||
}
|
||||
}
|
562
src/main/java/hdf/object/HObject.java
Normal file
562
src/main/java/hdf/object/HObject.java
Normal file
@ -0,0 +1,562 @@
|
||||
/*****************************************************************************
|
||||
* Copyright by The HDF Group. *
|
||||
* Copyright by the Board of Trustees of the University of Illinois. *
|
||||
* All rights reserved. *
|
||||
* *
|
||||
* This file is part of the HDF Java Products distribution. *
|
||||
* The full copyright notice, including terms governing use, modification, *
|
||||
* and redistribution, is contained in the files COPYING and Copyright.html. *
|
||||
* COPYING can be found at the root of the source code distribution tree. *
|
||||
* Or, see https://support.hdfgroup.org/products/licenses.html *
|
||||
* If you do not have access to either file, you may request a copy from *
|
||||
* help@hdfgroup.org. *
|
||||
****************************************************************************/
|
||||
|
||||
package hdf.object;
|
||||
|
||||
import java.io.Serializable;
|
||||
|
||||
/**
|
||||
* The HObject class is the root class of all the HDF data objects. Every data
|
||||
* class has HObject as a superclass. All objects implement the methods of this
|
||||
* class. The following is the inherited structure of HDF Objects.
|
||||
*
|
||||
* <pre>
|
||||
* HObject
|
||||
* __________________________|________________________________
|
||||
* | | |
|
||||
* Group Dataset Datatype
|
||||
* | _________|___________ |
|
||||
* | | | |
|
||||
* | ScalarDS CompoundDS |
|
||||
* | | | |
|
||||
* ---------------------Implementing classes such as-------------------------
|
||||
* ____|____ _____|______ _____|_____ _____|_____
|
||||
* | | | | | | | |
|
||||
* H5Group H4Group H5ScalarDS H4ScalarDS H5CompDS H4CompDS H5Datatype H4Datatype
|
||||
*
|
||||
* </pre>
|
||||
*
|
||||
* All HDF4 and HDF5 data objects are inherited from HObject. At the top level
|
||||
* of the hierarchy, both HDF4 and HDF5 have the same super-classes, such as
|
||||
* Group and Dataset. At the bottom level of the hierarchy, HDF4 and HDF5
|
||||
* objects have their own implementation, such as H5Group, H5ScalarDS,
|
||||
* H5CompoundDS, and H5Datatype.
|
||||
* <p>
|
||||
* <b>Warning: HDF4 and HDF5 may have multiple links to the same object. Data
|
||||
* objects in this model do not deal with multiple links. Users may create
|
||||
* duplicate copies of the same data object with different paths. Applications
|
||||
* should check the OID of the data object to avoid duplicate copies of the same
|
||||
* object.</b>
|
||||
* <p>
|
||||
* HDF4 objects are uniquely identified by the OID (tag_id, ref_id) pair. The
|
||||
* ref_id is the object reference count. The tag_id is a pre-defined number to
|
||||
* identify the type of object. For example, DFTAG_RI is for raster image,
|
||||
* DFTAG_SD is for scientific dataset, and DFTAG_VG is for Vgroup.
|
||||
* <p>
|
||||
* HDF5 objects are uniquely identified by the OID containing just the object
|
||||
* reference. The OID is usually obtained by H5Rcreate(). The following example
|
||||
* shows how to retrieve an object ID from a file:
|
||||
*
|
||||
* <pre>
|
||||
* // retrieve the object ID
|
||||
* try {
|
||||
* byte[] ref_buf = H5.H5Rcreate(h5file.getFID(), this.getFullName(), HDF5Constants.H5R_OBJECT, -1);
|
||||
* long[] oid = new long[1];
|
||||
* oid[0] = HDFNativeData.byteToLong(ref_buf, 0);
|
||||
* } catch (Exception ex) {
|
||||
* }
|
||||
* </pre>
|
||||
*
|
||||
* @version 2.0 4/2/2018
|
||||
* @author Peter X. Cao, Jordan T. Henderson
|
||||
* @see <a href="DataFormat.html">hdf.object.DataFormat</a>
|
||||
*/
|
||||
public abstract class HObject implements Serializable {
|
||||
|
||||
/**
|
||||
* The serialVersionUID is a universal version identifier for a Serializable
|
||||
* class. Deserialization uses this number to ensure that a loaded class
|
||||
* corresponds exactly to a serialized object. For details, see
|
||||
* http://java.sun.com/j2se/1.5.0/docs/api/java/io/Serializable.html
|
||||
*/
|
||||
private static final long serialVersionUID = -1723666708199882519L;
|
||||
|
||||
private static final org.slf4j.Logger log = org.slf4j.LoggerFactory.getLogger(HObject.class);
|
||||
|
||||
/**
|
||||
* The separator of object path, i.e. "/".
|
||||
*/
|
||||
public static final String SEPARATOR = "/";
|
||||
|
||||
/**
|
||||
* The full path of the file that contains the object.
|
||||
*/
|
||||
private String filename;
|
||||
|
||||
/**
|
||||
* The file which contains the object
|
||||
*/
|
||||
protected final FileFormat fileFormat;
|
||||
|
||||
/**
|
||||
* The name of the data object. The root group has its default name, a
|
||||
* slash. The name can be changed except the root group.
|
||||
*/
|
||||
private String name;
|
||||
|
||||
/**
|
||||
* The full path of the data object. The full path always starts with the
|
||||
* root, a slash. The path cannot be changed. Also, a path must be ended with a
|
||||
* slash. For example, /arrays/ints/
|
||||
*/
|
||||
private String path;
|
||||
|
||||
/** The full name of the data object, i.e. "path + name" */
|
||||
private String fullName;
|
||||
|
||||
/**
|
||||
* Array of long integer storing unique identifier for the object.
|
||||
* <p>
|
||||
* HDF4 objects are uniquely identified by a (tag_id, ref_id) pair. i.e.
|
||||
* oid[0] = tag, oid[1] = ref_id.<br>
|
||||
* HDF5 objects are uniquely identified by an object reference. i.e.
|
||||
* oid[0] = obj_id.
|
||||
*/
|
||||
protected long[] oid;
|
||||
|
||||
/**
|
||||
* The name of the Target Object that is being linked to.
|
||||
*/
|
||||
protected String linkTargetObjName;
|
||||
|
||||
/**
|
||||
* Number of attributes attached to the object.
|
||||
*/
|
||||
// protected int nAttributes = -1;
|
||||
|
||||
/**
|
||||
* Constructs an instance of a data object without name and path.
|
||||
*/
|
||||
public HObject() {
|
||||
this(null, null, null, null);
|
||||
}
|
||||
|
||||
/**
|
||||
* Constructs an instance of a data object with specific name and path.
|
||||
* <p>
|
||||
* For example, in H5ScalarDS(h5file, "dset", "/arrays"), "dset" is the name
|
||||
* of the dataset, "/arrays" is the group path of the dataset.
|
||||
*
|
||||
* @param theFile
|
||||
* the file that contains the data object.
|
||||
* @param theName
|
||||
* the name of the data object, e.g. "dset".
|
||||
* @param thePath
|
||||
* the group path of the data object, e.g. "/arrays".
|
||||
*/
|
||||
public HObject(FileFormat theFile, String theName, String thePath) {
|
||||
this(theFile, theName, thePath, null);
|
||||
}
|
||||
|
||||
/**
|
||||
* Constructs an instance of a data object with specific name and path.
|
||||
* <p>
|
||||
* For example, in H5ScalarDS(h5file, "dset", "/arrays"), "dset" is the name
|
||||
* of the dataset, "/arrays" is the group path of the dataset.
|
||||
*
|
||||
* @param theFile
|
||||
* the file that contains the data object.
|
||||
* @param theName
|
||||
* the name of the data object, e.g. "dset".
|
||||
* @param thePath
|
||||
* the group path of the data object, e.g. "/arrays".
|
||||
* @param oid
|
||||
* the ids of the data object.
|
||||
*/
|
||||
@Deprecated
|
||||
public HObject(FileFormat theFile, String theName, String thePath, long[] oid) {
|
||||
this.fileFormat = theFile;
|
||||
this.oid = oid;
|
||||
|
||||
if (fileFormat != null) {
|
||||
this.filename = fileFormat.getFilePath();
|
||||
}
|
||||
else {
|
||||
this.filename = null;
|
||||
}
|
||||
|
||||
// file name is packed in the full path
|
||||
if ((theName == null) && (thePath != null)) {
|
||||
if (thePath.equals(SEPARATOR)) {
|
||||
theName = SEPARATOR;
|
||||
thePath = null;
|
||||
}
|
||||
else {
|
||||
// the path must starts with "/"
|
||||
if (!thePath.startsWith(HObject.SEPARATOR)) {
|
||||
thePath = HObject.SEPARATOR + thePath;
|
||||
}
|
||||
|
||||
// get rid of the last "/"
|
||||
if (thePath.endsWith(HObject.SEPARATOR)) {
|
||||
thePath = thePath.substring(0, thePath.length() - 1);
|
||||
}
|
||||
|
||||
// separate the name and the path
|
||||
theName = thePath.substring(thePath.lastIndexOf(SEPARATOR) + 1);
|
||||
thePath = thePath.substring(0, thePath.lastIndexOf(SEPARATOR));
|
||||
}
|
||||
}
|
||||
else if ((theName != null) && (thePath == null) && (theName.indexOf(SEPARATOR) >= 0)) {
|
||||
if (theName.equals(SEPARATOR)) {
|
||||
theName = SEPARATOR;
|
||||
thePath = null;
|
||||
}
|
||||
else {
|
||||
// the full name must starts with "/"
|
||||
if (!theName.startsWith(SEPARATOR)) {
|
||||
theName = SEPARATOR + theName;
|
||||
}
|
||||
|
||||
// the fullname must not end with "/"
|
||||
int n = theName.length();
|
||||
if (theName.endsWith(SEPARATOR)) {
|
||||
theName = theName.substring(0, n - 1);
|
||||
}
|
||||
|
||||
int idx = theName.lastIndexOf(SEPARATOR);
|
||||
if (idx < 0) {
|
||||
thePath = SEPARATOR;
|
||||
}
|
||||
else {
|
||||
thePath = theName.substring(0, idx);
|
||||
theName = theName.substring(idx + 1);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// the path must start and end with "/"
|
||||
if (thePath != null) {
|
||||
thePath = thePath.replaceAll("//", "/");
|
||||
if (!thePath.endsWith(SEPARATOR)) {
|
||||
thePath += SEPARATOR;
|
||||
}
|
||||
}
|
||||
|
||||
this.name = theName;
|
||||
this.path = thePath;
|
||||
|
||||
log.trace("name={} path={}", this.name, this.path);
|
||||
|
||||
if (thePath != null) {
|
||||
this.fullName = thePath + theName;
|
||||
}
|
||||
else {
|
||||
if (theName == null) {
|
||||
this.fullName = "/";
|
||||
}
|
||||
else if (theName.startsWith("/")) {
|
||||
this.fullName = theName;
|
||||
}
|
||||
else {
|
||||
if (this instanceof Attribute)
|
||||
this.fullName = theName;
|
||||
else
|
||||
this.fullName = "/" + theName;
|
||||
}
|
||||
}
|
||||
|
||||
log.trace("fullName={}", this.fullName);
|
||||
}
|
||||
|
||||
/**
|
||||
* Print out debug information
|
||||
* <p>
|
||||
*
|
||||
* @param msg
|
||||
* the debug message to print
|
||||
*/
|
||||
protected final void debug(Object msg) {
|
||||
System.out.println("*** " + this.getClass().getName() + ": " + msg);
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns the name of the file that contains this data object.
|
||||
* <p>
|
||||
* The file name is necessary because the file of this data object is
|
||||
* uniquely identified when multiple files are opened by an application at
|
||||
* the same time.
|
||||
*
|
||||
* @return The full path (path + name) of the file.
|
||||
*/
|
||||
public final String getFile() {
|
||||
return filename;
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns the name of the object. For example, "Raster Image #2".
|
||||
*
|
||||
* @return The name of the object.
|
||||
*/
|
||||
public final String getName() {
|
||||
return name;
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns the name of the target object that is linked to.
|
||||
*
|
||||
* @return The name of the object that is linked to.
|
||||
*/
|
||||
public final String getLinkTargetObjName() {
|
||||
return linkTargetObjName;
|
||||
}
|
||||
|
||||
/**
|
||||
* Sets the name of the target object that is linked to.
|
||||
*
|
||||
* @param targetObjName
|
||||
* The new name of the object.
|
||||
*/
|
||||
public final void setLinkTargetObjName(String targetObjName) {
|
||||
linkTargetObjName = targetObjName;
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns the full name (group path + object name) of the object. For
|
||||
* example, "/Images/Raster Image #2"
|
||||
*
|
||||
* @return The full name (group path + object name) of the object.
|
||||
*/
|
||||
public final String getFullName() {
|
||||
return fullName;
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns the group path of the object. For example, "/Images".
|
||||
*
|
||||
* @return The group path of the object.
|
||||
*/
|
||||
public final String getPath() {
|
||||
return path;
|
||||
}
|
||||
|
||||
/**
|
||||
* Sets the name of the object.
|
||||
*
|
||||
* setName (String newName) changes the name of the object in the file.
|
||||
*
|
||||
* @param newName
|
||||
* The new name of the object.
|
||||
*
|
||||
* @throws Exception if name is root or contains separator
|
||||
*/
|
||||
public void setName(String newName) throws Exception {
|
||||
if (newName != null) {
|
||||
if (newName.equals(HObject.SEPARATOR)) {
|
||||
throw new IllegalArgumentException("The new name cannot be the root");
|
||||
}
|
||||
|
||||
if (newName.startsWith(HObject.SEPARATOR)) {
|
||||
newName = newName.substring(1);
|
||||
}
|
||||
|
||||
if (newName.endsWith(HObject.SEPARATOR)) {
|
||||
newName = newName.substring(0, newName.length() - 2);
|
||||
}
|
||||
|
||||
if (newName.contains(HObject.SEPARATOR)) {
|
||||
throw new IllegalArgumentException("The new name contains the SEPARATOR character: " + HObject.SEPARATOR);
|
||||
}
|
||||
}
|
||||
|
||||
name = newName;
|
||||
}
|
||||
|
||||
/**
|
||||
* Sets the path of the object.
|
||||
* <p>
|
||||
* setPath() is needed to change the path for an object when the name of a
|
||||
* group containing the object is changed by setName(). The path of the
|
||||
* object in memory under this group should be updated to the new path to
|
||||
* the group. Unlike setName(), setPath() does not change anything in file.
|
||||
*
|
||||
* @param newPath
|
||||
* The new path of the object.
|
||||
*
|
||||
* @throws Exception if a failure occurred
|
||||
*/
|
||||
public void setPath(String newPath) throws Exception {
|
||||
if (newPath == null) {
|
||||
newPath = "/";
|
||||
}
|
||||
|
||||
path = newPath;
|
||||
}
|
||||
|
||||
/**
|
||||
* Opens an existing object such as a dataset or group for access.
|
||||
*
|
||||
* The return value is an object identifier obtained by implementing classes
|
||||
* such as H5.H5Dopen(). This function is needed to allow other objects to
|
||||
* be able to access the object. For instance, H5File class uses the open()
|
||||
* function to obtain object identifier for copyAttributes(long src_id, long
|
||||
* dst_id) and other purposes. The open() function should be used in pair
|
||||
* with close(long) function.
|
||||
*
|
||||
* @see HObject#close(long)
|
||||
*
|
||||
* @return the object identifier if successful; otherwise returns a negative
|
||||
* value.
|
||||
*/
|
||||
public abstract long open();
|
||||
|
||||
/**
|
||||
* Closes access to the object.
|
||||
* <p>
|
||||
* Sub-classes must implement this interface because different data objects
|
||||
* have their own ways of how the data resources are closed.
|
||||
* <p>
|
||||
* For example, H5Group.close() calls the hdf.hdf5lib.H5.H5Gclose()
|
||||
* method and closes the group resource specified by the group id.
|
||||
*
|
||||
* @param id
|
||||
* The object identifier.
|
||||
*/
|
||||
public abstract void close(long id);
|
||||
|
||||
/**
|
||||
* Returns the file identifier of of the file containing the object.
|
||||
*
|
||||
* @return the file identifier of of the file containing the object.
|
||||
*/
|
||||
public final long getFID() {
|
||||
if (fileFormat != null) {
|
||||
return fileFormat.getFID();
|
||||
}
|
||||
else {
|
||||
return -1;
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns the file that contains the object.
|
||||
*
|
||||
* @return The file that contains the object.
|
||||
*/
|
||||
public final FileFormat getFileFormat() {
|
||||
return fileFormat;
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns a cloned copy of the object identifier.
|
||||
* <p>
|
||||
* The object OID cannot be modified once it is created. getOID() clones the object OID to ensure
|
||||
* the object OID cannot be modified outside of this class.
|
||||
*
|
||||
* @return the cloned copy of the object OID.
|
||||
*/
|
||||
public final long[] getOID() {
|
||||
if (oid == null) {
|
||||
return null;
|
||||
}
|
||||
|
||||
return oid.clone();
|
||||
}
|
||||
|
||||
/**
|
||||
* Checks if the OID of the object is the same as the given object identifier within the same file.
|
||||
* <p>
|
||||
* HDF4 and HDF5 data objects are identified by their unique OIDs. A data object in a file may have
|
||||
* multiple logical names , which are represented in a graph structure as separate objects.
|
||||
* <p>
|
||||
* The HObject.equalsOID(long[] theID) can be used to check if two data objects with different names
|
||||
* are pointed to the same object within the same file.
|
||||
*
|
||||
* @param theID
|
||||
* The list object identifiers.
|
||||
*
|
||||
* @return true if the ID of the object equals the given OID; otherwise, returns false.
|
||||
*/
|
||||
public final boolean equalsOID(long[] theID) {
|
||||
if ((theID == null) || (oid == null)) {
|
||||
return false;
|
||||
}
|
||||
|
||||
int n1 = theID.length;
|
||||
int n2 = oid.length;
|
||||
|
||||
if (n1 == 0 || n2 == 0) {
|
||||
return false;
|
||||
}
|
||||
|
||||
int n = Math.min(n1, n2);
|
||||
boolean isMatched = (theID[0] == oid[0]);
|
||||
|
||||
for (int i = 1; isMatched && (i < n); i++) {
|
||||
isMatched = (theID[i] == oid[i]);
|
||||
}
|
||||
|
||||
return isMatched;
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns the name of the object.
|
||||
* <p>
|
||||
* This method overwrites the toString() method in the Java Object class
|
||||
* (the root class of all Java objects) so that it returns the name of the
|
||||
* HObject instead of the name of the class.
|
||||
* <p>
|
||||
* For example, toString() returns "Raster Image #2" instead of
|
||||
* "hdf.object.h4.H4SDS".
|
||||
*
|
||||
* @return The name of the object.
|
||||
*/
|
||||
@Override
|
||||
public String toString() {
|
||||
if (this instanceof Group) {
|
||||
if (((Group) this).isRoot() && this.getFileFormat() != null) return this.getFileFormat().getName();
|
||||
}
|
||||
|
||||
if (name != null) return name;
|
||||
|
||||
return super.toString();
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns whether this HObject is equal to the specified HObject by comparing their OIDs.
|
||||
*
|
||||
* @param obj
|
||||
* The object
|
||||
*
|
||||
* @return true if the object is equal by OID
|
||||
*/
|
||||
public boolean equals(HObject obj) {
|
||||
// Cast down to Object to avoid infinite recursion
|
||||
if (this.equals((Object) obj))
|
||||
return true;
|
||||
|
||||
// comparing the state of OID with
|
||||
// the state of 'this' OID.
|
||||
return this.equalsOID(obj.getOID());
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean equals(Object obj) {
|
||||
if (obj == null)
|
||||
return false;
|
||||
|
||||
// checking if both the object references are
|
||||
// referring to the same object.
|
||||
if (this == obj)
|
||||
return true;
|
||||
|
||||
return false;
|
||||
}
|
||||
|
||||
@Override
|
||||
public int hashCode() {
|
||||
// We are returning the OID as a hashcode value.
|
||||
return (int) oid[0];
|
||||
}
|
||||
}
|
91
src/main/java/hdf/object/MetaDataContainer.java
Normal file
91
src/main/java/hdf/object/MetaDataContainer.java
Normal file
@ -0,0 +1,91 @@
|
||||
/*****************************************************************************
|
||||
* Copyright by The HDF Group. *
|
||||
* Copyright by the Board of Trustees of the University of Illinois. *
|
||||
* All rights reserved. *
|
||||
* *
|
||||
* This file is part of the HDF Java Products distribution. *
|
||||
* The full copyright notice, including terms governing use, modification, *
|
||||
* and redistribution, is contained in the files COPYING and Copyright.html. *
|
||||
* COPYING can be found at the root of the source code distribution tree. *
|
||||
* Or, see https://support.hdfgroup.org/products/licenses.html *
|
||||
* If you do not have access to either file, you may request a copy from *
|
||||
* help@hdfgroup.org. *
|
||||
****************************************************************************/
|
||||
|
||||
package hdf.object;
|
||||
|
||||
import java.util.List;
|
||||
|
||||
/**
|
||||
* An interface that provides general I/O operations for object metadata
|
||||
* attached to an object. For example, reading metadata content from the file
|
||||
* into memory or writing metadata content from memory into the file.
|
||||
* <p>
|
||||
*
|
||||
* @see HObject
|
||||
*
|
||||
* @version 2.0 4/2/2018
|
||||
* @author Peter X. Cao, Jordan T. Henderson
|
||||
*/
|
||||
@SuppressWarnings("rawtypes")
|
||||
public interface MetaDataContainer {
|
||||
/**
|
||||
* Retrieves the object's metadata, such as attributes, from the file.
|
||||
* <p>
|
||||
* Metadata, such as attributes, is stored in a List.
|
||||
*
|
||||
* @return the list of metadata objects.
|
||||
*
|
||||
* @throws Exception
|
||||
* if the metadata can not be retrieved
|
||||
*/
|
||||
public abstract List getMetadata() throws Exception;
|
||||
|
||||
/**
|
||||
* Writes a specific piece of metadata (such as an attribute) into the file.
|
||||
*
|
||||
* If an HDF(4&5) attribute exists in the file, this method updates its
|
||||
* value. If the attribute does not exist in the file, it creates the
|
||||
* attribute in the file and attaches it to the object. It will fail to
|
||||
* write a new attribute to the object where an attribute with the same name
|
||||
* already exists. To update the value of an existing attribute in the file,
|
||||
* one needs to get the instance of the attribute by getMetadata(), change
|
||||
* its values, then use writeMetadata() to write the value.
|
||||
*
|
||||
* @param metadata
|
||||
* the metadata to write.
|
||||
*
|
||||
* @throws Exception
|
||||
* if the metadata can not be written
|
||||
*/
|
||||
public abstract void writeMetadata(Object metadata) throws Exception;
|
||||
|
||||
/**
|
||||
* Deletes an existing piece of metadata from this object.
|
||||
*
|
||||
* @param metadata
|
||||
* the metadata to delete.
|
||||
*
|
||||
* @throws Exception
|
||||
* if the metadata can not be removed
|
||||
*/
|
||||
public abstract void removeMetadata(Object metadata) throws Exception;
|
||||
|
||||
/**
|
||||
* Updates an existing piece of metadata attached to this object.
|
||||
*
|
||||
* @param metadata
|
||||
* the metadata to update.
|
||||
*
|
||||
* @throws Exception
|
||||
* if the metadata can not be updated
|
||||
*/
|
||||
public abstract void updateMetadata(Object metadata) throws Exception;
|
||||
|
||||
/**
|
||||
* Check if the object has any attributes attached.
|
||||
*
|
||||
* @return true if it has any attributes, false otherwise.
|
||||
*/
|
||||
public abstract boolean hasAttribute();
|
||||
}
|
450
src/main/java/hdf/object/ScalarDS.java
Normal file
450
src/main/java/hdf/object/ScalarDS.java
Normal file
@ -0,0 +1,450 @@
|
||||
/*****************************************************************************
|
||||
* Copyright by The HDF Group. *
|
||||
* Copyright by the Board of Trustees of the University of Illinois. *
|
||||
* All rights reserved. *
|
||||
* *
|
||||
* This file is part of the HDF Java Products distribution. *
|
||||
* The full copyright notice, including terms governing use, modification, *
|
||||
* and redistribution, is contained in the files COPYING and Copyright.html. *
|
||||
* COPYING can be found at the root of the source code distribution tree. *
|
||||
* Or, see https://support.hdfgroup.org/products/licenses.html *
|
||||
* If you do not have access to either file, you may request a copy from *
|
||||
* help@hdfgroup.org. *
|
||||
****************************************************************************/
|
||||
|
||||
package hdf.object;
|
||||
|
||||
import java.util.Iterator;
|
||||
import java.util.List;
|
||||
import java.util.Vector;
|
||||
|
||||
/**
|
||||
* A scalar dataset is a multiple dimension array of scalar points. The Datatype of a scalar dataset must be an atomic
|
||||
* datatype. Common datatypes of scalar datasets include char, byte, short, int, long, float, double and string.
|
||||
* <p>
|
||||
* A ScalarDS can be an image or spreadsheet data. ScalarDS defines methods to deal with both images and
|
||||
* spreadsheets.
|
||||
* <p>
|
||||
* ScalarDS is an abstract class. Current implementing classes are the H4SDS, H5GRImage and H5ScalarDS.
|
||||
*
|
||||
* @version 1.1 9/4/2007
|
||||
* @author Peter X. Cao
|
||||
*/
|
||||
public abstract class ScalarDS extends Dataset {
|
||||
private static final long serialVersionUID = 8925371455928203981L;
|
||||
|
||||
private static final org.slf4j.Logger log = org.slf4j.LoggerFactory.getLogger(ScalarDS.class);
|
||||
|
||||
/************************************************************
|
||||
* The following constant strings are copied from *
|
||||
* https://support.hdfgroup.org/HDF5/doc/ADGuide/ImageSpec.html *
|
||||
* to make the definition consistent with the image specs. *
|
||||
************************************************************/
|
||||
|
||||
/**
|
||||
* Indicates that the pixel RGB values are contiguous.
|
||||
*/
|
||||
public static final int INTERLACE_PIXEL = 0;
|
||||
|
||||
/** Indicates that each pixel component of RGB is stored as a scan line. */
|
||||
public static final int INTERLACE_LINE = 1;
|
||||
|
||||
/** Indicates that each pixel component of RGB is stored as a plane. */
|
||||
public static final int INTERLACE_PLANE = 2;
|
||||
|
||||
/**
|
||||
* The interlace mode of the stored raster image data. Valid values are INTERLACE_PIXEL, INTERLACE_LINE and
|
||||
* INTERLACE_PLANE.
|
||||
*/
|
||||
protected int interlace;
|
||||
|
||||
/**
|
||||
* The min-max range of image data values. For example, [0, 255] indicates the min is 0, and the max is 255.
|
||||
*/
|
||||
protected double[] imageDataRange;
|
||||
|
||||
/**
|
||||
* The indexed RGB color model with 256 colors.
|
||||
* <p>
|
||||
* The palette values are stored in a two-dimensional byte array and arrange by color components of red, green and
|
||||
* blue. palette[][] = byte[3][256], where, palette[0][], palette[1][] and palette[2][] are the red, green and blue
|
||||
* components respectively.
|
||||
*/
|
||||
protected byte[][] palette;
|
||||
|
||||
/**
|
||||
* True if this dataset is an image.
|
||||
*/
|
||||
protected boolean isImage;
|
||||
|
||||
/**
|
||||
* True if this dataset is a true color image.
|
||||
*/
|
||||
protected boolean isTrueColor;
|
||||
|
||||
/**
|
||||
* True if this dataset is ASCII text.
|
||||
*/
|
||||
protected boolean isText;
|
||||
|
||||
/**
|
||||
* Flag to indicate is the original unsigned C data is converted.
|
||||
*/
|
||||
protected boolean unsignedConverted;
|
||||
|
||||
/** The fill value of the dataset. */
|
||||
protected Object fillValue = null;
|
||||
|
||||
private List<Number> filteredImageValues;
|
||||
|
||||
/** Flag to indicate if the dataset is displayed as an image. */
|
||||
protected boolean isImageDisplay;
|
||||
|
||||
/**
|
||||
* Flag to indicate if the dataset is displayed as an image with default order of dimensions.
|
||||
*/
|
||||
protected boolean isDefaultImageOrder;
|
||||
|
||||
/**
|
||||
* Flag to indicate if the FillValue is converted from unsigned C.
|
||||
*/
|
||||
public boolean isFillValueConverted;
|
||||
|
||||
/**
|
||||
* Constructs an instance of a ScalarDS with specific name and path. An HDF data object must have a name. The path
|
||||
* is the group path starting from the root.
|
||||
* <p>
|
||||
* For example, in H5ScalarDS(h5file, "dset", "/arrays/"), "dset" is the name of the dataset, "/arrays" is the group
|
||||
* path of the dataset.
|
||||
*
|
||||
* @param theFile
|
||||
* the file that contains the data object.
|
||||
* @param theName
|
||||
* the name of the data object, e.g. "dset".
|
||||
* @param thePath
|
||||
* the full path of the data object, e.g. "/arrays/".
|
||||
*/
|
||||
public ScalarDS(FileFormat theFile, String theName, String thePath) {
|
||||
this(theFile, theName, thePath, null);
|
||||
}
|
||||
|
||||
/**
|
||||
* @deprecated Not for public use in the future.<br>
|
||||
* Using {@link #ScalarDS(FileFormat, String, String)}
|
||||
*
|
||||
* @param theFile
|
||||
* the file that contains the data object.
|
||||
* @param theName
|
||||
* the name of the data object, e.g. "dset".
|
||||
* @param thePath
|
||||
* the full path of the data object, e.g. "/arrays/".
|
||||
* @param oid
|
||||
* the v of the data object.
|
||||
*/
|
||||
@Deprecated
|
||||
public ScalarDS(FileFormat theFile, String theName, String thePath, long[] oid) {
|
||||
super(theFile, theName, thePath, oid);
|
||||
|
||||
palette = null;
|
||||
isImage = false;
|
||||
isTrueColor = false;
|
||||
isText = false;
|
||||
interlace = -1;
|
||||
imageDataRange = null;
|
||||
isImageDisplay = false;
|
||||
isDefaultImageOrder = true;
|
||||
isFillValueConverted = false;
|
||||
filteredImageValues = new Vector<>();
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
*
|
||||
* @see hdf.object.Dataset#clearData()
|
||||
*/
|
||||
@Override
|
||||
public void clearData() {
|
||||
super.clearData();
|
||||
unsignedConverted = false;
|
||||
}
|
||||
|
||||
/**
|
||||
* Converts the data values of this dataset to appropriate Java integer if they are unsigned integers.
|
||||
*
|
||||
* @see Dataset#convertToUnsignedC(Object)
|
||||
* @see Dataset#convertFromUnsignedC(Object, Object)
|
||||
*
|
||||
* @return the converted data buffer.
|
||||
*/
|
||||
@Override
|
||||
public Object convertFromUnsignedC() {
|
||||
log.trace("convertFromUnsignedC(): start");
|
||||
// keep a copy of original buffer and the converted buffer
|
||||
// so that they can be reused later to save memory
|
||||
log.trace("convertFromUnsignedC(): unsigned={}", getDatatype().isUnsigned());
|
||||
if ((data != null) && getDatatype().isUnsigned() && !unsignedConverted) {
|
||||
log.trace("convertFromUnsignedC(): convert");
|
||||
originalBuf = data;
|
||||
convertedBuf = convertFromUnsignedC(originalBuf, convertedBuf);
|
||||
data = convertedBuf;
|
||||
unsignedConverted = true;
|
||||
|
||||
if (fillValue != null) {
|
||||
if (!isFillValueConverted) {
|
||||
fillValue = convertFromUnsignedC(fillValue, null);
|
||||
isFillValueConverted = true;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
log.trace("convertFromUnsignedC(): finish");
|
||||
return data;
|
||||
}
|
||||
|
||||
/**
|
||||
* Converts Java integer data of this dataset back to unsigned C-type integer data if they are unsigned integers.
|
||||
*
|
||||
* @see Dataset#convertToUnsignedC(Object)
|
||||
* @see Dataset#convertToUnsignedC(Object, Object)
|
||||
* @see #convertFromUnsignedC(Object data_in)
|
||||
*
|
||||
* @return the converted data buffer.
|
||||
*/
|
||||
@Override
|
||||
public Object convertToUnsignedC() {
|
||||
log.trace("convertToUnsignedC(): start");
|
||||
// keep a copy of original buffer and the converted buffer
|
||||
// so that they can be reused later to save memory
|
||||
log.trace("convertToUnsignedC(): unsigned={}", getDatatype().isUnsigned());
|
||||
if ((data != null) && getDatatype().isUnsigned()) {
|
||||
log.trace("convertToUnsignedC(): convert");
|
||||
convertedBuf = data;
|
||||
originalBuf = convertToUnsignedC(convertedBuf, originalBuf);
|
||||
data = originalBuf;
|
||||
}
|
||||
|
||||
log.trace("convertToUnsignedC(): finish");
|
||||
return data;
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns the palette of this scalar dataset or null if palette does not exist.
|
||||
* <p>
|
||||
* A Scalar dataset can be displayed as spreadsheet data or an image. When a scalar dataset is displayed as an
|
||||
* image, the palette or color table may be needed to translate a pixel value to color components (for example, red,
|
||||
* green, and blue). Some scalar datasets have no palette and some datasets have one or more than one palettes. If
|
||||
* an associated palette exists but is not loaded, this interface retrieves the palette from the file and returns the
|
||||
* palette. If the palette is loaded, it returns the palette. It returns null if there is no palette associated with
|
||||
* the dataset.
|
||||
* <p>
|
||||
* Current implementation only supports palette model of indexed RGB with 256 colors. Other models such as
|
||||
* YUV", "CMY", "CMYK", "YCbCr", "HSV will be supported in the future.
|
||||
* <p>
|
||||
* The palette values are stored in a two-dimensional byte array and are arranges by color components of red, green and
|
||||
* blue. palette[][] = byte[3][256], where, palette[0][], palette[1][] and palette[2][] are the red, green and blue
|
||||
* components respectively.
|
||||
* <p>
|
||||
* Sub-classes have to implement this interface. HDF4 and HDF5 images use different libraries to retrieve the
|
||||
* associated palette.
|
||||
*
|
||||
* @return the 2D palette byte array.
|
||||
*/
|
||||
public abstract byte[][] getPalette();
|
||||
|
||||
/**
|
||||
* Sets the palette for this dataset.
|
||||
*
|
||||
* @param pal
|
||||
* the 2D palette byte array.
|
||||
*/
|
||||
public final void setPalette(byte[][] pal) {
|
||||
palette = pal;
|
||||
}
|
||||
|
||||
/**
 * Reads a specific image palette from file.
 * <p>
 * A scalar dataset may have multiple palettes attached to it. readPalette(int idx) returns a specific palette
 * identified by its index.
 *
 * @param idx
 *            the zero-based index of the palette to read.
 *
 * @return the image palette as a 2D byte array (see {@link #getPalette()} for the layout).
 */
public abstract byte[][] readPalette(int idx);
|
||||
|
||||
/**
|
||||
* Get the name of a specific image palette from file.
|
||||
* <p>
|
||||
* A scalar dataset may have multiple palettes attached to it. getPaletteName(int idx) returns the name of a
|
||||
* specific palette identified by its index.
|
||||
*
|
||||
* @param idx
|
||||
* the index of the palette to retrieve the name.
|
||||
*
|
||||
* @return The name of the palette
|
||||
*/
|
||||
public String getPaletteName(int idx) {
|
||||
String paletteName = "Default ";
|
||||
if (idx != 0)
|
||||
paletteName = "Default " + idx;
|
||||
return paletteName;
|
||||
}
|
||||
|
||||
/**
 * Returns the byte array of palette refs.
 * <p>
 * A palette reference is an object reference that points to the palette dataset.
 * <p>
 * For example, Dataset "Iceberg" has an attribute of object reference "Palette". The attribute "Palette" has value
 * "2538" that is the object reference of the palette data set "Iceberg Palette".
 *
 * @return the palette references, or null if there is no palette attribute attached to this dataset.
 */
public abstract byte[] getPaletteRefs();
|
||||
|
||||
/**
 * Returns true if this dataset is an image.
 * <p>
 * For all Images, they must have an attribute called "CLASS". The value of this attribute is "IMAGE". For more
 * details, read <a href="https://support.hdfgroup.org/HDF5/doc/ADGuide/ImageSpec.html"> HDF5 Image and Palette Specification</a>
 *
 * @return true if the dataset is an image; otherwise, returns false.
 */
public final boolean isImage() {
    return isImage;
}
|
||||
|
||||
/**
 * Returns true if this dataset is displayed as an image.
 * <p>
 * A ScalarDS can be displayed as an image or a spreadsheet in a table.
 *
 * @return true if this dataset is displayed as an image; otherwise, returns false.
 */
public final boolean isImageDisplay() {

    return isImageDisplay;
}
|
||||
|
||||
/**
 * Returns true if this dataset is displayed as an image with default image order.
 * <p>
 * A ScalarDS can be displayed as an image with different orders of dimensions.
 *
 * @return true if this dataset is displayed as an image with default image order; otherwise, returns false.
 */
public final boolean isDefaultImageOrder() {
    return isDefaultImageOrder;
}
|
||||
|
||||
/**
 * Sets the flag to display the dataset as an image.
 *
 * @param b
 *            if b is true, display the dataset as an image
 */
public final void setIsImageDisplay(boolean b) {
    isImageDisplay = b;
}
|
||||
|
||||
/**
 * Sets the flag to indicate this dataset is an image.
 *
 * @param b
 *            if b is true, the dataset is an image.
 */
public final void setIsImage(boolean b) {
    isImage = b;
}
|
||||
|
||||
/**
|
||||
* Sets data range for an image.
|
||||
*
|
||||
* @param min
|
||||
* the data range start.
|
||||
* @param max
|
||||
* the data range end.
|
||||
*/
|
||||
public final void setImageDataRange(double min, double max) {
|
||||
if (max <= min)
|
||||
return;
|
||||
|
||||
if (imageDataRange == null)
|
||||
imageDataRange = new double[2];
|
||||
|
||||
imageDataRange[0] = min;
|
||||
imageDataRange[1] = max;
|
||||
}
|
||||
|
||||
/**
|
||||
* Add a value that will be filtered out in an image.
|
||||
*
|
||||
* @param x
|
||||
* value to be filtered
|
||||
*/
|
||||
public void addFilteredImageValue(Number x) {
|
||||
Iterator<Number> it = filteredImageValues.iterator();
|
||||
while (it.hasNext()) {
|
||||
if (it.next().toString().equals(x.toString()))
|
||||
return;
|
||||
}
|
||||
|
||||
filteredImageValues.add(x);
|
||||
}
|
||||
|
||||
/**
 * Get a list of values that will be filtered out in an image.
 * <p>
 * NOTE(review): the internal list is returned directly (no copy), so callers can
 * mutate it.
 *
 * @return the list of Image values
 */
public List<Number> getFilteredImageValues() {
    return filteredImageValues;
}
|
||||
|
||||
/**
 * Returns whether this dataset is a true color image.
 *
 * @return true if this dataset is a true color image.
 */
public final boolean isTrueColor() {
    return isTrueColor;
}
|
||||
|
||||
/**
 * Returns the interlace mode of a true color image (RGB).
 *
 * Valid values:
 *
 * <pre>
 * INTERLACE_PIXEL -- RGB components are contiguous, i.e. rgb, rgb, rgb, ...
 * INTERLACE_LINE -- each RGB component is stored as a scan line
 * INTERLACE_PLANE -- each RGB component is stored as a plane
 * </pre>
 *
 * @return the interlace mode of a true color image (RGB).
 */
public final int getInterlace() {
    return interlace;
}
|
||||
|
||||
/**
 * Returns the (min, max) pair of image data range.
 * <p>
 * NOTE(review): the internal array is returned directly (no copy); see
 * setImageDataRange, which mutates it in place.
 *
 * @return the (min, max) pair of image data range, or null if no range was set.
 */
public double[] getImageDataRange() {
    return imageDataRange;
}
|
||||
|
||||
/**
 * Returns the fill values for the dataset.
 *
 * @return the fill values for the dataset.
 */
@Override
public final Object getFillValue() {
    return fillValue;
}
|
||||
}
|
48
src/main/java/hdf/object/Utils.java
Normal file
48
src/main/java/hdf/object/Utils.java
Normal file
@ -0,0 +1,48 @@
|
||||
/*****************************************************************************
|
||||
* Copyright by The HDF Group. *
|
||||
* Copyright by the Board of Trustees of the University of Illinois. *
|
||||
* All rights reserved. *
|
||||
* *
|
||||
* This file is part of the HDF Java Products distribution. *
|
||||
* The full copyright notice, including terms governing use, modification, *
|
||||
* and redistribution, is contained in the files COPYING and Copyright.html. *
|
||||
* COPYING can be found at the root of the source code distribution tree. *
|
||||
* Or, see https://support.hdfgroup.org/products/licenses.html *
|
||||
* If you do not have access to either file, you may request a copy from *
|
||||
* help@hdfgroup.org. *
|
||||
****************************************************************************/
|
||||
|
||||
package hdf.object;
|
||||
|
||||
/**
 * Static helpers shared by the hdf.object package.
 */
public final class Utils {
    private Utils() {
        throw new IllegalStateException("Utility class");
    }

    /**
     * Retrieves the Java Runtime Class of the given Object. B = byte array, S = short array, I = int
     * array, J = long array, F = float array, D = double array, L = class or interface
     *
     * @param o
     *            the Object to determine the Runtime Class of
     * @return the Java Runtime Class of the given Object, or a space character when
     *         the argument is null or not one of the recognized kinds.
     */
    public static char getJavaObjectRuntimeClass(Object o) {
        if (o == null) {
            return ' ';
        }

        String className = o.getClass().getName();

        // A few container/CharSequence types are classified as 'L' directly.
        switch (className) {
            case "java.lang.String":
            case "java.util.Vector":
            case "java.util.Arrays$ArrayList":
            case "java.util.ArrayList":
                return 'L';
            default:
                break;
        }

        // For arrays, the JVM type descriptor encodes the component type right
        // after the last '[' (e.g. "[[I" -> 'I', "[Ljava.lang.String;" -> 'L').
        int bracketPos = className.lastIndexOf('[');
        if (bracketPos >= 0) {
            return className.charAt(bracketPos + 1);
        }

        return ' ';
    }

}
|
2255
src/main/java/hdf/object/h5/H5CompoundDS.java
Normal file
2255
src/main/java/hdf/object/h5/H5CompoundDS.java
Normal file
File diff suppressed because it is too large
Load Diff
2164
src/main/java/hdf/object/h5/H5Datatype.java
Normal file
2164
src/main/java/hdf/object/h5/H5Datatype.java
Normal file
File diff suppressed because it is too large
Load Diff
3175
src/main/java/hdf/object/h5/H5File.java
Normal file
3175
src/main/java/hdf/object/h5/H5File.java
Normal file
File diff suppressed because it is too large
Load Diff
506
src/main/java/hdf/object/h5/H5Group.java
Normal file
506
src/main/java/hdf/object/h5/H5Group.java
Normal file
@ -0,0 +1,506 @@
|
||||
/*****************************************************************************
|
||||
* Copyright by The HDF Group. *
|
||||
* Copyright by the Board of Trustees of the University of Illinois. *
|
||||
* All rights reserved. *
|
||||
* *
|
||||
* This file is part of the HDF Java Products distribution. *
|
||||
* The full copyright notice, including terms governing use, modification, *
|
||||
* and redistribution, is contained in the files COPYING and Copyright.html. *
|
||||
* COPYING can be found at the root of the source code distribution tree. *
|
||||
* Or, see https://support.hdfgroup.org/products/licenses.html *
|
||||
* If you do not have access to either file, you may request a copy from *
|
||||
* help@hdfgroup.org. *
|
||||
****************************************************************************/
|
||||
|
||||
package hdf.object.h5;
|
||||
|
||||
import java.util.List;
|
||||
import java.util.Vector;
|
||||
|
||||
import hdf.hdf5lib.H5;
|
||||
import hdf.hdf5lib.HDF5Constants;
|
||||
import hdf.hdf5lib.HDFNativeData;
|
||||
import hdf.hdf5lib.exceptions.HDF5Exception;
|
||||
import hdf.hdf5lib.structs.H5G_info_t;
|
||||
import hdf.hdf5lib.structs.H5O_info_t;
|
||||
import hdf.object.Attribute;
|
||||
import hdf.object.FileFormat;
|
||||
import hdf.object.Group;
|
||||
import hdf.object.HObject;
|
||||
|
||||
/**
|
||||
* An H5Group object represents an existing HDF5 group in file.
|
||||
* <p>
|
||||
* In HDF5, every object has at least one name. An HDF5 group is used to store a
|
||||
* set of the names together in one place, i.e. a group. The general structure
|
||||
* of a group is similar to that of the UNIX file system in that the group may
|
||||
* contain references to other groups or data objects just as the UNIX directory
|
||||
* may contain sub-directories or files.
|
||||
* <p>
|
||||
* For more information on HDF5 Groups,
|
||||
*
|
||||
* <a href="https://support.hdfgroup.org/HDF5/doc/UG/HDF5_Users_Guide-Responsive%20HTML5/index.html">HDF5 User's Guide</a>
|
||||
*
|
||||
* @version 1.1 9/4/2007
|
||||
* @author Peter X. Cao
|
||||
*/
|
||||
public class H5Group extends Group {
|
||||
|
||||
private static final long serialVersionUID = -951164512330444150L;
|
||||
|
||||
private static final org.slf4j.Logger log = org.slf4j.LoggerFactory.getLogger(H5Group.class);
|
||||
|
||||
/**
|
||||
* The list of attributes of this data object. Members of the list are
|
||||
* instance of Attribute.
|
||||
*/
|
||||
@SuppressWarnings("rawtypes")
|
||||
protected List attributeList;
|
||||
|
||||
private int nAttributes = -1;
|
||||
|
||||
private H5O_info_t obj_info;
|
||||
|
||||
/**
|
||||
* Constructs an HDF5 group with specific name, path, and parent.
|
||||
*
|
||||
* @param theFile
|
||||
* the file which containing the group.
|
||||
* @param name
|
||||
* the name of this group, e.g. "grp01".
|
||||
* @param path
|
||||
* the full path of this group, e.g. "/groups/".
|
||||
* @param parent
|
||||
* the parent of this group.
|
||||
*/
|
||||
public H5Group(FileFormat theFile, String name, String path, Group parent) {
|
||||
this(theFile, name, path, parent, null);
|
||||
}
|
||||
|
||||
/**
|
||||
* @deprecated Not for public use in the future.<br>
|
||||
* Using {@link #H5Group(FileFormat, String, String, Group)}
|
||||
*
|
||||
* @param theFile
|
||||
* the file which containing the group.
|
||||
* @param name
|
||||
* the name of this group, e.g. "grp01".
|
||||
* @param path
|
||||
* the full path of this group, e.g. "/groups/".
|
||||
* @param parent
|
||||
* the parent of this group.
|
||||
* @param oid
|
||||
* the oid of this group.
|
||||
*/
|
||||
@Deprecated
|
||||
public H5Group(FileFormat theFile, String name, String path, Group parent, long[] oid) {
|
||||
super(theFile, name, path, parent, oid);
|
||||
nMembersInFile = -1;
|
||||
obj_info = new H5O_info_t(-1L, -1L, 0, 0, -1L, 0L, 0L, 0L, 0L, null, null, null);
|
||||
|
||||
if ((oid == null) && (theFile != null)) {
|
||||
// retrieve the object ID
|
||||
try {
|
||||
byte[] ref_buf = H5.H5Rcreate(theFile.getFID(), this.getFullName(), HDF5Constants.H5R_OBJECT, -1);
|
||||
this.oid = new long[1];
|
||||
this.oid[0] = HDFNativeData.byteToLong(ref_buf, 0);
|
||||
}
|
||||
catch (Exception ex) {
|
||||
this.oid = new long[1];
|
||||
this.oid[0] = 0;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
*
|
||||
* @see hdf.object.DataFormat#hasAttribute()
|
||||
*/
|
||||
@Override
|
||||
public boolean hasAttribute() {
|
||||
obj_info.num_attrs = nAttributes;
|
||||
|
||||
if (obj_info.num_attrs < 0) {
|
||||
long gid = open();
|
||||
if (gid > 0) {
|
||||
try {
|
||||
obj_info = H5.H5Oget_info(gid);
|
||||
|
||||
}
|
||||
catch (Exception ex) {
|
||||
obj_info.num_attrs = 0;
|
||||
}
|
||||
close(gid);
|
||||
}
|
||||
}
|
||||
|
||||
log.trace("hasAttribute(): nAttributes={}", obj_info.num_attrs);
|
||||
|
||||
return (obj_info.num_attrs > 0);
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
*
|
||||
* @see hdf.object.Group#getNumberOfMembersInFile()
|
||||
*/
|
||||
@Override
|
||||
public int getNumberOfMembersInFile() {
|
||||
if (nMembersInFile < 0) {
|
||||
long gid = open();
|
||||
if (gid > 0) {
|
||||
try {
|
||||
H5G_info_t group_info = null;
|
||||
group_info = H5.H5Gget_info(gid);
|
||||
nMembersInFile = (int) group_info.nlinks;
|
||||
}
|
||||
catch (Exception ex) {
|
||||
nMembersInFile = 0;
|
||||
}
|
||||
close(gid);
|
||||
}
|
||||
}
|
||||
return nMembersInFile;
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
*
|
||||
* @see hdf.object.Group#clear()
|
||||
*/
|
||||
@SuppressWarnings("rawtypes")
|
||||
@Override
|
||||
public void clear() {
|
||||
super.clear();
|
||||
|
||||
if (attributeList != null) {
|
||||
((Vector) attributeList).setSize(0);
|
||||
}
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
*
|
||||
* @see hdf.object.DataFormat#getMetadata()
|
||||
*/
|
||||
@Override
|
||||
@SuppressWarnings("rawtypes")
|
||||
public List getMetadata() throws HDF5Exception {
|
||||
return this.getMetadata(fileFormat.getIndexType(null), fileFormat.getIndexOrder(null));
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
*
|
||||
* @see hdf.object.DataFormat#getMetadata(int...)
|
||||
*/
|
||||
@SuppressWarnings("rawtypes")
|
||||
public List getMetadata(int... attrPropList) throws HDF5Exception {
|
||||
log.trace("getMetadata(): start");
|
||||
if (attributeList == null) {
|
||||
log.trace("getMetadata(): get attributeList");
|
||||
|
||||
int indxType = fileFormat.getIndexType(null);
|
||||
int order = fileFormat.getIndexOrder(null);
|
||||
|
||||
if (attrPropList.length > 0) {
|
||||
indxType = attrPropList[0];
|
||||
if (attrPropList.length > 1) {
|
||||
order = attrPropList[1];
|
||||
}
|
||||
}
|
||||
try {
|
||||
attributeList = H5File.getAttribute(this, indxType, order);
|
||||
}
|
||||
catch (Exception ex) {
|
||||
log.debug("getMetadata(): H5File.getAttribute failure: ", ex);
|
||||
}
|
||||
}
|
||||
|
||||
try {
|
||||
if (!this.isRoot()) this.linkTargetObjName = H5File.getLinkTargetName(this);
|
||||
}
|
||||
catch (Exception ex) {
|
||||
log.debug("getMetadata(): getLinkTargetName failure: ", ex);
|
||||
}
|
||||
|
||||
log.trace("getMetadata(): finish");
|
||||
return attributeList;
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
*
|
||||
* @see hdf.object.DataFormat#writeMetadata(java.lang.Object)
|
||||
*/
|
||||
@Override
|
||||
@SuppressWarnings("unchecked")
|
||||
public void writeMetadata(Object info) throws Exception {
|
||||
log.trace("writeMetadata(): start");
|
||||
// only attribute metadata is supported.
|
||||
if (!(info instanceof Attribute)) {
|
||||
log.debug("writeMetadata(): Object not an Attribute");
|
||||
log.trace("writeMetadata(): finish");
|
||||
return;
|
||||
}
|
||||
|
||||
boolean attrExisted = false;
|
||||
Attribute attr = (Attribute) info;
|
||||
log.trace("writeMetadata(): {}", attr.getName());
|
||||
|
||||
if (attributeList == null) {
|
||||
this.getMetadata();
|
||||
}
|
||||
|
||||
if (attributeList != null) attrExisted = attributeList.contains(attr);
|
||||
|
||||
getFileFormat().writeAttribute(this, attr, attrExisted);
|
||||
// add the new attribute into attribute list
|
||||
if (!attrExisted) {
|
||||
attributeList.add(attr);
|
||||
nAttributes = attributeList.size();
|
||||
}
|
||||
log.trace("writeMetadata(): finish");
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
*
|
||||
* @see hdf.object.DataFormat#removeMetadata(java.lang.Object)
|
||||
*/
|
||||
@Override
|
||||
@SuppressWarnings("rawtypes")
|
||||
public void removeMetadata(Object info) throws HDF5Exception {
|
||||
log.trace("removeMetadata(): start");
|
||||
// only attribute metadata is supported.
|
||||
if (!(info instanceof Attribute)) {
|
||||
log.debug("removeMetadata(): Object not an Attribute");
|
||||
log.trace("removeMetadata(): finish");
|
||||
return;
|
||||
}
|
||||
|
||||
Attribute attr = (Attribute) info;
|
||||
log.trace("removeMetadata(): {}", attr.getName());
|
||||
long gid = open();
|
||||
if(gid >= 0) {
|
||||
try {
|
||||
H5.H5Adelete(gid, attr.getName());
|
||||
List attrList = getMetadata();
|
||||
attrList.remove(attr);
|
||||
nAttributes = attributeList.size();
|
||||
}
|
||||
finally {
|
||||
close(gid);
|
||||
}
|
||||
}
|
||||
else {
|
||||
log.debug("removeMetadata(): failed to open group");
|
||||
}
|
||||
|
||||
log.trace("removeMetadata(): finish");
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
*
|
||||
* @see hdf.object.DataFormat#updateMetadata(java.lang.Object)
|
||||
*/
|
||||
@Override
|
||||
public void updateMetadata(Object info) throws HDF5Exception {
|
||||
log.trace("updateMetadata(): start");
|
||||
// only attribute metadata is supported.
|
||||
if (!(info instanceof Attribute)) {
|
||||
log.debug("updateMetadata(): Object not an Attribute");
|
||||
log.trace("updateMetadata(): finish");
|
||||
return;
|
||||
}
|
||||
|
||||
nAttributes = -1;
|
||||
log.trace("updateMetadata(): finish");
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
*
|
||||
* @see hdf.object.HObject#open()
|
||||
*/
|
||||
@Override
|
||||
public long open() {
|
||||
log.trace("open(): start");
|
||||
long gid = -1;
|
||||
|
||||
try {
|
||||
if (isRoot()) {
|
||||
gid = H5.H5Gopen(getFID(), SEPARATOR, HDF5Constants.H5P_DEFAULT);
|
||||
}
|
||||
else {
|
||||
gid = H5.H5Gopen(getFID(), getPath() + getName(), HDF5Constants.H5P_DEFAULT);
|
||||
}
|
||||
|
||||
}
|
||||
catch (HDF5Exception ex) {
|
||||
gid = -1;
|
||||
}
|
||||
|
||||
log.trace("open(): finish");
|
||||
return gid;
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
*
|
||||
* @see hdf.object.HObject#close(int)
|
||||
*/
|
||||
@Override
|
||||
public void close(long gid) {
|
||||
try {
|
||||
H5.H5Gclose(gid);
|
||||
}
|
||||
catch (HDF5Exception ex) {
|
||||
log.debug("close(): H5Gclose(gid {}): ", gid, ex);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates a new group with a name in a group and with the group creation
|
||||
* properties specified in gplist.
|
||||
* <p>
|
||||
* The gplist contains a sequence of group creation property list
|
||||
* identifiers, lcpl, gcpl, gapl. It allows the user to create a group with
|
||||
* group creation properties. It will close the group creation properties
|
||||
* specified in gplist.
|
||||
*
|
||||
* @see H5#H5Gcreate(long, String, long, long, long) for the
|
||||
* order of property list identifiers.
|
||||
*
|
||||
* @param name
|
||||
* The name of a new group.
|
||||
* @param pgroup
|
||||
* The parent group object.
|
||||
* @param gplist
|
||||
* The group creation properties, in which the order of the
|
||||
* properties conforms the HDF5 library API, H5Gcreate(), i.e.
|
||||
* lcpl, gcpl and gapl, where
|
||||
* <ul>
|
||||
* <li>lcpl : Property list for link creation <li>gcpl : Property
|
||||
* list for group creation <li>gapl : Property list for group
|
||||
* access
|
||||
* </ul>
|
||||
*
|
||||
* @return The new group if successful; otherwise returns null.
|
||||
*
|
||||
* @throws Exception if there is a failure.
|
||||
*/
|
||||
public static H5Group create(String name, Group pgroup, long... gplist) throws Exception {
|
||||
log.trace("create(): start");
|
||||
H5Group group = null;
|
||||
String fullPath = null;
|
||||
long lcpl = HDF5Constants.H5P_DEFAULT;
|
||||
long gcpl = HDF5Constants.H5P_DEFAULT;
|
||||
long gapl = HDF5Constants.H5P_DEFAULT;
|
||||
|
||||
if (gplist.length > 0) {
|
||||
lcpl = gplist[0];
|
||||
if (gplist.length > 1) {
|
||||
gcpl = gplist[1];
|
||||
if (gplist.length > 2) gapl = gplist[2];
|
||||
}
|
||||
}
|
||||
|
||||
if ((name == null) || (pgroup == null)) {
|
||||
log.debug("create(): one or more parameters are null");
|
||||
log.trace("create(): finish");
|
||||
System.err.println("(name == null) || (pgroup == null)");
|
||||
return null;
|
||||
}
|
||||
|
||||
H5File file = (H5File) pgroup.getFileFormat();
|
||||
|
||||
if (file == null) {
|
||||
log.debug("create(): Parent Group FileFormat is null");
|
||||
log.trace("create(): finish");
|
||||
System.err.println("Could not get file that contains object");
|
||||
return null;
|
||||
}
|
||||
|
||||
String path = HObject.SEPARATOR;
|
||||
if (!pgroup.isRoot()) {
|
||||
path = pgroup.getPath() + pgroup.getName() + HObject.SEPARATOR;
|
||||
if (name.endsWith("/")) {
|
||||
name = name.substring(0, name.length() - 1);
|
||||
}
|
||||
int idx = name.lastIndexOf('/');
|
||||
if (idx >= 0) {
|
||||
name = name.substring(idx + 1);
|
||||
}
|
||||
}
|
||||
|
||||
fullPath = path + name;
|
||||
|
||||
// create a new group and add it to the parent node
|
||||
long gid = H5.H5Gcreate(file.open(), fullPath, lcpl, gcpl, gapl);
|
||||
try {
|
||||
H5.H5Gclose(gid);
|
||||
}
|
||||
catch (Exception ex) {
|
||||
log.debug("create(): H5Gcreate {} H5Gclose(gid {}) failure: ", fullPath, gid, ex);
|
||||
}
|
||||
|
||||
byte[] ref_buf = H5.H5Rcreate(file.open(), fullPath, HDF5Constants.H5R_OBJECT, -1);
|
||||
long l = HDFNativeData.byteToLong(ref_buf, 0);
|
||||
long[] oid = { l };
|
||||
|
||||
group = new H5Group(file, name, path, pgroup, oid);
|
||||
|
||||
if (group != null) {
|
||||
pgroup.addToMemberList(group);
|
||||
}
|
||||
|
||||
if (gcpl > 0) {
|
||||
try {
|
||||
H5.H5Pclose(gcpl);
|
||||
}
|
||||
catch (final Exception ex) {
|
||||
log.debug("create(): create prop H5Pclose(gcpl {}) failure: ", gcpl, ex);
|
||||
}
|
||||
}
|
||||
|
||||
log.trace("create(): finish");
|
||||
return group;
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
*
|
||||
* @see hdf.object.HObject#setName(java.lang.String)
|
||||
*/
|
||||
@Override
|
||||
public void setName(String newName) throws Exception {
|
||||
H5File.renameObject(this, newName);
|
||||
super.setName(newName);
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
*
|
||||
* @see hdf.object.HObject#setPath(java.lang.String)
|
||||
*/
|
||||
@SuppressWarnings("rawtypes")
|
||||
@Override
|
||||
public void setPath(String newPath) throws Exception {
|
||||
super.setPath(newPath);
|
||||
|
||||
List members = this.getMemberList();
|
||||
if (members == null) {
|
||||
return;
|
||||
}
|
||||
|
||||
int n = members.size();
|
||||
HObject obj = null;
|
||||
for (int i = 0; i < n; i++) {
|
||||
obj = (HObject) members.get(i);
|
||||
obj.setPath(getPath() + getName() + HObject.SEPARATOR);
|
||||
}
|
||||
}
|
||||
}
|
112
src/main/java/hdf/object/h5/H5Link.java
Normal file
112
src/main/java/hdf/object/h5/H5Link.java
Normal file
@ -0,0 +1,112 @@
|
||||
/*****************************************************************************
|
||||
* Copyright by The HDF Group. *
|
||||
* Copyright by the Board of Trustees of the University of Illinois. *
|
||||
* All rights reserved. *
|
||||
* *
|
||||
* This file is part of the HDF Java Products distribution. *
|
||||
* The full copyright notice, including terms governing use, modification, *
|
||||
* and redistribution, is contained in the files COPYING and Copyright.html. *
|
||||
* COPYING can be found at the root of the source code distribution tree. *
|
||||
* Or, see https://support.hdfgroup.org/products/licenses.html *
|
||||
* If you do not have access to either file, you may request a copy from *
|
||||
* help@hdfgroup.org. *
|
||||
****************************************************************************/
|
||||
|
||||
package hdf.object.h5;
|
||||
|
||||
import java.util.List;
|
||||
|
||||
import hdf.hdf5lib.structs.H5O_info_t;
|
||||
import hdf.object.FileFormat;
|
||||
import hdf.object.HObject;
|
||||
import hdf.object.MetaDataContainer;
|
||||
|
||||
/**
|
||||
* An H5Link object represents an existing HDF5 object in file.
|
||||
* <p>
|
||||
* H5Link object is an HDF5 object that is either a soft or an external link to
|
||||
* an object in a file that does not exist. The type of the object is unknown.
|
||||
* Once the object being linked to is created, and the type is known, then
|
||||
* H5link object will change its type.
|
||||
*
|
||||
* @version 2.7.2 7/6/2010
|
||||
* @author Nidhi Gupta
|
||||
*/
|
||||
|
||||
public class H5Link extends HObject implements MetaDataContainer {
|
||||
private static final long serialVersionUID = -8137277460521594367L;
|
||||
|
||||
@SuppressWarnings("unused")
|
||||
private H5O_info_t obj_info;
|
||||
|
||||
/**
|
||||
* Constructs an HDF5 link with specific name, path, and parent.
|
||||
*
|
||||
* @param theFile
|
||||
* the file which containing the link.
|
||||
* @param name
|
||||
* the name of this link, e.g. "link1".
|
||||
* @param path
|
||||
* the full path of this link, e.g. "/groups/".
|
||||
*/
|
||||
public H5Link(FileFormat theFile, String name, String path) {
|
||||
this (theFile, name, path, null);
|
||||
}
|
||||
|
||||
@SuppressWarnings("deprecation")
|
||||
public H5Link(FileFormat theFile, String theName, String thePath,
|
||||
long[] oid) {
|
||||
super(theFile, theName, thePath, oid);
|
||||
|
||||
obj_info = new H5O_info_t(-1L, -1L, -1, 0, -1L, 0L, 0L, 0L, 0L, null,null,null);
|
||||
}
|
||||
|
||||
@Override
|
||||
public void close(long id) {
|
||||
}
|
||||
|
||||
@Override
|
||||
public long open() {
|
||||
return 0;
|
||||
}
|
||||
|
||||
@SuppressWarnings("rawtypes")
|
||||
public List getMetadata() throws Exception {
|
||||
|
||||
try{
|
||||
this.linkTargetObjName= H5File.getLinkTargetName(this);
|
||||
}catch(Exception ex){
|
||||
}
|
||||
|
||||
return null;
|
||||
}
|
||||
|
||||
public boolean hasAttribute() {
|
||||
return false;
|
||||
}
|
||||
|
||||
public void removeMetadata(Object info) throws Exception {
|
||||
}
|
||||
|
||||
public void writeMetadata(Object info) throws Exception {
|
||||
}
|
||||
|
||||
public void updateMetadata(Object info) throws Exception {
|
||||
}
|
||||
|
||||
@SuppressWarnings("rawtypes")
|
||||
public List getMetadata(int... attrPropList) throws Exception {
|
||||
return null;
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
*
|
||||
* @see hdf.object.HObject#setName(java.lang.String)
|
||||
*/
|
||||
@Override
|
||||
public void setName(String newName) throws Exception {
|
||||
H5File.renameObject(this, newName);
|
||||
super.setName(newName);
|
||||
}
|
||||
}
|
2491
src/main/java/hdf/object/h5/H5ScalarDS.java
Normal file
2491
src/main/java/hdf/object/h5/H5ScalarDS.java
Normal file
File diff suppressed because it is too large
Load Diff
135
src/main/java/hdf/object/h5/H5Utils.java
Normal file
135
src/main/java/hdf/object/h5/H5Utils.java
Normal file
@ -0,0 +1,135 @@
|
||||
/*****************************************************************************
|
||||
* Copyright by The HDF Group. *
|
||||
* Copyright by the Board of Trustees of the University of Illinois. *
|
||||
* All rights reserved. *
|
||||
* *
|
||||
* This file is part of the HDF Java Products distribution. *
|
||||
* The full copyright notice, including terms governing use, modification, *
|
||||
* and redistribution, is contained in the files COPYING and Copyright.html. *
|
||||
* COPYING can be found at the root of the source code distribution tree. *
|
||||
* Or, see https://support.hdfgroup.org/products/licenses.html *
|
||||
* If you do not have access to either file, you may request a copy from *
|
||||
* help@hdfgroup.org. *
|
||||
****************************************************************************/
|
||||
|
||||
package hdf.object.h5;
|
||||
|
||||
import hdf.hdf5lib.H5;
|
||||
import hdf.hdf5lib.HDF5Constants;
|
||||
import hdf.hdf5lib.exceptions.HDF5Exception;
|
||||
|
||||
public final class H5Utils {
|
||||
|
||||
private static final org.slf4j.Logger log = org.slf4j.LoggerFactory.getLogger(H5Utils.class);
|
||||
|
||||
/**
|
||||
* Set up a hyperslab selection within a dataset.
|
||||
*
|
||||
* @param did
|
||||
* IN dataset ID
|
||||
* @param dsetDims
|
||||
* IN dimensions
|
||||
* @param startDims
|
||||
* IN start dimensions
|
||||
* @param selectedStride
|
||||
* IN selected stride values
|
||||
* @param selectedDims
|
||||
* IN selected dimensions
|
||||
* @param spaceIDs
|
||||
* IN/OUT memory and file space IDs -- spaceIDs[0]=mspace, spaceIDs[1]=fspace
|
||||
*
|
||||
* @return total number of data points selected
|
||||
*
|
||||
* @throws HDF5Exception
|
||||
* If there is an error at the HDF5 library level.
|
||||
*/
|
||||
public static final long selectHyperslab(long did, long[] dsetDims, long[] startDims, long[] selectedStride,
|
||||
long[] selectedDims, long[] spaceIDs) throws HDF5Exception {
|
||||
log.trace("selectHyperslab(): start");
|
||||
|
||||
if (dsetDims == null) {
|
||||
log.debug("selectHyperslab(): dsetDims is null");
|
||||
return -1;
|
||||
}
|
||||
|
||||
int rank = dsetDims.length;
|
||||
if ((startDims != null) && (startDims.length != rank)) {
|
||||
log.debug("selectHyperslab(): startDims rank didn't match dsetDims rank");
|
||||
return -1;
|
||||
}
|
||||
if ((selectedStride != null) && (selectedStride.length != rank)) {
|
||||
log.debug("selectHyperslab(): selectedStride rank didn't match startDims rank");
|
||||
return -1;
|
||||
}
|
||||
if ((selectedDims != null) && (selectedDims.length != rank)) {
|
||||
log.debug("selectHyperslab(): selectedDims rank didn't match startDims rank");
|
||||
return -1;
|
||||
}
|
||||
|
||||
long lsize = 1;
|
||||
|
||||
boolean isAllSelected = true;
|
||||
for (int i = 0; i < rank; i++) {
|
||||
if (selectedDims != null) {
|
||||
lsize *= selectedDims[i];
|
||||
if (selectedDims[i] < dsetDims[i]) {
|
||||
isAllSelected = false;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
log.trace("selectHyperslab(): isAllSelected={}", isAllSelected);
|
||||
|
||||
if (isAllSelected) {
|
||||
spaceIDs[0] = HDF5Constants.H5S_ALL;
|
||||
spaceIDs[1] = HDF5Constants.H5S_ALL;
|
||||
}
|
||||
else {
|
||||
spaceIDs[1] = H5.H5Dget_space(did);
|
||||
|
||||
// When a 1D dataspace is used for a chunked dataset, reading is very slow.
|
||||
//
|
||||
// It is a known problem within the HDF5 library.
|
||||
// mspace = H5.H5Screate_simple(1, lsize, null);
|
||||
spaceIDs[0] = H5.H5Screate_simple(rank, selectedDims, null);
|
||||
H5.H5Sselect_hyperslab(spaceIDs[1], HDF5Constants.H5S_SELECT_SET, startDims, selectedStride, selectedDims, null);
|
||||
}
|
||||
|
||||
log.trace("selectHyperslab(): finish");
|
||||
|
||||
return lsize;
|
||||
}
|
||||
|
||||
public static final long getTotalSelectedSpacePoints(long did, long[] dsetDims, long[] startDims,
|
||||
long[] selectedStride, long[] selectedDims, long[] spaceIDs) throws HDF5Exception {
|
||||
long totalSelectedSpacePoints = selectHyperslab(did, dsetDims, startDims, selectedStride, selectedDims, spaceIDs);
|
||||
|
||||
log.trace("getTotalSelectedSpacePoints(): selected {} points in dataset's dataspace", totalSelectedSpacePoints);
|
||||
|
||||
if (totalSelectedSpacePoints == 0) {
|
||||
log.debug("getTotalSelectedSpacePoints(): No data to read. Dataset or selected subset is empty.");
|
||||
log.trace("getTotalSelectedSpacePoints(): finish");
|
||||
throw new HDF5Exception("No data to read.\nEither the dataset or the selected subset is empty.");
|
||||
}
|
||||
|
||||
if (totalSelectedSpacePoints < Integer.MIN_VALUE || totalSelectedSpacePoints > Integer.MAX_VALUE) {
|
||||
log.debug("getTotalSelectedSpacePoints(): totalSelectedSpacePoints outside valid Java int range; unsafe cast");
|
||||
log.trace("getTotalSelectedSpacePoints(): finish");
|
||||
throw new HDF5Exception("Invalid int size");
|
||||
}
|
||||
|
||||
if (log.isDebugEnabled()) {
|
||||
// check is storage space is allocated
|
||||
try {
|
||||
long ssize = H5.H5Dget_storage_size(did);
|
||||
log.trace("getTotalSelectedSpacePoints(): Storage space allocated = {} bytes", ssize);
|
||||
}
|
||||
catch (Exception ex) {
|
||||
log.debug("getTotalSelectedSpacePoints(): check if storage space is allocated:", ex);
|
||||
}
|
||||
}
|
||||
|
||||
return totalSelectedSpacePoints;
|
||||
}
|
||||
|
||||
}
|
22
src/test/java/ch/psi/imagej/hdf5/HDF5ReaderTest.java
Normal file
22
src/test/java/ch/psi/imagej/hdf5/HDF5ReaderTest.java
Normal file
@ -0,0 +1,22 @@
|
||||
package ch.psi.imagej.hdf5;
|
||||
|
||||
import org.junit.Test;
|
||||
|
||||
import java.util.Map;
|
||||
|
||||
import static org.junit.Assert.*;
|
||||
|
||||
/**
|
||||
*
|
||||
*/
|
||||
public class HDF5ReaderTest {
|
||||
@Test
|
||||
public void parseArguments() throws Exception {
|
||||
|
||||
Map map = HDF5Reader.parseArguments("para1=value1 para2=value2 PARA=VAL");
|
||||
assertTrue(map.get("para1").equals("value1"));
|
||||
assertTrue(map.get("para2").equals("value2"));
|
||||
assertTrue(map.get("PARA").equals("VAL"));
|
||||
}
|
||||
|
||||
}
|
@ -2,6 +2,7 @@ package ch.psi.imagej.hdf5;
|
||||
|
||||
import static org.junit.Assert.*;
|
||||
|
||||
import ij.IJ;
|
||||
import org.junit.Test;
|
||||
|
||||
public class HDF5UtilitiesTest {
|
||||
@ -22,4 +23,14 @@ public class HDF5UtilitiesTest {
|
||||
assertEquals(gdescriptor, "three");
|
||||
}
|
||||
|
||||
|
||||
@Test
|
||||
public void testOpen() {
|
||||
IJ.run("HDF5...");
|
||||
String descriptor = "/test/one/two/three";
|
||||
String gdescriptor = HDF5Utilities.getDatasetName(descriptor);
|
||||
System.out.println(gdescriptor);
|
||||
assertEquals(gdescriptor, "three");
|
||||
}
|
||||
|
||||
}
|
||||
|
Reference in New Issue
Block a user