This commit is contained in:
ebner 2014-03-05 14:34:28 +01:00
parent ef4135b9ad
commit 51f4a7beef
6 changed files with 99 additions and 92 deletions

View File

@ -12,14 +12,14 @@
<attribute name="maven.pomderived" value="true"/>
</attributes>
</classpathentry>
<classpathentry kind="src" path="src/main/resources"/>
<classpathentry kind="src" path="src/test/resources"/>
<classpathentry kind="con" path="org.eclipse.jdt.launching.JRE_CONTAINER/org.eclipse.jdt.internal.debug.ui.launcher.StandardVMType/J2SE-1.5">
<classpathentry including="**/*.java" kind="src" path="src/main/resources"/>
<classpathentry including="**/*.java" kind="src" path="src/test/resources"/>
<classpathentry kind="con" path="org.eclipse.m2e.MAVEN2_CLASSPATH_CONTAINER">
<attributes>
<attribute name="maven.pomderived" value="true"/>
</attributes>
</classpathentry>
<classpathentry kind="con" path="org.eclipse.m2e.MAVEN2_CLASSPATH_CONTAINER">
<classpathentry kind="con" path="org.eclipse.jdt.launching.JRE_CONTAINER/org.eclipse.jdt.internal.debug.ui.launcher.StandardVMType/JavaSE-1.7">
<attributes>
<attribute name="maven.pomderived" value="true"/>
</attributes>

View File

@ -0,0 +1,3 @@
eclipse.preferences.version=1
encoding//src/main/java=UTF-8
encoding//src/test/java=UTF-8

View File

@ -1,5 +1,5 @@
eclipse.preferences.version=1
org.eclipse.jdt.core.compiler.codegen.targetPlatform=1.5
org.eclipse.jdt.core.compiler.compliance=1.5
org.eclipse.jdt.core.compiler.codegen.targetPlatform=1.7
org.eclipse.jdt.core.compiler.compliance=1.7
org.eclipse.jdt.core.compiler.problem.forbiddenReference=warning
org.eclipse.jdt.core.compiler.source=1.5
org.eclipse.jdt.core.compiler.source=1.7

42
pom.xml
View File

@ -3,7 +3,7 @@
<modelVersion>4.0.0</modelVersion>
<groupId>ch.psi</groupId>
<artifactId>imagej.hdf5</artifactId>
<version>0.0.1</version>
<version>0.2.0</version>
<dependencies>
<dependency>
@ -12,39 +12,61 @@
<version>1.46</version>
<scope>provided</scope>
</dependency>
<dependency>
<groupId>hdf5</groupId>
<artifactId>jhdf</artifactId>
<version>1.0.0</version>
<!-- <scope>system</scope>
<systemPath>${project.basedir}/hdf-java/jhdf.jar</systemPath> -->
</dependency>
<dependency>
<groupId>hdf5</groupId>
<artifactId>jhdfobj</artifactId>
<version>1.0.0</version>
<!-- <scope>system</scope>
<systemPath>${project.basedir}/hdf-java/jhdfobj.jar</systemPath> -->
</dependency>
<dependency>
<groupId>hdf5</groupId>
<artifactId>jhdf5</artifactId>
<version>1.0.0</version>
<!-- <scope>system</scope>
<systemPath>${project.basedir}/hdf-java/jhdf5.jar</systemPath> -->
</dependency>
<dependency>
<groupId>hdf5</groupId>
<artifactId>jhdf5obj</artifactId>
<version>1.0.0</version>
<!-- <scope>system</scope>
<systemPath>${project.basedir}/hdf-java/jhdf5obj.jar</systemPath> -->
</dependency>
<!-- <dependency>
<groupId>hdf5</groupId>
<artifactId>hdf</artifactId>
<version>2.10.0</version>
</dependency>
<dependency>
<groupId>hdf5</groupId>
<artifactId>hdfobj</artifactId>
<version>2.10.0</version>
</dependency>
<dependency>
<groupId>hdf5</groupId>
<artifactId>hdf5</artifactId>
<version>2.10.0</version>
</dependency>
<dependency>
<groupId>hdf5</groupId>
<artifactId>hdf5obj</artifactId>
<version>2.10.0</version>
</dependency> -->
</dependencies>
<build>
<plugins>
<plugin>
<artifactId>maven-compiler-plugin</artifactId>
<version>2.3.2</version>
<configuration>
<encoding>UTF-8</encoding>
<source>1.7</source>
<target>1.7</target>
</configuration>
</plugin>
<plugin>
<artifactId>maven-assembly-plugin</artifactId>
<version>2.4</version>

View File

@ -8,12 +8,14 @@ import java.lang.String;
public class HDF5Config implements PlugIn {
public static String GROUP_VARS_BY_NAME = "HDF5.groupVarsByName";
public void run(String arg) {
// set default values
setDefaultsIfNoValueExists();
// read ImageJ Preferences
boolean groupVarsByName = Boolean.getBoolean(getDefaultValue("HDF5.groupVarsByName"));
groupVarsByName = Prefs.get("HDF5.groupVarsByName", groupVarsByName);
boolean groupVarsByName = Boolean.getBoolean(getDefaultValue(GROUP_VARS_BY_NAME));
groupVarsByName = Prefs.get(GROUP_VARS_BY_NAME, groupVarsByName);
boolean showUnmatchedDataSetNames = Boolean.getBoolean(getDefaultValue("HDF5.showUnmatchedDataSetNames"));
showUnmatchedDataSetNames = Prefs.get("HDF5.showUnmatchedDataSetNames", showUnmatchedDataSetNames);
@ -90,7 +92,7 @@ public class HDF5Config implements PlugIn {
System.out.println("Saving...");
// all OK and "Save" was pressed, so save it...
Prefs.set("HDF5.groupVarsByName", groupVarsByName);
Prefs.set(GROUP_VARS_BY_NAME, groupVarsByName);
Prefs.set("HDF5.showUnmatchedDataSetNames", showUnmatchedDataSetNames);
Prefs.set("HDF5.groupVarsByNameFormatGroup", groupVarsByNameFormatGroup);
Prefs.set("HDF5.groupVarsByNameFormat", groupVarsByNameFormat);
@ -102,9 +104,9 @@ public class HDF5Config implements PlugIn {
}
public static void setDefaultsIfNoValueExists() {
boolean groupVarsByName = Boolean.getBoolean(getDefaultValue("HDF5.groupVarsByName"));
groupVarsByName = Prefs.get("HDF5.groupVarsByName", groupVarsByName);
Prefs.set("HDF5.groupVarsByName", groupVarsByName);
boolean groupVarsByName = Boolean.getBoolean(getDefaultValue(GROUP_VARS_BY_NAME));
groupVarsByName = Prefs.get(GROUP_VARS_BY_NAME, groupVarsByName);
Prefs.set(GROUP_VARS_BY_NAME, groupVarsByName);
boolean showUnmatchedDataSetNames = Boolean.getBoolean(getDefaultValue("HDF5.showUnmatchedDataSetNames"));
showUnmatchedDataSetNames = Prefs.get("HDF5.showUnmatchedDataSetNames", showUnmatchedDataSetNames);
@ -124,7 +126,7 @@ public class HDF5Config implements PlugIn {
}
public static String getDefaultValue(String key) {
if (key.equals("HDF5.groupVarsByName")) {
if (key.equals(GROUP_VARS_BY_NAME)) {
boolean groupVarsByName = true; // default
return Boolean.toString(groupVarsByName);
} else if (key.equals("HDF5.showUnmatchedDataSetNames")) {

View File

@ -32,16 +32,20 @@ import ij.process.ImageProcessor;
import java.io.File;
import java.util.ArrayList;
import java.util.Iterator;
import java.util.List;
import java.util.logging.Level;
import java.util.logging.Logger;
import java.awt.*;
import ncsa.hdf.object.*; // the common object package
import ncsa.hdf.object.h5.*; // the HDF5 implementation
import ncsa.hdf.object.*;
import ncsa.hdf.object.h5.*;
import ncsa.hdf.hdf5lib.exceptions.HDF5Exception;
import ncsa.hdf.hdflib.HDFException;
public class HDF5Reader implements PlugIn {
private static final Logger logger = Logger.getLogger(HDF5Reader.class.getName());
public void run(String arg) {
// make sure default values for config are written
// HDF5_Config.setDefaultsIfNoValueExists();
@ -91,8 +95,8 @@ public class HDF5Reader implements PlugIn {
/*-------------------------------------------------------------------
* read HDF5_Config prefs
*-------------------------------------------------------------------*/
boolean groupVarsByName = Boolean.getBoolean(HDF5Config.getDefaultValue("HDF5.groupVarsByName"));
groupVarsByName = Prefs.get("HDF5.groupVarsByName", groupVarsByName);
boolean groupVarsByName = Boolean.getBoolean(HDF5Config.getDefaultValue(HDF5Config.GROUP_VARS_BY_NAME));
groupVarsByName = Prefs.get(HDF5Config.GROUP_VARS_BY_NAME, groupVarsByName);
boolean showUnmatchedDataSetNames = Boolean.getBoolean(HDF5Config.getDefaultValue("HDF5.showUnmatchedDataSetNames"));
showUnmatchedDataSetNames = Prefs.get("HDF5.showUnmatchedDataSetNames", showUnmatchedDataSetNames);
@ -385,6 +389,13 @@ public class HDF5Reader implements PlugIn {
}
} else if (varList.size() > 1000) {
System.out.println("#######");
for(Dataset d: varList){
System.out.println(d.getFullName());
}
System.out.println("#######");
/*-----------------------------------------------------------------
* FIXME: quick and dirty hack for files with more than 1000
* datasets
@ -784,7 +795,7 @@ public class HDF5Reader implements PlugIn {
System.out.println(" Element-Size in um (level,row,col): " + elem_sizes[0] + ", " + elem_sizes[1] + ", " + elem_sizes[2]);
// nice gadget to update the progress bar
long progressDivisor = extent[0] / progressSteps;
long progressDivisor = extent[0] / 50; // we assume 50 progress steps
if (progressDivisor < 1)
progressDivisor = 1;
@ -1689,21 +1700,8 @@ public class HDF5Reader implements PlugIn {
}
}
} catch (java.io.IOException err) {
System.err.println("Error while opening '" + directory + name + "'");
System.err.println(err);
IJ.showStatus("Error opening file.");
} catch (HDFException err) {
System.err.println("Error while opening '" + directory + name + "'");
System.err.println(err);
IJ.showStatus("Error opening file.");
} catch (HDF5Exception err) {
System.err.println("Error while opening '" + directory + name + "'");
System.err.println(err);
IJ.showStatus("Error opening file.");
} catch (Exception err) {
System.err.println("Error while opening '" + directory + name + "'");
System.err.println(err);
} catch (Exception e) {
logger.log(Level.WARNING, "Error while opening '" + directory + name + "'", e);
IJ.showStatus("Error opening file.");
} catch (OutOfMemoryError o) {
IJ.outOfMemory("Load HDF5");
@ -1722,18 +1720,7 @@ public class HDF5Reader implements PlugIn {
IJ.showProgress(1.0);
}
// int byteToUnsignedByte(int n)
// {
// if (n < 0)
// return (256 + n);
// return n;
// }
private int progressSteps = 50;
/*-----------------------------------------------------------------------
* helpers for hdf5 library
*-----------------------------------------------------------------------*/
private static List<Dataset> getDataSetList(Group g, List<Dataset> datasets) throws Exception {
if (g == null){
return datasets;
@ -1744,7 +1731,6 @@ public class HDF5Reader implements PlugIn {
if (obj instanceof Dataset) {
((Dataset) obj).init();
datasets.add((Dataset) obj);
// System.out.println(obj.getFullName());
} else if (obj instanceof Group) {
datasets = (getDataSetList((Group) obj, datasets));
}
@ -1752,9 +1738,11 @@ public class HDF5Reader implements PlugIn {
return datasets;
}
private static List<Attribute> getAttrList(HObject ds) throws Exception {
if (ds == null)
if (ds == null){
return null;
}
List<Attribute> attributes = new ArrayList<Attribute>();
List<?> members = ds.getMetadata();
@ -1763,48 +1751,39 @@ public class HDF5Reader implements PlugIn {
for (int i = 0; i < n; i++) {
obj = (Metadata) members.get(i);
if (obj instanceof Attribute) {
try {
System.out.println(((Attribute) obj).getName());
attributes.add((Attribute) obj);
} catch (java.lang.UnsupportedOperationException e) {
System.out.println("Caught UnsupportedOperationException datasets2.add((Dataset) obj)");
System.out.println(e.getMessage());
}
}
}
return attributes;
}
private static Attribute getAttribute(Dataset ds, String attrName) throws Exception {
List<Attribute> attrList = getAttrList((HObject) ds);
Iterator<Attribute> attrIter = attrList.iterator();
while (attrIter.hasNext()) {
Attribute attr = attrIter.next();
if (attr.getName().equals(attrName)) {
return attr;
for(Attribute a: getAttrList((HObject) ds)){
if (a.getName().equals(attrName)) {
return a;
}
}
return null;
}
private static Attribute getAttribute(HObject ds, String attrName) throws Exception {
List<Attribute> attrList = getAttrList(ds);
Iterator<Attribute> attrIter = attrList.iterator();
while (attrIter.hasNext()) {
Attribute attr = attrIter.next();
System.out.println(attr.getName());
if (attr.getName().equals(attrName)) {
return attr;
for(Attribute a: getAttrList(ds)){
if (a.getName().equals(attrName)) {
return a;
}
}
return null;
}
/*-----------------------------------------------------------------------
* minmax of array
*-----------------------------------------------------------------------*/
/**
* Find min and maximum of array
* @param data
* @param stackSize
* @return
*/
private double[] getMinMax(Object data, long stackSize) {
double[] minmax = new double[2];
@ -1879,13 +1858,11 @@ public class HDF5Reader implements PlugIn {
minmax[1] = tmp[i];
}
}
System.out.println("min: " + minmax[0] + ", max: " + minmax[1]);
logger.info("min: " + minmax[0] + ", max: " + minmax[1]);
return minmax;
}
/*-----------------------------------------------------------------------
* converter functions
*-----------------------------------------------------------------------*/
private float[] convertDoubleToFloat(double[] dataIn) {
float[] dataOut = new float[dataIn.length];
for (int index = 0; index < dataIn.length; index++) {
@ -1894,6 +1871,7 @@ public class HDF5Reader implements PlugIn {
return dataOut;
}
private float[] convertInt32ToFloat(int[] dataIn) {
float[] dataOut = new float[dataIn.length];
for (int index = 0; index < dataIn.length; index++) {
@ -1902,6 +1880,7 @@ public class HDF5Reader implements PlugIn {
return dataOut;
}
private short[] convertInt32ToShort(int[] dataIn) {
short[] dataOut = new short[dataIn.length];
for (int index = 0; index < dataIn.length; index++) {
@ -1910,6 +1889,7 @@ public class HDF5Reader implements PlugIn {
return dataOut;
}
private float[] convertInt64ToFloat(long[] dataIn) {
float[] dataOut = new float[dataIn.length];
for (int index = 0; index < dataIn.length; index++) {
@ -1918,6 +1898,7 @@ public class HDF5Reader implements PlugIn {
return dataOut;
}
private short[] convertInt64ToShort(long[] dataIn) {
short[] dataOut = new short[dataIn.length];
for (int index = 0; index < dataIn.length; index++) {
@ -1926,6 +1907,7 @@ public class HDF5Reader implements PlugIn {
return dataOut;
}
private Object convertToUnsigned(Object dataIn, int unsignedConvSelec) {
Object dataOut = null;
if (unsignedConvSelec == 0) {
@ -1951,10 +1933,8 @@ public class HDF5Reader implements PlugIn {
return dataOut;
}
/*-----------------------------------------------------------------------
* extract subarrays
*-----------------------------------------------------------------------*/
Object extractSubarray(Object data, long startIdx, long numElements) {
private Object extractSubarray(Object data, long startIdx, long numElements) {
Object subarray = null;
if (data instanceof byte[]) {