Removed compiler warnings

ebner 2014-02-25 20:20:31 +01:00
parent b381b87ff9
commit c0dd33436e
7 changed files with 3135 additions and 4251 deletions

pom.xml

@@ -13,4 +13,21 @@
<scope>provided</scope>
</dependency>
</dependencies>
<build>
<plugins>
<plugin>
<artifactId>maven-assembly-plugin</artifactId>
<version>2.4</version>
<configuration>
<finalName>HDF5_Viewer-${pom.version}</finalName>
<appendAssemblyId>false</appendAssemblyId>
<archive />
<descriptorRefs>
<descriptorRef>jar-with-dependencies</descriptorRef>
</descriptorRefs>
</configuration>
</plugin>
</plugins>
</build>
</project>

HDF5_Batch_.java

@@ -1,51 +0,0 @@
package ch.psi.imagej.hdf5;
/* =========================================================================
*
* Copyright 2011 Matthias Schlachter
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*=========================================================================*/
public class HDF5_Batch_
{
public static void run(String arg)
{
parseArgs(arg);
System.out.println("filename");
System.out.println(_filename);
System.out.println("varnames");
for(int i=0; i<_varnames.length; i++)
System.out.println(_varnames[i]);
HDF5_Writer_ w = new HDF5_Writer_();
w.setToBatchMode(_filename,_varnames);
w.run(null);
}
private static void parseArgs(String arg)
{
String[] result = arg.split("]\\s");
_filename = result[0].replaceAll("file=\\[","");
String[] splitVars = result[1].split("\\s");
_varnames = new String[splitVars.length];
for (int x=0; x<splitVars.length; x++)
_varnames[x] = splitVars[x];
}
static private String _filename = null;
static private String[] _varnames = null;
// end of class
}
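
For reference, the deleted batch entry point above was driven by a single argument string of the form "file=[<path>] <dataset> <dataset> ...". A minimal standalone sketch (not part of this commit; the class name, path and dataset names are made-up examples) of how parseArgs split that string:

// Standalone sketch of the argument format HDF5_Batch_.parseArgs expected.
public class BatchArgSketch {
    public static void main(String[] args) {
        String arg = "file=[/tmp/out.h5] /t0/channel0 /t0/channel1";
        String[] result = arg.split("]\\s");                     // split off the file token
        String filename = result[0].replaceAll("file=\\[", "");  // -> /tmp/out.h5
        String[] varnames = result[1].split("\\s");              // -> /t0/channel0, /t0/channel1
        System.out.println(filename);
        for (String v : varnames) {
            System.out.println(v);
        }
    }
}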

HDF5_Config.java

@@ -1,22 +1,4 @@
package ch.psi.imagej.hdf5;
/* =========================================================================
*
* Copyright 2011 Matthias Schlachter
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*=========================================================================*/
import ij.Prefs;
import ij.gui.GenericDialog;
@@ -24,53 +6,33 @@ import ij.plugin.PlugIn;
import java.util.regex.*;
import java.lang.String;
public class HDF5_Config implements PlugIn {
public class HDF5_Config implements PlugIn
{
public void run(String arg)
{
public void run(String arg) {
// set default values
setDefaultsIfNoValueExists();
// read ImageJ Preferences
boolean groupVarsByName =
Boolean.getBoolean(getDefaultValue("HDF5.groupVarsByName"));
boolean groupVarsByName = Boolean.getBoolean(getDefaultValue("HDF5.groupVarsByName"));
groupVarsByName = Prefs.get("HDF5.groupVarsByName", groupVarsByName);
boolean showUnmatchedDataSetNames =
Boolean.getBoolean(getDefaultValue("HDF5.showUnmatchedDataSetNames"));
showUnmatchedDataSetNames = Prefs.get("HDF5.showUnmatchedDataSetNames",
showUnmatchedDataSetNames);
boolean showUnmatchedDataSetNames = Boolean.getBoolean(getDefaultValue("HDF5.showUnmatchedDataSetNames"));
showUnmatchedDataSetNames = Prefs.get("HDF5.showUnmatchedDataSetNames", showUnmatchedDataSetNames);
String groupVarsByNameFormatGroup =
getDefaultValue("HDF5.groupVarsByNameFormatGroup");
groupVarsByNameFormatGroup
= Prefs.get("HDF5.groupVarsByNameFormatGroup",
groupVarsByNameFormatGroup);
String groupVarsByNameFormatGroup = getDefaultValue("HDF5.groupVarsByNameFormatGroup");
groupVarsByNameFormatGroup = Prefs.get("HDF5.groupVarsByNameFormatGroup", groupVarsByNameFormatGroup);
String groupVarsByNameFormat =
getDefaultValue("HDF5.groupVarsByNameFormat");
groupVarsByNameFormat = Prefs.get("HDF5.groupVarsByNameFormat",
groupVarsByNameFormat);
String groupVarsByNameFormat = getDefaultValue("HDF5.groupVarsByNameFormat");
groupVarsByNameFormat = Prefs.get("HDF5.groupVarsByNameFormat", groupVarsByNameFormat);
String dollarRegexpForGrouping =
getDefaultValue("HDF5.dollarRegexpForGrouping");
dollarRegexpForGrouping = Prefs.get("HDF5.dollarRegexpForGrouping",
dollarRegexpForGrouping);
String dollarRegexpForGrouping = getDefaultValue("HDF5.dollarRegexpForGrouping");
dollarRegexpForGrouping = Prefs.get("HDF5.dollarRegexpForGrouping", dollarRegexpForGrouping);
GenericDialog configDiag =
new GenericDialog("HDF5 Preferences");
GenericDialog configDiag = new GenericDialog("HDF5 Preferences");
configDiag.addMessage("Reader:");
configDiag.addCheckbox("Group data set names instead of showing a list " +
"of data set names.",
groupVarsByName);
configDiag.addCheckbox("Show unmatched data set names in a separate list",
showUnmatchedDataSetNames);
configDiag.addStringField("HDF5 group containing pattern " +
"for data set grouping: ",
groupVarsByNameFormatGroup,15);
configDiag.addStringField("Pattern for grouping (if no attributes" +
" are found): ",
groupVarsByNameFormat,15);
configDiag.addCheckbox("Group data set names instead of showing a list " + "of data set names.", groupVarsByName);
configDiag.addCheckbox("Show unmatched data set names in a separate list", showUnmatchedDataSetNames);
configDiag.addStringField("HDF5 group containing pattern " + "for data set grouping: ", groupVarsByNameFormatGroup, 15);
configDiag.addStringField("Pattern for grouping (if no attributes" + " are found): ", groupVarsByNameFormat, 15);
// configDiag.addStringField("$ regexp (ignored because only numbers" +
// " work right now): ",
// dollarRegexpForGrouping,15);
@@ -78,16 +40,14 @@ public class HDF5_Config implements PlugIn
String yesLabel = "Save";
String noLabel = "Reset";
configDiag.enableYesNoCancel(yesLabel,noLabel);
configDiag.enableYesNoCancel(yesLabel, noLabel);
configDiag.showDialog();
if(configDiag.wasCanceled())
{
if (configDiag.wasCanceled()) {
// do nothing
return;
}
if(!configDiag.wasOKed())
{
if (!configDiag.wasOKed()) {
// reset button was pressed
System.out.println("reset button was pressed");
// reset all and return a new dialog
@@ -98,39 +58,27 @@ public class HDF5_Config implements PlugIn
// get parameters check if they are correct
groupVarsByName = configDiag.getNextBoolean();
System.out.println("groupVarsByName: " +
Boolean.toString(groupVarsByName));
System.out.println("groupVarsByName: " + Boolean.toString(groupVarsByName));
showUnmatchedDataSetNames = configDiag.getNextBoolean();
System.out.println("showUnmatchedDataSetNames: " +
Boolean.toString(showUnmatchedDataSetNames));
System.out.println("showUnmatchedDataSetNames: " + Boolean.toString(showUnmatchedDataSetNames));
groupVarsByNameFormatGroup = configDiag.getNextString();
System.out.println("groupVarsByNameFormatGroup: " +
groupVarsByNameFormatGroup);
System.out.println("groupVarsByNameFormatGroup: " + groupVarsByNameFormatGroup);
groupVarsByNameFormat = configDiag.getNextString();
System.out.println("groupVarsByNameFormat: " +
groupVarsByNameFormat);
System.out.println("groupVarsByNameFormat: " + groupVarsByNameFormat);
// dollarRegexpForGrouping = configDiag.getNextString();
// System.out.println("dollarRegexpForGrouping: " +
// dollarRegexpForGrouping);
try
{
String[] formatTokens
= HDF5_GroupedVarnames.parseFormatString(groupVarsByNameFormat,
dollarRegexpForGrouping);
for(int i=0; i<formatTokens.length; i++)
{
System.out.println("tok " + Integer.toString(i) + " : "
+ formatTokens[i]);
try {
String[] formatTokens = HDF5_GroupedVarnames.parseFormatString(groupVarsByNameFormat, dollarRegexpForGrouping);
for (int i = 0; i < formatTokens.length; i++) {
System.out.println("tok " + Integer.toString(i) + " : " + formatTokens[i]);
}
}
catch(PatternSyntaxException e)
{
} catch (PatternSyntaxException e) {
// produce an error dialog an start over
String errMsg = e.getMessage();
System.out.println(errMsg);
@@ -142,89 +90,56 @@ public class HDF5_Config implements PlugIn
System.out.println("Saving...");
// all OK and "Save" was pressed, so save it...
Prefs.set("HDF5.groupVarsByName",
groupVarsByName);
Prefs.set("HDF5.showUnmatchedDataSetNames",
showUnmatchedDataSetNames);
Prefs.set("HDF5.groupVarsByNameFormatGroup",
groupVarsByNameFormatGroup);
Prefs.set("HDF5.groupVarsByNameFormat",
groupVarsByNameFormat);
Prefs.set("HDF5.groupVarsByName", groupVarsByName);
Prefs.set("HDF5.showUnmatchedDataSetNames", showUnmatchedDataSetNames);
Prefs.set("HDF5.groupVarsByNameFormatGroup", groupVarsByNameFormatGroup);
Prefs.set("HDF5.groupVarsByNameFormat", groupVarsByNameFormat);
//
// ignore the $ regexp for now, because only numbers work
//
Prefs.set("HDF5.dollarRegexpForGrouping",
dollarRegexpForGrouping);
Prefs.set("HDF5.dollarRegexpForGrouping", dollarRegexpForGrouping);
}
public static void setDefaultsIfNoValueExists()
{
boolean groupVarsByName =
Boolean.getBoolean(getDefaultValue("HDF5.groupVarsByName"));
public static void setDefaultsIfNoValueExists() {
boolean groupVarsByName = Boolean.getBoolean(getDefaultValue("HDF5.groupVarsByName"));
groupVarsByName = Prefs.get("HDF5.groupVarsByName", groupVarsByName);
Prefs.set("HDF5.groupVarsByName",
groupVarsByName);
Prefs.set("HDF5.groupVarsByName", groupVarsByName);
boolean showUnmatchedDataSetNames =
Boolean.getBoolean(getDefaultValue("HDF5.showUnmatchedDataSetNames"));
showUnmatchedDataSetNames = Prefs.get("HDF5.showUnmatchedDataSetNames",
showUnmatchedDataSetNames);
Prefs.set("HDF5.showUnmatchedDataSetNames",
showUnmatchedDataSetNames);
boolean showUnmatchedDataSetNames = Boolean.getBoolean(getDefaultValue("HDF5.showUnmatchedDataSetNames"));
showUnmatchedDataSetNames = Prefs.get("HDF5.showUnmatchedDataSetNames", showUnmatchedDataSetNames);
Prefs.set("HDF5.showUnmatchedDataSetNames", showUnmatchedDataSetNames);
String groupVarsByNameFormatGroup =
getDefaultValue("HDF5.groupVarsByNameFormatGroup");
groupVarsByNameFormatGroup
= Prefs.get("HDF5.groupVarsByNameFormatGroup",
groupVarsByNameFormatGroup);
Prefs.set("HDF5.groupVarsByNameFormatGroup",
groupVarsByNameFormatGroup);
String groupVarsByNameFormatGroup = getDefaultValue("HDF5.groupVarsByNameFormatGroup");
groupVarsByNameFormatGroup = Prefs.get("HDF5.groupVarsByNameFormatGroup", groupVarsByNameFormatGroup);
Prefs.set("HDF5.groupVarsByNameFormatGroup", groupVarsByNameFormatGroup);
String groupVarsByNameFormat =
getDefaultValue("HDF5.groupVarsByNameFormat");
groupVarsByNameFormat = Prefs.get("HDF5.groupVarsByNameFormat",
groupVarsByNameFormat);
Prefs.set("HDF5.groupVarsByNameFormat",
groupVarsByNameFormat);
String groupVarsByNameFormat = getDefaultValue("HDF5.groupVarsByNameFormat");
groupVarsByNameFormat = Prefs.get("HDF5.groupVarsByNameFormat", groupVarsByNameFormat);
Prefs.set("HDF5.groupVarsByNameFormat", groupVarsByNameFormat);
String dollarRegexpForGrouping =
getDefaultValue("HDF5.dollarRegexpForGrouping");
dollarRegexpForGrouping = Prefs.get("HDF5.dollarRegexpForGrouping",
dollarRegexpForGrouping);
Prefs.set("HDF5.dollarRegexpForGrouping",
dollarRegexpForGrouping);
String dollarRegexpForGrouping = getDefaultValue("HDF5.dollarRegexpForGrouping");
dollarRegexpForGrouping = Prefs.get("HDF5.dollarRegexpForGrouping", dollarRegexpForGrouping);
Prefs.set("HDF5.dollarRegexpForGrouping", dollarRegexpForGrouping);
}
public static String getDefaultValue(String key)
{
if(key.equals("HDF5.groupVarsByName"))
{
public static String getDefaultValue(String key) {
if (key.equals("HDF5.groupVarsByName")) {
boolean groupVarsByName = true; // default
return Boolean.toString(groupVarsByName);
}
else if(key.equals("HDF5.showUnmatchedDataSetNames"))
{
} else if (key.equals("HDF5.showUnmatchedDataSetNames")) {
boolean showUnmatchedDataSetNames = true; // default
return Boolean.toString(showUnmatchedDataSetNames);
}
else if(key.equals("HDF5.groupVarsByNameFormatGroup"))
{
} else if (key.equals("HDF5.groupVarsByNameFormatGroup")) {
String groupVarsByNameFormatGroup = "/hints"; // default
return groupVarsByNameFormatGroup;
}
else if(key.equals("HDF5.groupVarsByNameFormat"))
{
} else if (key.equals("HDF5.groupVarsByNameFormat")) {
String groupVarsByNameFormat = "/t$T/channel$C"; // default
return groupVarsByNameFormat;
}
else if(key.equals("HDF5.dollarRegexpForGrouping"))
{
} else if (key.equals("HDF5.dollarRegexpForGrouping")) {
String dollarRegexpForGrouping = "[0-9]+"; // default
return dollarRegexpForGrouping;
}
else
{
} else {
System.out.println("No default value for key: " + key);
return null;
}
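
HDF5_Config round-trips all of its settings through ImageJ's ij.Prefs under "HDF5.*" keys, seeded with the defaults returned by getDefaultValue. A minimal sketch (not part of this commit; the sketch class name is hypothetical and ij.jar is assumed on the classpath) of reading those keys back with the same defaults:

// Sketch: read the preferences saved by HDF5_Config, falling back to its defaults.
import ij.Prefs;

public class Hdf5PrefsSketch {
    public static void main(String[] args) {
        boolean groupVarsByName = Prefs.get("HDF5.groupVarsByName", true);
        boolean showUnmatched = Prefs.get("HDF5.showUnmatchedDataSetNames", true);
        String formatGroup = Prefs.get("HDF5.groupVarsByNameFormatGroup", "/hints");
        String format = Prefs.get("HDF5.groupVarsByNameFormat", "/t$T/channel$C");
        String dollarRegexp = Prefs.get("HDF5.dollarRegexpForGrouping", "[0-9]+");
        System.out.println("groupVarsByName: " + groupVarsByName);
        System.out.println("showUnmatchedDataSetNames: " + showUnmatched);
        System.out.println("grouping: " + formatGroup + ", " + format + ", " + dollarRegexp);
    }
}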

HDF5_GroupedVarnames.java

@@ -1,21 +1,4 @@
package ch.psi.imagej.hdf5;
/* =========================================================================
*
* Copyright 2011 Matthias Schlachter
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*=========================================================================*/
import java.util.regex.*;
import java.util.ArrayList;
@@ -23,71 +6,54 @@ import java.util.Arrays;
import java.util.Iterator;
import java.util.List;
public class HDF5_GroupedVarnames
{
public static String[] parseFormatString(String groupVarsByNameFormat,
String dollarRegexpForGrouping)
throws PatternSyntaxException
{
public class HDF5_GroupedVarnames {
private final List<String> matchedVarNames = new ArrayList<String>();
private final List<String> unMatchedVarNames = new ArrayList<String>();
private final List<TimeFrame> frameList = new ArrayList<TimeFrame>();
private String[] formatTokens = null;
private String formatString = null;
private int minFrameIndex = -1;
private int maxFrameIndex = -1;
private int minChannelIndex = -1;
private int maxChannelIndex = -1;
private int nChannels = -1;
public static String[] parseFormatString(String groupVarsByNameFormat, String dollarRegexpForGrouping) throws PatternSyntaxException {
String[] formatTokens = null;
formatTokens = groupVarsByNameFormat.split("([$]T|[$]C)");
boolean containsFormatVars
= groupVarsByNameFormat.contains("$T") &&
groupVarsByNameFormat.contains("$C");
boolean rightOrderOfFormatVars
= groupVarsByNameFormat.indexOf("$T")
< groupVarsByNameFormat.indexOf("$C");
boolean containsFormatVars = groupVarsByNameFormat.contains("$T") && groupVarsByNameFormat.contains("$C");
boolean rightOrderOfFormatVars = groupVarsByNameFormat.indexOf("$T") < groupVarsByNameFormat.indexOf("$C");
for(int i=0; i<formatTokens.length; i++)
{
System.out.println("tok " + Integer.toString(i) + " : "
+ formatTokens[i]);
for (int i = 0; i < formatTokens.length; i++) {
System.out.println("tok " + Integer.toString(i) + " : " + formatTokens[i]);
}
if(formatTokens.length < 2 ||
!containsFormatVars ||
!rightOrderOfFormatVars)
{
throw new PatternSyntaxException("Your format string has errors. "+
"You must provide $T and $C and "+
"also in correct order!",
groupVarsByNameFormat,
-1);
if (formatTokens.length < 2 || !containsFormatVars || !rightOrderOfFormatVars) {
throw new PatternSyntaxException("Your format string has errors. " + "You must provide $T and $C and " + "also in correct order!", groupVarsByNameFormat, -1);
}
String regexp = groupVarsByNameFormat;
regexp = regexp.replace("$T",
dollarRegexpForGrouping);
regexp = regexp.replace("$C",
dollarRegexpForGrouping);
regexp = regexp.replace("$T", dollarRegexpForGrouping);
regexp = regexp.replace("$C", dollarRegexpForGrouping);
System.out.println(regexp);
// check if we have a regexp;
Pattern p = null;
p = Pattern.compile(regexp);
Pattern.compile(regexp);
return formatTokens;
}
public void parseVarNames(String[] varNames,
String groupVarsByNameFormat,
String dollarRegexpForGrouping)
{
public void parseVarNames(String[] varNames, String groupVarsByNameFormat, String dollarRegexpForGrouping) {
// save format string
formatString = groupVarsByNameFormat;
try
{
formatTokens = parseFormatString(groupVarsByNameFormat,
dollarRegexpForGrouping);
}
catch(PatternSyntaxException e)
{
try {
formatTokens = parseFormatString(groupVarsByNameFormat, dollarRegexpForGrouping);
} catch (PatternSyntaxException e) {
// produce an error dialog an start over
String errMsg = e.getMessage();
System.out.println(errMsg);
return;
}
String regexp = groupVarsByNameFormat;
regexp = regexp.replace("$T",
dollarRegexpForGrouping);
regexp = regexp.replace("$C",
dollarRegexpForGrouping);
regexp = regexp.replace("$T", dollarRegexpForGrouping);
regexp = regexp.replace("$C", dollarRegexpForGrouping);
System.out.println(regexp);
// check if we have a regexp;
@@ -96,17 +62,13 @@ public class HDF5_GroupedVarnames
/*---------------------------------------------------------------------
* parse var names
*---------------------------------------------------------------------*/
for (int i = 0; i < varNames.length; i++)
{
Matcher m=p.matcher(varNames[i]);
boolean b=m.matches();
if(b)
{
for (int i = 0; i < varNames.length; i++) {
Matcher m = p.matcher(varNames[i]);
boolean b = m.matches();
if (b) {
System.out.println(varNames[i]);
matchedVarNames.add(varNames[i]);
}
else
{
} else {
unMatchedVarNames.add(varNames[i]);
}
}
@@ -114,42 +76,33 @@ public class HDF5_GroupedVarnames
// ugly hack for sorting ArrayList
Object[] frameListAsArray = frameList.toArray();
Arrays.sort(frameListAsArray);
for(int i=0;i<frameListAsArray.length;i++)
frameList.set(i,(TimeFrame)frameListAsArray[i]);
for (int i = 0; i < frameListAsArray.length; i++)
frameList.set(i, (TimeFrame) frameListAsArray[i]);
}
public TimeFrame getFrame(int i)
{
if(i < frameList.size() && i > -1)
public TimeFrame getFrame(int i) {
if (i < frameList.size() && i > -1)
return frameList.get(i);
else
return null;
}
private void splitGroupedVarnames()
{
private void splitGroupedVarnames() {
Iterator<String> vars = matchedVarNames.iterator();
while(vars.hasNext())
{
while (vars.hasNext()) {
String varName = vars.next();
String[] tokens = null;
if(formatTokens.length == 2)
{
if (formatTokens.length == 2) {
tokens = varName.split(formatTokens[1]);
}
else if(formatTokens.length == 3)
{
} else if (formatTokens.length == 3) {
tokens = varName.split(formatTokens[2]);
varName = tokens[0];
tokens = varName.split(formatTokens[1]);
}
if(tokens.length < 2 || tokens.length > 3)
{
if (tokens.length < 2 || tokens.length > 3) {
System.out.println("Error parsing varname!");
}
else
{
} else {
Integer channelIndex = new Integer(tokens[1]);
System.out.println("channelIndex: " + channelIndex.toString());
System.out.println("left token: " + tokens[0]);
@@ -157,88 +110,81 @@ public class HDF5_GroupedVarnames
Integer frameIndex = new Integer(tokens[1]);
System.out.println("frameIndex: " + frameIndex.toString());
if(minFrameIndex == -1)
if (minFrameIndex == -1)
minFrameIndex = frameIndex.intValue();
minFrameIndex = Math.min(minFrameIndex,frameIndex.intValue());
minFrameIndex = Math.min(minFrameIndex, frameIndex.intValue());
if(maxFrameIndex == -1)
if (maxFrameIndex == -1)
maxFrameIndex = frameIndex.intValue();
maxFrameIndex = Math.max(maxFrameIndex,frameIndex.intValue());
maxFrameIndex = Math.max(maxFrameIndex, frameIndex.intValue());
if(minChannelIndex == -1)
if (minChannelIndex == -1)
minChannelIndex = channelIndex.intValue();
minChannelIndex = Math.min(minChannelIndex,channelIndex.intValue());
minChannelIndex = Math.min(minChannelIndex, channelIndex.intValue());
if(maxChannelIndex == -1)
if (maxChannelIndex == -1)
maxChannelIndex = channelIndex.intValue();
maxChannelIndex = Math.max(maxChannelIndex,channelIndex.intValue());
maxChannelIndex = Math.max(maxChannelIndex, channelIndex.intValue());
TimeFrame frame = new TimeFrame(frameIndex.intValue());
int idx = frameList.indexOf(frame);
if(idx != -1)
{
if (idx != -1) {
frame = (TimeFrame) frameList.get(idx);
frame.addChannel(channelIndex.intValue());
}
else
{
} else {
frame.addChannel(channelIndex.intValue());
frameList.add(frame);
}
//System.out.println(frame.toString());
// System.out.println(frame.toString());
}
}
}
public int getMinFrameIndex()
{
public int getMinFrameIndex() {
return minFrameIndex;
}
public int getMaxFrameIndex()
{
public int getMaxFrameIndex() {
return maxFrameIndex;
}
public int getMinChannelIndex()
{
public int getMinChannelIndex() {
return minChannelIndex;
}
public int getMaxChannelIndex()
{
public int getMaxChannelIndex() {
return maxChannelIndex;
}
public int getNFrames()
{
public int getNFrames() {
return frameList.size();
}
public int getNChannels()
{
public int getNChannels() {
// TODO: check all frames for min/max of channels not index
if(nChannels == -1)
return maxChannelIndex-minChannelIndex+1;
if (nChannels == -1)
return maxChannelIndex - minChannelIndex + 1;
else
return nChannels;
}
public boolean hasAllFramesInRange()
{
return frameList.size() == (maxFrameIndex-minFrameIndex+1);
public boolean hasAllFramesInRange() {
return frameList.size() == (maxFrameIndex - minFrameIndex + 1);
}
public String toString()
{
public String toString() {
String s = "Data set statistics\n";
s = s + "----------------------------------\n";
s = s + "nFrames: " + Integer.toString(frameList.size()) + "\n";
s = s + "minFrameIndex: " + Integer.toString(minFrameIndex) + "\n";
s = s + "maxFrameIndex: " + Integer.toString(maxFrameIndex) + "\n";
s = s
+ "hasAllFramesInRange: "
+ Boolean.toString(hasAllFramesInRange()) + "\n";
s = s + "hasAllFramesInRange: " + Boolean.toString(hasAllFramesInRange()) + "\n";
s = s + "minChannelIndex: " + Integer.toString(minChannelIndex) + "\n";
s = s + "maxChannelIndex: " + Integer.toString(maxChannelIndex) + "\n";
// String[] toks = getFormatTokens();
Iterator<TimeFrame> frames = frameList.iterator();
while(frames.hasNext())
{
while (frames.hasNext()) {
TimeFrame f = frames.next();
s = s + f.toString() + "\n";
// s = s + "(" + toks[0] +
@@ -253,51 +199,34 @@ public class HDF5_GroupedVarnames
return s;
}
public List<String> getUnmatchedVarNames()
{
public List<String> getUnmatchedVarNames() {
return unMatchedVarNames;
}
public String[] getFormatTokens()
{
public String[] getFormatTokens() {
return formatTokens;
}
public String getFormatString()
{
public String getFormatString() {
return formatString;
}
public void setFrameAndChannelRange(int minFrame,
int skipFrame,
int maxFrame,
int minChannel,
int skipChannel,
int maxChannel)
{
System.out.println("Setting frame range: " +
Integer.toString(minFrame) + ":" +
Integer.toString(skipFrame) + ":" +
Integer.toString(maxFrame));
System.out.println("Setting channel range: " +
Integer.toString(minChannel) + ":" +
Integer.toString(skipChannel) + ":" +
Integer.toString(maxChannel));
if(hasAllFramesInRange())
{
public void setFrameAndChannelRange(int minFrame, int skipFrame, int maxFrame, int minChannel, int skipChannel, int maxChannel) {
System.out.println("Setting frame range: " + Integer.toString(minFrame) + ":" + Integer.toString(skipFrame) + ":" + Integer.toString(maxFrame));
System.out.println("Setting channel range: " + Integer.toString(minChannel) + ":" + Integer.toString(skipChannel) + ":" + Integer.toString(maxChannel));
if (hasAllFramesInRange()) {
// copy frames
List<TimeFrame> completeFrameList = new ArrayList<TimeFrame>(frameList);
// clear frames
frameList.clear();
// insert wanted frames and channels
for(int f=minFrame;f<maxFrame+1;f+=skipFrame)
{
for (int f = minFrame; f < maxFrame + 1; f += skipFrame) {
TimeFrame frameAllChannels = completeFrameList.get(f);
TimeFrame frame = new TimeFrame(frameAllChannels.getFrameIndex());
// TODO remove unwanted channels
for(int c=minChannel;c<maxChannel+1;c+=skipChannel)
{
//System.out.println("Adding channels: " + Integer.toString(c));
for (int c = minChannel; c < maxChannel + 1; c += skipChannel) {
// System.out.println("Adding channels: " +
// Integer.toString(c));
frame.addChannel(c);
}
// if(nChannels == -1)
@@ -305,57 +234,37 @@ public class HDF5_GroupedVarnames
frameList.add(frame);
}
// TODO update min/max of frames/channels
nChannels = ((maxChannel-minChannel) / skipChannel)+1;
nChannels = ((maxChannel - minChannel) / skipChannel) + 1;
System.out.println("Adding nChannels: " + Integer.toString(nChannels));
}
else
{
System.out.println("-------------------------\n"+
"hasAllFramesInRange==false\n"+
"-------------------------");
} else {
System.out.println("-------------------------\n" + "hasAllFramesInRange==false\n" + "-------------------------");
// copy frames
List<TimeFrame> completeFrameList = new ArrayList<TimeFrame>(frameList);
// clear frames
frameList.clear();
// insert wanted frames and channels
for(int f=minFrame;f<maxFrame+1;f+=skipFrame)
{
for (int f = minFrame; f < maxFrame + 1; f += skipFrame) {
TimeFrame frame = new TimeFrame(f);
int idx = completeFrameList.indexOf(frame);
// System.out.println("index of frame in list: " +
// Integer.toString(idx));
if(idx != -1)
{
if (idx != -1) {
// TODO remove unwanted channels
for(int c=minChannel;c<maxChannel+1;c+=skipChannel)
{
//System.out.println("Adding channels: " + Integer.toString(c));
for (int c = minChannel; c < maxChannel + 1; c += skipChannel) {
// System.out.println("Adding channels: " +
// Integer.toString(c));
frame.addChannel(c);
}
// if(nChannels == -1)
// nChannels = frame.getNChannels();
frameList.add(frame);
}
else
{
System.out.println("Timestep "+Integer.toString(f)+
" is missing!");
} else {
System.out.println("Timestep " + Integer.toString(f) + " is missing!");
}
}
// TODO update min/max of frames/channels
nChannels = ((maxChannel-minChannel) / skipChannel)+1;
nChannels = ((maxChannel - minChannel) / skipChannel) + 1;
System.out.println("Adding nChannels: " + Integer.toString(nChannels));
}
}
private final List<String> matchedVarNames = new ArrayList<String>();
private final List<String> unMatchedVarNames = new ArrayList<String>();
private final List<TimeFrame> frameList = new ArrayList<TimeFrame>();
private String[] formatTokens = null;
private String formatString = null;
private int minFrameIndex = -1;
private int maxFrameIndex = -1;
private int minChannelIndex = -1;
private int maxChannelIndex = -1;
private int nChannels = -1;
}
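
parseFormatString above splits the grouping pattern on its $T/$C placeholders, verifies that both occur and that $T precedes $C, and compiles the pattern with the dollar regexp substituted. A small sketch (not part of this commit; the sketch class name is hypothetical and the plugin classes are assumed on the classpath) of what the default pattern yields:

// Sketch: tokens produced for the default grouping pattern "/t$T/channel$C".
import ch.psi.imagej.hdf5.HDF5_GroupedVarnames;

public class FormatStringSketch {
    public static void main(String[] args) {
        // With "[0-9]+" substituted for $T and $C, the pattern matches names like /t3/channel1.
        String[] tokens = HDF5_GroupedVarnames.parseFormatString("/t$T/channel$C", "[0-9]+");
        for (String tok : tokens) {
            System.out.println(tok);  // prints "/t" then "/channel"
        }
        // A pattern missing $T or $C, or with them out of order, throws PatternSyntaxException.
    }
}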

File diff suppressed because it is too large

HDF5_Writer_.java

@@ -1,22 +1,24 @@
package ch.psi.imagej.hdf5;
/* =========================================================================
/*
* =========================================================================
*
* Copyright 2011 Matthias Schlachter
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
* Licensed under the Apache License, Version 2.0 (the "License"); you may not
* use this file except in compliance with the License. You may obtain a copy of
* the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations under
* the License.
*
*=========================================================================*/
* =========================================================================
*/
import ij.*;
import ij.io.*;
@@ -33,15 +35,12 @@ import ncsa.hdf.object.*; // the common object package
import ncsa.hdf.object.h5.*; // the HDF5 implementation
import ncsa.hdf.hdf5lib.exceptions.HDF5Exception;
public class HDF5_Writer_ implements PlugInFilter
{
public class HDF5_Writer_ implements PlugInFilter {
private Boolean _batchMode = false;
private String[] _batchVarNames = null;
private String _batchFileName = null;
public int setup(String arg, ImagePlus imp)
{
if (arg.equals("about"))
{
public int setup(String arg, ImagePlus imp) {
if (arg.equals("about")) {
showAbout();
return DONE;
}
@@ -49,34 +48,26 @@ public class HDF5_Writer_ implements PlugInFilter
// currently RGB-Types are still missing
// see
// http://rsb.info.nih.gov/ij/developer/api/ij/plugin/filter/PlugInFilter.html
return DOES_8G + DOES_16 + DOES_32 + DOES_RGB
+ NO_CHANGES;
return DOES_8G + DOES_16 + DOES_32 + DOES_RGB + NO_CHANGES;
}
public void setToBatchMode(String filename, String [] varnames)
{
public void setToBatchMode(String filename, String[] varnames) {
_batchMode = true;
_batchFileName = filename;
_batchVarNames = varnames;
}
public void run(ImageProcessor ip)
{
public void run(ImageProcessor ip) {
int[] wList = WindowManager.getIDList();
if (wList == null)
{
if (wList == null) {
IJ.error("No windows are open.");
return;
}
String filename = null;
if(_batchMode)
{
if (_batchMode) {
filename = _batchFileName;
}
else
{
} else {
SaveDialog sd = new SaveDialog("Save HDF5 ...", "", ".h5");
String directory = sd.getDirectory();
String name = sd.getFileName();
@@ -88,23 +79,15 @@ public class HDF5_Writer_ implements PlugInFilter
return;
}
// Retrieve an instance of the implementing class for the HDF5 format
FileFormat fileFormat =
FileFormat.getFileFormat(FileFormat.FILE_TYPE_HDF5);
FileFormat fileFormat = FileFormat.getFileFormat(FileFormat.FILE_TYPE_HDF5);
// If the implementing class wasn't found, it's an error.
if (fileFormat == null)
{
if (fileFormat == null) {
System.err.println("Cannot find HDF5 FileFormat.");
return;
}
String[] varNames = null;
if(_batchMode)
{
varNames = _batchVarNames;
}
ImagePlus imp = WindowManager.getCurrentImage();
@@ -112,11 +95,10 @@ public class HDF5_Writer_ implements PlugInFilter
gd = new GenericDialog("Variable Name Selection");
// check for hyperstack
if(imp.getOpenAsHyperStack() || imp.isHyperStack())
{
if (imp.getOpenAsHyperStack() || imp.isHyperStack()) {
System.out.println("This is a hyperstack");
boolean splitChannels = true;
gd.addCheckbox("Split frames and channels",splitChannels);
gd.addCheckbox("Split frames and channels", splitChannels);
gd.addStringField(imp.getTitle(), "/t$T/channel$C");
String title = imp.getTitle();
int nDims = imp.getNDimensions();
@@ -125,98 +107,74 @@ public class HDF5_Writer_ implements PlugInFilter
int nLevs = imp.getNSlices();
int nRows = imp.getHeight();
int nCols = imp.getWidth();
boolean isComposite = imp.isComposite() ;
System.out.println("isComposite: "+Boolean.toString(isComposite));
System.out.println("Saving image \""+title+"\"");
System.out.println("nDims: "+Integer.toString(nDims));
System.out.println("nFrames: "+Integer.toString(nFrames));
System.out.println("nChannels: "+Integer.toString(nChannels));
System.out.println("nSlices: "+Integer.toString(nLevs));
System.out.println("nRows: "+Integer.toString(nRows));
System.out.println("nCols: "+Integer.toString(nCols));
boolean isComposite = imp.isComposite();
System.out.println("isComposite: " + Boolean.toString(isComposite));
System.out.println("Saving image \"" + title + "\"");
System.out.println("nDims: " + Integer.toString(nDims));
System.out.println("nFrames: " + Integer.toString(nFrames));
System.out.println("nChannels: " + Integer.toString(nChannels));
System.out.println("nSlices: " + Integer.toString(nLevs));
System.out.println("nRows: " + Integer.toString(nRows));
System.out.println("nCols: " + Integer.toString(nCols));
gd.showDialog();
if (gd.wasCanceled())
{
if (gd.wasCanceled()) {
IJ.error("Plugin canceled!");
return;
}
splitChannels = gd.getNextBoolean();
String formatString = gd.getNextString();
System.out.println("formatString: "+formatString);
System.out.println("Bitdepth: "+ imp.getBitDepth());
System.out.println("formatString: " + formatString);
System.out.println("Bitdepth: " + imp.getBitDepth());
System.out.println("Saving HDF5 File: " + filename);
int imgColorDepth = imp.getBitDepth();
int imgColorType = imp.getType();
Datatype type = null;
if (imgColorType == ImagePlus.GRAY8)
{
System.out.println(" bit depth: " + imgColorDepth
+ ", type: GRAY8");
type = new H5Datatype(Datatype.CLASS_CHAR, Datatype.NATIVE,
Datatype.NATIVE, Datatype.SIGN_NONE);
}
else if (imgColorType == ImagePlus.GRAY16)
{
System.out.println(" bit depth: " + imgColorDepth
+ ", type: GRAY16");
if (imgColorType == ImagePlus.GRAY8) {
System.out.println(" bit depth: " + imgColorDepth + ", type: GRAY8");
type = new H5Datatype(Datatype.CLASS_CHAR, Datatype.NATIVE, Datatype.NATIVE, Datatype.SIGN_NONE);
} else if (imgColorType == ImagePlus.GRAY16) {
System.out.println(" bit depth: " + imgColorDepth + ", type: GRAY16");
int typeSizeInByte = 2;
type = new H5Datatype(Datatype.CLASS_INTEGER, typeSizeInByte,
Datatype.NATIVE, Datatype.SIGN_NONE);
}
else if (imgColorType == ImagePlus.GRAY32)
{
System.out.println(" bit depth: " + imgColorDepth
+ ", type: GRAY32");
int typeSizeInByte = 4;
type = new H5Datatype(Datatype.CLASS_FLOAT, Datatype.NATIVE,
Datatype.NATIVE, -1);
type = new H5Datatype(Datatype.CLASS_INTEGER, typeSizeInByte, Datatype.NATIVE, Datatype.SIGN_NONE);
} else if (imgColorType == ImagePlus.GRAY32) {
System.out.println(" bit depth: " + imgColorDepth + ", type: GRAY32");
// int typeSizeInByte = 4;
type = new H5Datatype(Datatype.CLASS_FLOAT, Datatype.NATIVE, Datatype.NATIVE, -1);
}
// open the outfile
H5File outFile = null;
try
{
outFile = (H5File) fileFormat.createFile(filename,
FileFormat.FILE_CREATE_OPEN);
if (!outFile.canWrite())
{
try {
outFile = (H5File) fileFormat.createFile(filename, FileFormat.FILE_CREATE_OPEN);
if (!outFile.canWrite()) {
IJ.error("File `" + filename + "`is readonly!");
return;
}
// open the file
outFile.open();
if(splitChannels)
{
if (splitChannels) {
// parse format string
String[] formatTokens =
HDF5_GroupedVarnames.
parseFormatString(formatString,
"[0-9]+"); // dummy regexp
String[] formatTokens = HDF5_GroupedVarnames.parseFormatString(formatString, "[0-9]+"); // dummy
// regexp
long[] channelDims = null;
if(nLevs>1)
{
if (nLevs > 1) {
channelDims = new long[3];
channelDims[0] = nLevs;
channelDims[1] = nRows;
channelDims[2] = nCols;
}
else
{
} else {
channelDims = new long[2];
channelDims[0] = nRows;
channelDims[1] = nCols;
}
// iterate over frames and channels
ImageStack stack = imp.getStack();
for(int f=0;f<nFrames;f++)
{
for (int f = 0; f < nFrames; f++) {
IJ.showProgress(f, nFrames);
for(int c=0;c<nChannels;c++)
{
String fullName = makeDataSetName(formatTokens,
f,c);
for (int c = 0; c < nChannels; c++) {
String fullName = makeDataSetName(formatTokens, f, c);
String dataSetName = getDataSetDescriptor(fullName);
System.out.println("dataset name: " + dataSetName);
String groupName = getGroupDescriptor(fullName);
@@ -229,25 +187,15 @@ public class HDF5_Writer_ implements PlugInFilter
long[] maxdims = channelDims;
long[] chunks = null;
int gzip = 0; // no compression
try
{
try {
dataset = (Dataset) outFile.get(groupName + "/" + dataSetName);
}
catch( Exception e)
{
} catch (Exception e) {
dataset = null;
}
if(dataset == null)
{
try
{
dataset = outFile.createScalarDS(dataSetName, group, type,
channelDims,
maxdims, chunks, gzip,
null);
}
catch (Exception err)
{
if (dataset == null) {
try {
dataset = outFile.createScalarDS(dataSetName, group, type, channelDims, maxdims, chunks, gzip, null);
} catch (Exception err) {
IJ.error(err.getMessage());
return;
}
@@ -260,81 +208,61 @@ public class HDF5_Writer_ implements PlugInFilter
// dataet
// write levels
System.out.println("selected.length: "+
Integer.toString(selected.length));
System.out.println("channelDims.length: "+
Integer.toString(channelDims.length));
if(nLevs == 1)
{
for (int d = 0; d < selected.length; d++)
{
System.out.println("selected.length: " + Integer.toString(selected.length));
System.out.println("channelDims.length: " + Integer.toString(channelDims.length));
if (nLevs == 1) {
for (int d = 0; d < selected.length; d++) {
selected[d] = channelDims[d];
}
int stackIndex = imp.getStackIndex(c+1,1,f+1);
System.out.println("Stackindex: "+
Integer.toString(stackIndex));
int stackIndex = imp.getStackIndex(c + 1, 1, f + 1);
System.out.println("Stackindex: " + Integer.toString(stackIndex));
// get raw data
Object slice = stack.getPixels(stackIndex);
assert(slice != null);
assert (slice != null);
// write data
try
{
try {
dataset.write(slice);
}
catch(Exception e)
{
} catch (Exception e) {
IJ.showStatus("Error writing data to file.");
}
}
else
{
} else {
selected[0] = 1;
for (int d = 1; d < selected.length; d++)
{
for (int d = 1; d < selected.length; d++) {
selected[d] = channelDims[d];
}
long[] start = dataset.getStartDims(); // the off set of
long[] start = dataset.getStartDims(); // the
// off
// set
// of
// the selection
for (int lvl = 0; lvl < nLevs; ++lvl)
{
for (int lvl = 0; lvl < nLevs; ++lvl) {
// select hyperslab
start[0] = lvl;
int stackIndex = imp.getStackIndex(c+1,lvl+1,f+1);
int stackIndex = imp.getStackIndex(c + 1, lvl + 1, f + 1);
// get raw data
Object slice = stack.getPixels(stackIndex);
// write data
try
{
try {
dataset.write(slice);
}
catch(Exception e)
{
} catch (Exception e) {
IJ.showStatus("Error writing data to file.");
}
}
}
}
}
}
else
{
} else {
// write one big array
}
outFile.close();
}
catch (HDF5Exception err)
{
} catch (HDF5Exception err) {
IJ.error(err.getMessage());
return;
} catch (Exception err) {
IJ.error(err.getMessage());
return;
}
catch (Exception err)
{
IJ.error(err.getMessage());
return;
}
}
else
{
} else {
System.out.println("This is NO hyperstack");
// String title = imp.getTitle();
// int nDims = imp.getNDimensions();
@@ -353,37 +281,28 @@ public class HDF5_Writer_ implements PlugInFilter
// System.out.println("nRows: "+Integer.toString(nRows));
// System.out.println("nCols: "+Integer.toString(nCols));
gd.addStringField(imp.getTitle(), "");
gd.showDialog();
if (gd.wasCanceled())
{
if (gd.wasCanceled()) {
IJ.error("Plugin canceled!");
return;
}
String varName = gd.getNextString();
if(varName == "")
{
if (varName == "") {
IJ.error("No data set name given. Plugin canceled!");
return;
}
// write data set
try
{
try {
H5File outFile = null;
try
{
outFile = (H5File) fileFormat.createFile(filename,
FileFormat.FILE_CREATE_OPEN);
if (!outFile.canWrite())
{
try {
outFile = (H5File) fileFormat.createFile(filename, FileFormat.FILE_CREATE_OPEN);
if (!outFile.canWrite()) {
IJ.error("File `" + filename + "`is readonly!");
return;
}
}
catch (HDF5Exception err)
{
} catch (HDF5Exception err) {
IJ.error(err.getMessage());
return;
}
@@ -413,19 +332,14 @@ public class HDF5_Writer_ implements PlugInFilter
imgColorDepth = imp.getBitDepth();
imgColorType = imp.getType();
long[] dims;
if (imgColorType == ImagePlus.COLOR_RGB ||
imgColorType == ImagePlus.COLOR_256)
{
if(nLevels == 1)
{
if (imgColorType == ImagePlus.COLOR_RGB || imgColorType == ImagePlus.COLOR_256) {
if (nLevels == 1) {
// color image
dims = new long[3];
dims[0] = nRows;
dims[1] = nCols;
dims[2] = 3;
}
else
{
} else {
// color images have 4 dimensions, grey value images
// have 3.
System.out.println("adding 4 dimensions");
@@ -435,18 +349,13 @@ public class HDF5_Writer_ implements PlugInFilter
dims[2] = nCols;
dims[3] = 3;
}
}
else
{
if(nLevels == 1)
{
} else {
if (nLevels == 1) {
// color image
dims = new long[2];
dims[0] = nRows;
dims[1] = nCols;
}
else
{
} else {
System.out.println("adding 3 dimensions");
dims = new long[3];
dims[0] = nLevels;
@@ -476,71 +385,45 @@ public class HDF5_Writer_ implements PlugInFilter
Datatype type = null;
// supported data types
// FIXME: set the right signed and precision stuff
if (imgColorType == ImagePlus.GRAY8)
{
System.out.println(" bit depth: " + imgColorDepth
+ ", type: GRAY8");
type = new H5Datatype(Datatype.CLASS_CHAR, Datatype.NATIVE,
Datatype.NATIVE, Datatype.SIGN_NONE);
}
else if (imgColorType == ImagePlus.GRAY16)
{
System.out.println(" bit depth: " + imgColorDepth
+ ", type: GRAY16");
if (imgColorType == ImagePlus.GRAY8) {
System.out.println(" bit depth: " + imgColorDepth + ", type: GRAY8");
type = new H5Datatype(Datatype.CLASS_CHAR, Datatype.NATIVE, Datatype.NATIVE, Datatype.SIGN_NONE);
} else if (imgColorType == ImagePlus.GRAY16) {
System.out.println(" bit depth: " + imgColorDepth + ", type: GRAY16");
int typeSizeInByte = 2;
type = new H5Datatype(Datatype.CLASS_INTEGER, typeSizeInByte,
Datatype.NATIVE, Datatype.SIGN_NONE);
}
else if (imgColorType == ImagePlus.GRAY32)
{
System.out.println(" bit depth: " + imgColorDepth
+ ", type: GRAY32");
int typeSizeInByte = 4;
type = new H5Datatype(Datatype.CLASS_FLOAT, Datatype.NATIVE,
Datatype.NATIVE, -1);
}
else if (imgColorType == ImagePlus.COLOR_RGB)
{
System.out.println(" bit depth: " + imgColorDepth
+ ", type: COLOR_RGB");
type = new H5Datatype(Datatype.CLASS_CHAR, Datatype.NATIVE,
Datatype.NATIVE, Datatype.SIGN_NONE);
}
else if (imgColorType == ImagePlus.COLOR_256)
{
type = new H5Datatype(Datatype.CLASS_INTEGER, typeSizeInByte, Datatype.NATIVE, Datatype.SIGN_NONE);
} else if (imgColorType == ImagePlus.GRAY32) {
System.out.println(" bit depth: " + imgColorDepth + ", type: GRAY32");
// int typeSizeInByte = 4;
type = new H5Datatype(Datatype.CLASS_FLOAT, Datatype.NATIVE, Datatype.NATIVE, -1);
} else if (imgColorType == ImagePlus.COLOR_RGB) {
System.out.println(" bit depth: " + imgColorDepth + ", type: COLOR_RGB");
type = new H5Datatype(Datatype.CLASS_CHAR, Datatype.NATIVE, Datatype.NATIVE, Datatype.SIGN_NONE);
} else if (imgColorType == ImagePlus.COLOR_256) {
// FIXME: not supported yet
System.out.println(" bit depth: " + imgColorDepth
+ ", type: COLOR_256");
System.out.println(" bit depth: " + imgColorDepth + ", type: COLOR_256");
System.out.println(" ERROR: untested, this might fail.");
type = new H5Datatype(Datatype.CLASS_CHAR, Datatype.NATIVE,
Datatype.NATIVE, Datatype.SIGN_NONE);
type = new H5Datatype(Datatype.CLASS_CHAR, Datatype.NATIVE, Datatype.NATIVE, Datatype.SIGN_NONE);
}
// select hyperslabs
long[] maxdims = dims;
int nDims = dims.length;
// long[] chunks = findOptimalChunksize( nDims,
// dims);
// long[] chunks = findOptimalChunksize( nDims,
// dims);
long[] chunks = null;
int gzip = 0; // no compression
// create dataset
Dataset dataset = null;
try
{
try {
dataset = (Dataset) outFile.get(groupName + "/" + dataSetName);
}
catch( Exception e)
{
} catch (Exception e) {
dataset = null;
}
if(dataset == null)
{
if (dataset == null) {
dataset = outFile.createScalarDS(dataSetName, group, type,
dims, maxdims, chunks, gzip,
null);
dataset = outFile.createScalarDS(dataSetName, group, type, dims, maxdims, chunks, gzip, null);
}
dataset.init();
long[] selected = dataset.getSelectedDims(); // the
@@ -549,38 +432,32 @@ public class HDF5_Writer_ implements PlugInFilter
// the
// dataet
ImageStack stack = imp.getStack();
if(nLevels == 1)
{
for (int d = 0; d < selected.length; d++)
{
if (nLevels == 1) {
for (int d = 0; d < selected.length; d++) {
selected[d] = dims[d];
}
// get raw data
Object slice = stack.getPixels(nLevels);
if(imgColorType == ImagePlus.COLOR_RGB)
if (imgColorType == ImagePlus.COLOR_RGB)
slice = computeRgbSlice(stack.getPixels(nLevels));
// write data
dataset.write(slice);
}
else
{
} else {
selected[0] = 1;
for (int d = 1; d < selected.length; d++)
{
for (int d = 1; d < selected.length; d++) {
selected[d] = dims[d];
}
long[] start = dataset.getStartDims(); // the off set of
// the selection
for (int lvl = 0; lvl < nLevels; ++lvl)
{
for (int lvl = 0; lvl < nLevels; ++lvl) {
IJ.showProgress(lvl, nLevels);
// select hyperslab
start[0] = lvl;
// get raw data
Object slice = stack.getPixels(lvl + 1);
if(imgColorType == ImagePlus.COLOR_RGB)
if (imgColorType == ImagePlus.COLOR_RGB)
slice = computeRgbSlice(stack.getPixels(lvl + 1));
// write data
dataset.write(slice);
@@ -588,101 +465,70 @@ public class HDF5_Writer_ implements PlugInFilter
}
// get pixel sizes
ij.measure.Calibration cal = imp.getCalibration();
System.out.println(" Element-Size in um (level,row,col): "
+ cal.pixelDepth + ", " + cal.pixelHeight + ", "
+ cal.pixelWidth);
System.out.println(" Element-Size in um (level,row,col): " + cal.pixelDepth + ", " + cal.pixelHeight + ", " + cal.pixelWidth);
float[] element_sizes = new float[3];
element_sizes[0] = (float) cal.pixelDepth;
element_sizes[1] = (float) cal.pixelHeight;
element_sizes[2] = (float) cal.pixelWidth;
Datatype attrType = new H5Datatype(Datatype.CLASS_FLOAT,
Datatype.NATIVE, Datatype.NATIVE,
-1);
Datatype attrType = new H5Datatype(Datatype.CLASS_FLOAT, Datatype.NATIVE, Datatype.NATIVE, -1);
long[] attrDims = { 3 };
Attribute element_size_um = null;
try
{
try {
element_size_um = getAttribute(dataset, "element_size_um");
}
catch(Exception e)
{
} catch (Exception e) {
element_size_um = null;
}
if(element_size_um == null)
{
element_size_um = new Attribute("element_size_um",
attrType, attrDims);
if (element_size_um == null) {
element_size_um = new Attribute("element_size_um", attrType, attrDims);
}
element_size_um.setValue(element_sizes);
// write element_size_um
dataset.writeMetadata(element_size_um);
outFile.close();
}
catch (HDF5Exception err)
{
} catch (HDF5Exception err) {
System.err.println("Caught HDF5Exception");
err.printStackTrace();
}
catch (java.io.IOException err)
{
System.err.println("IO Error while writing '" + filename + "': "
+ err);
}
catch (Exception err)
{
System.err.println("Range Error while writing '" + filename
+ "': " + err);
} catch (java.io.IOException err) {
System.err.println("IO Error while writing '" + filename + "': " + err);
} catch (Exception err) {
System.err.println("Range Error while writing '" + filename + "': " + err);
}
}
}
int byteToUnsignedByte(int n)
{
int byteToUnsignedByte(int n) {
if (n < 0)
return (256 + n);
return n;
}
void showAbout()
{
IJ.showMessage("About HDF5 Writer:", "Written by Matthias Schlachter\n"
+ "University of Freiburg, 2010");
void showAbout() {
IJ.showMessage("About HDF5 Writer:", "Written by Matthias Schlachter\n" + "University of Freiburg, 2010");
}
private static Group createGroupRecursive(String groupRelativName,
Group group, FileFormat file)
{
private static Group createGroupRecursive(String groupRelativName, Group group, FileFormat file) {
if (groupRelativName == null || file == null)
return null;
if (group == null)
group = (Group) ((DefaultMutableTreeNode) file.getRootNode())
.getUserObject();
group = (Group) ((DefaultMutableTreeNode) file.getRootNode()).getUserObject();
while (groupRelativName.charAt(0) == '/')
{
while (groupRelativName.charAt(0) == '/') {
// trim leading slash
groupRelativName = groupRelativName.substring(1);
}
while (groupRelativName.charAt(groupRelativName.length() - 1) == '/')
{
while (groupRelativName.charAt(groupRelativName.length() - 1) == '/') {
// trim last slash
groupRelativName = groupRelativName.substring(0, groupRelativName
.length() - 2);
groupRelativName = groupRelativName.substring(0, groupRelativName.length() - 2);
}
int posOfSlash = groupRelativName.indexOf('/');
if (posOfSlash == -1)
{
try
{
if (posOfSlash == -1) {
try {
Group newGroup;
String newGroupName;
if (group.isRoot())
@@ -693,20 +539,15 @@ public class HDF5_Writer_ implements PlugInFilter
if (newGroup == null)
newGroup = file.createGroup(newGroupName, group);
return newGroup;
}
catch (Exception e)
{
} catch (Exception e) {
return null;
}
}
else
{
} else {
String subgroupRelativName = groupRelativName.substring(posOfSlash);
String currentGroup = groupRelativName.substring(0, posOfSlash);
System.out.println("Create: " + currentGroup);
System.out.println("Call back for: " + subgroupRelativName);
try
{
try {
Group newGroup;
String newGroupName;
if (group.isRoot())
@@ -714,16 +555,14 @@ public class HDF5_Writer_ implements PlugInFilter
else
newGroupName = group.getFullName() + "/" + currentGroup;
System.out.println("try opening: "+newGroupName);
System.out.println("try opening: " + newGroupName);
newGroup = (Group) file.get(newGroupName);
if (newGroup == null)
newGroup = file.createGroup(newGroupName, group);
return createGroupRecursive(subgroupRelativName, newGroup, file);
}
catch (Exception e)
{
} catch (Exception e) {
return null;
}
@@ -731,17 +570,14 @@ public class HDF5_Writer_ implements PlugInFilter
// never come here
}
private static String getGroupDescriptor(String absName)
{
private static String getGroupDescriptor(String absName) {
String groupName = absName;
while (groupName.charAt(0) == '/')
{
while (groupName.charAt(0) == '/') {
// trim leading slash
groupName = groupName.substring(1);
}
while (groupName.charAt(groupName.length() - 1) == '/')
{
while (groupName.charAt(groupName.length() - 1) == '/') {
// trim last slash
groupName = groupName.substring(0, groupName.length() - 2);
}
@@ -752,16 +588,13 @@ public class HDF5_Writer_ implements PlugInFilter
return groupName.substring(0, posOfLastSlash);
}
private static String getDataSetDescriptor(String absName)
{
private static String getDataSetDescriptor(String absName) {
String dataSetName = absName;
while (dataSetName.charAt(0) == '/')
{
while (dataSetName.charAt(0) == '/') {
// trim leading slash
dataSetName = dataSetName.substring(1);
}
while (dataSetName.charAt(dataSetName.length() - 1) == '/')
{
while (dataSetName.charAt(dataSetName.length() - 1) == '/') {
// trim last slash
dataSetName = dataSetName.substring(0, dataSetName.length() - 2);
}
@@ -772,48 +605,37 @@ public class HDF5_Writer_ implements PlugInFilter
return dataSetName.substring(posOfLastSlash + 1);
}
long[] findOptimalChunksize( int Rank,
long[] dataDims)
{
long[] findOptimalChunksize(int Rank, long[] dataDims) {
long[] best_chunksize = new long[Rank];
int maxChunkVol = 262144;
// small sanity check first:
int data_volume = 1;
for( int d = 0; d < Rank; ++d)
for (int d = 0; d < Rank; ++d)
data_volume *= dataDims[d];
if( data_volume < maxChunkVol) {
for( int d = 0; d < Rank; ++d)
if (data_volume < maxChunkVol) {
for (int d = 0; d < Rank; ++d)
best_chunksize[d] = dataDims[d];
return best_chunksize;
}
else return null;
} else
return null;
}
private static List<Attribute> getAttrList(Dataset ds) throws Exception
{
private static List<Attribute> getAttrList(Dataset ds) throws Exception {
if (ds == null)
return null;
List<Attribute> attributes = new ArrayList<Attribute>();
List members = ds.getMetadata();
List<?> members = ds.getMetadata();
int n = members.size();
Metadata obj = null;
for (int i = 0; i < n; i++)
{
for (int i = 0; i < n; i++) {
obj = (Metadata) members.get(i);
if (obj instanceof Attribute)
{
try
{
if (obj instanceof Attribute) {
try {
System.out.println(((Attribute) obj).getName());
attributes.add((Attribute) obj);
}
catch (java.lang.UnsupportedOperationException e)
{
System.out
.println("Caught UnsupportedOperationException datasets2.add((Dataset) obj)");
} catch (java.lang.UnsupportedOperationException e) {
System.out.println("Caught UnsupportedOperationException datasets2.add((Dataset) obj)");
System.out.println(e.getMessage());
}
}
@@ -821,51 +643,38 @@ public class HDF5_Writer_ implements PlugInFilter
return attributes;
}
private static Attribute
getAttribute(Dataset ds, String attrName) throws Exception
{
private static Attribute getAttribute(Dataset ds, String attrName) throws Exception {
List<Attribute> attrList = getAttrList(ds);
Iterator<Attribute> attrIter = attrList.iterator();
while (attrIter.hasNext())
{
while (attrIter.hasNext()) {
Attribute attr = attrIter.next();
if (attr.getName().equals(attrName))
{
if (attr.getName().equals(attrName)) {
return attr;
}
}
return null;
}
private Object computeRgbSlice(Object pixels)
{
private Object computeRgbSlice(Object pixels) {
byte rgbslice[];
int size = ((int[])pixels).length;
rgbslice = new byte[size*3];
for(int i=0;i<size;i++)
{
int red= (((int[]) pixels)[i] & 0xff0000) >> 16;
int green= (((int[]) pixels)[i] & 0x00ff00) >> 8;
int blue= ((int[]) pixels)[i] & 0x0000ff;
rgbslice[3*i + 0] = (byte) red;
rgbslice[3*i + 1] = (byte) green;
rgbslice[3*i + 2] =(byte) blue;
int size = ((int[]) pixels).length;
rgbslice = new byte[size * 3];
for (int i = 0; i < size; i++) {
int red = (((int[]) pixels)[i] & 0xff0000) >> 16;
int green = (((int[]) pixels)[i] & 0x00ff00) >> 8;
int blue = ((int[]) pixels)[i] & 0x0000ff;
rgbslice[3 * i + 0] = (byte) red;
rgbslice[3 * i + 1] = (byte) green;
rgbslice[3 * i + 2] = (byte) blue;
}
return rgbslice;
}
private String makeDataSetName(String[] toks,int frame,int channel)
{
String dName =
toks[0] +
Integer.toString(frame) +
toks[1] +
Integer.toString(channel);
if(toks.length>2)
private String makeDataSetName(String[] toks, int frame, int channel) {
String dName = toks[0] + Integer.toString(frame) + toks[1] + Integer.toString(channel);
if (toks.length > 2)
dName = dName + toks[2];
return dName;
}
// end of class
}
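
For the COLOR_RGB branch above, ImageJ delivers each slice as an int[] of packed 0xAARRGGBB pixels, and the private computeRgbSlice helper repacks it into an interleaved 3-bytes-per-pixel buffer before it is written to the dataset. A standalone sketch (not part of this commit; the sketch class name is hypothetical) of that repacking:

// Sketch mirroring computeRgbSlice: packed ARGB ints to interleaved RGB bytes.
public class RgbSliceSketch {
    static byte[] toRgbBytes(int[] pixels) {
        byte[] rgb = new byte[pixels.length * 3];
        for (int i = 0; i < pixels.length; i++) {
            rgb[3 * i]     = (byte) ((pixels[i] & 0xff0000) >> 16);  // red
            rgb[3 * i + 1] = (byte) ((pixels[i] & 0x00ff00) >> 8);   // green
            rgb[3 * i + 2] = (byte)  (pixels[i] & 0x0000ff);         // blue
        }
        return rgb;
    }

    public static void main(String[] args) {
        int[] onePixel = { 0xff102030 };  // alpha 0xff, red 0x10, green 0x20, blue 0x30
        byte[] rgb = toRgbBytes(onePixel);
        System.out.printf("%02x %02x %02x%n", rgb[0], rgb[1], rgb[2]);  // 10 20 30
    }
}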

TimeFrame.java

@@ -1,102 +1,74 @@
package ch.psi.imagej.hdf5;
/* =========================================================================
*
* Copyright 2011 Matthias Schlachter
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*=========================================================================*/
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
public class TimeFrame implements Comparable
{
public TimeFrame(int index)
{
public class TimeFrame implements Comparable<TimeFrame> {
private final int frameIndex;
private final List<Integer> channels = new ArrayList<Integer>();
public TimeFrame(int index) {
frameIndex = index;
}
public TimeFrame(String index)
{
public TimeFrame(String index) {
frameIndex = Integer.parseInt(index);
}
public void addChannel(int index)
{
Integer channelIndex = new Integer(index);
if(!channels.contains(channelIndex))
public void addChannel(Integer index) {
if (!channels.contains(index)){
channels.add(new Integer(index));
else
System.out.println("channel" + channelIndex.toString()
+ " already in list!");
}
}
public void addChannel(String index)
{
public void addChannel(String index) {
addChannel(Integer.parseInt(index));
}
public boolean equals(Object obj)
{
TimeFrame f = (TimeFrame) obj;
if(f.frameIndex == frameIndex)
return true;
return false;
public boolean equals(Object o) {
return (((TimeFrame)o).frameIndex == frameIndex);
}
public String toString()
{
public String toString() {
String s = "FrameIdx: " + Integer.toString(frameIndex) + "; ";
s = s + "nChannels: " + Integer.toString(channels.size()) + "; ";
s = s + "channels: ";
for(int i=0;i<channels.size();i++)
for (int i = 0; i < channels.size(); i++){
s = s + Integer.toString(channels.get(i)) + ";";
}
return s;
}
public int getNChannels()
{
public int getNChannels() {
return channels.size();
}
public int getFrameIndex()
{
public int getFrameIndex() {
return frameIndex;
}
public int[] getChannelIndices()
{
public int[] getChannelIndices() {
Object[] channelsAsArray = channels.toArray();
Arrays.sort(channelsAsArray);
int[] channelsIdx = new int[channelsAsArray.length];
for(int i=0;i<channelsAsArray.length;i++)
for (int i = 0; i < channelsAsArray.length; i++){
channelsIdx[i] = ((Integer) channelsAsArray[i]).intValue();
}
return channelsIdx;
}
public int compareTo(Object obj)
{
TimeFrame f = (TimeFrame) obj;
if(frameIndex<f.frameIndex)
public int compareTo(TimeFrame f) {
if (frameIndex < f.frameIndex){
return -1;
else if(frameIndex>f.frameIndex)
}
else if (frameIndex > f.frameIndex){
return 1;
else
}
else{
return 0;
}
private final int frameIndex;
private final ArrayList<Integer> channels = new ArrayList<Integer>();
}
}
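
With TimeFrame now implementing Comparable<TimeFrame>, frames compare directly by frame index, which is what the Arrays.sort call in HDF5_GroupedVarnames relies on. A minimal sketch (not part of this commit; the sketch class name and the example indices are hypothetical, and the plugin classes are assumed on the classpath):

// Sketch: sorting TimeFrame objects by frame index via compareTo.
import ch.psi.imagej.hdf5.TimeFrame;
import java.util.Arrays;

public class TimeFrameSortSketch {
    public static void main(String[] args) {
        TimeFrame[] frames = { new TimeFrame(2), new TimeFrame(0), new TimeFrame(1) };
        frames[0].addChannel(0);  // channel indices here are arbitrary examples
        frames[1].addChannel(0);
        frames[2].addChannel(1);

        Arrays.sort(frames);      // uses TimeFrame.compareTo(TimeFrame)
        for (TimeFrame f : frames) {
            System.out.println(f);  // FrameIdx: 0 ..., then 1, then 2
        }
    }
}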