Mirror of https://github.com/paulscherrerinstitute/ch.psi.imagej.hdf5.git (synced 2025-06-07 02:50:42 +02:00)

commit 580c543a5d (parent a5d175bd2e)

    replaced System.outs with log messages
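Aside (illustration, not part of the diff): every class below adopts the same java.util.logging pattern, a static per-class logger with logger.info(...) standing in for the old System.out.println(...) calls. A minimal self-contained sketch of that pattern, using an illustrative class name and message (the real classes use e.g. Logger.getLogger(HDF5Config.class.getName())):

    import java.util.logging.Logger;

    public class LoggingSketch {
        // One logger per class, named after the class, as done in
        // HDF5Config, HDF5GroupedVarnames, HDF5Reader and HDF5Writer below.
        private static final Logger logger = Logger.getLogger(LoggingSketch.class.getName());

        public static void main(String[] args) {
            // Unlike System.out.println, this record passes through JUL
            // handlers, so it can be filtered by level or redirected.
            logger.info("groupVarsByName: " + Boolean.toString(true));
        }
    }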
HDF5Config.java
@@ -3,11 +3,15 @@ package ch.psi.imagej.hdf5;
 import ij.Prefs;
 import ij.gui.GenericDialog;
 import ij.plugin.PlugIn;
+
+import java.util.logging.Logger;
 import java.util.regex.*;
 import java.lang.String;
 
 public class HDF5Config implements PlugIn {
 
+	private static final Logger logger = Logger.getLogger(HDF5Config.class.getName());
+
 	public static String GROUP_VARS_BY_NAME = "HDF5.groupVarsByName";
 	public static String SHOW_UNMATCHED_DATASET_NAMES = "HDF5.showUnmatchedDataSetNames";
 	public static String GROUP_VARS_BY_NAME_FORMAT_GROUP = "HDF5.groupVarsByNameFormatGroup";
@@ -55,7 +59,7 @@ public class HDF5Config implements PlugIn {
 		}
 		if (!configDiag.wasOKed()) {
 			// reset button was pressed
-			System.out.println("reset button was pressed");
+			logger.info("reset button was pressed");
 			// reset all and return a new dialog
 			configDiag.setVisible(false);
 			this.run(arg);
@@ -64,36 +68,36 @@ public class HDF5Config implements PlugIn {
 		// get parameters check if they are correct
 
 		groupVarsByName = configDiag.getNextBoolean();
-		System.out.println("groupVarsByName: " + Boolean.toString(groupVarsByName));
+		logger.info("groupVarsByName: " + Boolean.toString(groupVarsByName));
 
 		showUnmatchedDataSetNames = configDiag.getNextBoolean();
-		System.out.println("showUnmatchedDataSetNames: " + Boolean.toString(showUnmatchedDataSetNames));
+		logger.info("showUnmatchedDataSetNames: " + Boolean.toString(showUnmatchedDataSetNames));
 
 		groupVarsByNameFormatGroup = configDiag.getNextString();
-		System.out.println("groupVarsByNameFormatGroup: " + groupVarsByNameFormatGroup);
+		logger.info("groupVarsByNameFormatGroup: " + groupVarsByNameFormatGroup);
 
 		groupVarsByNameFormat = configDiag.getNextString();
-		System.out.println("groupVarsByNameFormat: " + groupVarsByNameFormat);
+		logger.info("groupVarsByNameFormat: " + groupVarsByNameFormat);
 
 		// dollarRegexpForGrouping = configDiag.getNextString();
-		// System.out.println("dollarRegexpForGrouping: " +
+		// logger.info("dollarRegexpForGrouping: " +
 		// dollarRegexpForGrouping);
 
 		try {
 			String[] formatTokens = HDF5GroupedVarnames.parseFormatString(groupVarsByNameFormat, dollarRegexpForGrouping);
 			for (int i = 0; i < formatTokens.length; i++) {
-				System.out.println("tok " + Integer.toString(i) + " : " + formatTokens[i]);
+				logger.info("tok " + Integer.toString(i) + " : " + formatTokens[i]);
 			}
 		} catch (PatternSyntaxException e) {
 			// produce an error dialog an start over
 			String errMsg = e.getMessage();
-			System.out.println(errMsg);
+			logger.info(errMsg);
 			// reset all and return a new dialog
 			configDiag.setVisible(false);
 			this.run(arg);
 			return;
 		}
-		System.out.println("Saving...");
+		logger.info("Saving...");
 
 		// all OK and "Save" was pressed, so save it...
 		Prefs.set(GROUP_VARS_BY_NAME, groupVarsByName);
@@ -146,7 +150,7 @@ public class HDF5Config implements PlugIn {
 			String dollarRegexpForGrouping = "[0-9]+"; // default
 			return dollarRegexpForGrouping;
 		} else {
-			System.out.println("No default value for key: " + key);
+			logger.info("No default value for key: " + key);
 			return null;
 		}
 	}
HDF5GroupedVarnames.java
@@ -1,5 +1,6 @@
 package ch.psi.imagej.hdf5;
 
+import java.util.logging.Logger;
 import java.util.regex.*;
 import java.util.ArrayList;
 import java.util.Arrays;
@@ -8,6 +9,8 @@ import java.util.List;
 
 public class HDF5GroupedVarnames {
 
+	private static final Logger logger = Logger.getLogger(HDF5GroupedVarnames.class.getName());
+
 	private final List<String> matchedVarNames = new ArrayList<String>();
 	private final List<String> unMatchedVarNames = new ArrayList<String>();
 	private final List<TimeFrame> frameList = new ArrayList<TimeFrame>();
@@ -26,7 +29,7 @@ public class HDF5GroupedVarnames {
 		boolean rightOrderOfFormatVars = groupVarsByNameFormat.indexOf("$T") < groupVarsByNameFormat.indexOf("$C");
 
 		for (int i = 0; i < formatTokens.length; i++) {
-			System.out.println("tok " + Integer.toString(i) + " : " + formatTokens[i]);
+			logger.info("tok " + Integer.toString(i) + " : " + formatTokens[i]);
 		}
 		if (formatTokens.length < 2 || !containsFormatVars || !rightOrderOfFormatVars) {
 			throw new PatternSyntaxException("Your format string has errors. " + "You must provide $T and $C and " + "also in correct order!", groupVarsByNameFormat, -1);
@@ -34,7 +37,7 @@
 		String regexp = groupVarsByNameFormat;
 		regexp = regexp.replace("$T", dollarRegexpForGrouping);
 		regexp = regexp.replace("$C", dollarRegexpForGrouping);
-		System.out.println(regexp);
+		logger.info(regexp);
 		// check if we have a regexp;
 		Pattern.compile(regexp);
 		return formatTokens;
@@ -48,14 +51,14 @@
 		} catch (PatternSyntaxException e) {
 			// produce an error dialog an start over
 			String errMsg = e.getMessage();
-			System.out.println(errMsg);
+			logger.info(errMsg);
 			return;
 		}
 		String regexp = groupVarsByNameFormat;
 		regexp = regexp.replace("$T", dollarRegexpForGrouping);
 		regexp = regexp.replace("$C", dollarRegexpForGrouping);
 
-		System.out.println(regexp);
+		logger.info(regexp);
 		// check if we have a regexp;
 		Pattern p = null;
 		p = Pattern.compile(regexp);
@@ -66,7 +69,7 @@
 			Matcher m = p.matcher(varNames[i]);
 			boolean b = m.matches();
 			if (b) {
-				System.out.println(varNames[i]);
+				logger.info(varNames[i]);
 				matchedVarNames.add(varNames[i]);
 			} else {
 				unMatchedVarNames.add(varNames[i]);
@@ -101,14 +104,14 @@
 			}
 
 			if (tokens.length < 2 || tokens.length > 3) {
-				System.out.println("Error parsing varname!");
+				logger.info("Error parsing varname!");
 			} else {
 				Integer channelIndex = new Integer(tokens[1]);
-				System.out.println("channelIndex: " + channelIndex.toString());
-				System.out.println("left token: " + tokens[0]);
+				logger.info("channelIndex: " + channelIndex.toString());
+				logger.info("left token: " + tokens[0]);
 				tokens = tokens[0].split("/t");
 				Integer frameIndex = new Integer(tokens[1]);
-				System.out.println("frameIndex: " + frameIndex.toString());
+				logger.info("frameIndex: " + frameIndex.toString());
 
 				if (minFrameIndex == -1)
 					minFrameIndex = frameIndex.intValue();
@@ -135,7 +138,7 @@
 					frame.addChannel(channelIndex.intValue());
 					frameList.add(frame);
 				}
-				// System.out.println(frame.toString());
+				// logger.info(frame.toString());
 			}
 		}
 	}
@@ -212,8 +215,8 @@
 	}
 
 	public void setFrameAndChannelRange(int minFrame, int skipFrame, int maxFrame, int minChannel, int skipChannel, int maxChannel) {
-		System.out.println("Setting frame range: " + Integer.toString(minFrame) + ":" + Integer.toString(skipFrame) + ":" + Integer.toString(maxFrame));
-		System.out.println("Setting channel range: " + Integer.toString(minChannel) + ":" + Integer.toString(skipChannel) + ":" + Integer.toString(maxChannel));
+		logger.info("Setting frame range: " + Integer.toString(minFrame) + ":" + Integer.toString(skipFrame) + ":" + Integer.toString(maxFrame));
+		logger.info("Setting channel range: " + Integer.toString(minChannel) + ":" + Integer.toString(skipChannel) + ":" + Integer.toString(maxChannel));
 		if (hasAllFramesInRange()) {
 			// copy frames
 			List<TimeFrame> completeFrameList = new ArrayList<TimeFrame>(frameList);
@@ -225,7 +228,7 @@
 				TimeFrame frame = new TimeFrame(frameAllChannels.getFrameIndex());
 				// TODO remove unwanted channels
 				for (int c = minChannel; c < maxChannel + 1; c += skipChannel) {
-					// System.out.println("Adding channels: " +
+					// logger.info("Adding channels: " +
 					// Integer.toString(c));
 					frame.addChannel(c);
 				}
@@ -235,9 +238,9 @@
 			}
 			// TODO update min/max of frames/channels
 			nChannels = ((maxChannel - minChannel) / skipChannel) + 1;
-			System.out.println("Adding nChannels: " + Integer.toString(nChannels));
+			logger.info("Adding nChannels: " + Integer.toString(nChannels));
 		} else {
-			System.out.println("-------------------------\n" + "hasAllFramesInRange==false\n" + "-------------------------");
+			logger.info("-------------------------\n" + "hasAllFramesInRange==false\n" + "-------------------------");
 			// copy frames
 			List<TimeFrame> completeFrameList = new ArrayList<TimeFrame>(frameList);
 			// clear frames
@@ -246,12 +249,12 @@
 			for (int f = minFrame; f < maxFrame + 1; f += skipFrame) {
 				TimeFrame frame = new TimeFrame(f);
 				int idx = completeFrameList.indexOf(frame);
-				// System.out.println("index of frame in list: " +
+				// logger.info("index of frame in list: " +
 				// Integer.toString(idx));
 				if (idx != -1) {
 					// TODO remove unwanted channels
 					for (int c = minChannel; c < maxChannel + 1; c += skipChannel) {
-						// System.out.println("Adding channels: " +
+						// logger.info("Adding channels: " +
 						// Integer.toString(c));
 						frame.addChannel(c);
 					}
@@ -259,12 +262,12 @@
 					// nChannels = frame.getNChannels();
 					frameList.add(frame);
 				} else {
-					System.out.println("Timestep " + Integer.toString(f) + " is missing!");
+					logger.info("Timestep " + Integer.toString(f) + " is missing!");
 				}
 			}
 			// TODO update min/max of frames/channels
 			nChannels = ((maxChannel - minChannel) / skipChannel) + 1;
-			System.out.println("Adding nChannels: " + Integer.toString(nChannels));
+			logger.info("Adding nChannels: " + Integer.toString(nChannels));
 		}
 	}
 }
HDF5Reader.java
@@ -134,18 +134,18 @@ public class HDF5Reader implements PlugIn {
 				attrName = attrSelecD.getNextChoice();
 			}
 
-			System.out.println("Reading attribute");
+			logger.info("Reading attribute");
 			Attribute attr = getAttribute(gr, attrName);
-			System.out.println("Reading attribute is ok");
+			logger.info("Reading attribute is ok");
 			if (attr != null)
-				System.out.println("attr is not null");
-			System.out.println("attr.getName(): " + attr.getName());
+				logger.info("attr is not null");
+			logger.info("attr.getName(): " + attr.getName());
 			Datatype dType = attr.getType();
-			System.out.println(dType.getDatatypeDescription());
+			logger.info(dType.getDatatypeDescription());
 
 			Object tmp = attr.getValue();
 			if (tmp != null)
-				System.out.println("get value is ok");
+				logger.info("get value is ok");
 			if (tmp instanceof String) {
 				// we have a string
 				groupVarsByNameFormat = (String) tmp;
@@ -156,14 +156,14 @@
 					for (int i = 0; i < sTmp.length; i++)
 						groupVarsByNameFormat = groupVarsByNameFormat + sTmp[i];
 				}
-				System.out.println("File has format string for grouping: " + groupVarsByNameFormat);
+				logger.info("File has format string for grouping: " + groupVarsByNameFormat);
 			} else {
-				System.out.println("File has no format string for grouping" + ", using default");
+				logger.info("File has no format string for grouping" + ", using default");
 				groupVarsByNameFormat = HDF5Config.getDefaultValue(HDF5Config.GROUP_VARS_BY_NAME_FORMAT);
 				groupVarsByNameFormat = Prefs.get(HDF5Config.GROUP_VARS_BY_NAME_FORMAT, groupVarsByNameFormat);
 			}
 		} catch (Exception e) {
-			System.out.println("Error occured read format string " + "for grouping, using default");
+			logger.info("Error occured read format string " + "for grouping, using default");
 			groupVarsByNameFormat = HDF5Config.getDefaultValue(HDF5Config.GROUP_VARS_BY_NAME_FORMAT);
 			groupVarsByNameFormat = Prefs.get(HDF5Config.GROUP_VARS_BY_NAME_FORMAT, groupVarsByNameFormat);
 		}
@@ -208,7 +208,7 @@
 				varNames[i] = varList.get(i).getFullName();
 			}
 			groupedVarnames.parseVarNames(varNames, groupVarsByNameFormat, dollarRegexpForGrouping);
-			System.out.println(groupedVarnames.toString());
+			logger.info(groupedVarnames.toString());
 
 			// make the data set selection dialog
 			minFrameIndex = groupedVarnames.getMinFrameIndex();
@@ -249,7 +249,7 @@
 					varSelections[i] = title;
 					defaultValues[i] = false;
 				}
-				System.out.println("addcheckboxgroup with " + unmatchedVarNames.size() + " rows");
+				logger.info("addcheckboxgroup with " + unmatchedVarNames.size() + " rows");
 				gd.addCheckboxGroup(unmatchedVarNames.size(), 1, varSelections, defaultValues);
 				addScrollBars(gd);
 			}
@@ -282,7 +282,7 @@
 			if (frameRangeToks.length == 1) {
 				// single frame
 				try {
-					System.out.println("single frame");
+					logger.info("single frame");
 					minFrameIndex = Integer.parseInt(frameRangeToks[0]);
 					maxFrameIndex = minFrameIndex;
 					wrongFrameRange = false;
@@ -292,7 +292,7 @@
 			} else if (frameRangeToks.length == 2) {
 				// frame range with skipFrameIndex=1
 				try {
-					System.out.println("frame range with skipFrameIndex=1");
+					logger.info("frame range with skipFrameIndex=1");
 					minFrameIndex = Integer.parseInt(frameRangeToks[0]);
 					maxFrameIndex = Integer.parseInt(frameRangeToks[1]);
 					wrongFrameRange = false;
@@ -302,7 +302,7 @@
 			} else if (frameRangeToks.length == 3) {
 				// frame range with skipFrameIndex
 				try {
-					System.out.println("frame range with skipFrameIndex");
+					logger.info("frame range with skipFrameIndex");
 					minFrameIndex = Integer.parseInt(frameRangeToks[0]);
 					skipFrameIndex = Integer.parseInt(frameRangeToks[1]);
 					maxFrameIndex = Integer.parseInt(frameRangeToks[2]);
@@ -312,7 +312,7 @@
 				}
 			} else {
 				// wrong format
-				System.out.println("wrong format");
+				logger.info("wrong format");
 				wrongFrameRange = true;
 			}
 
@@ -352,7 +352,7 @@
 			}
 			if (wrongFrameRange || wrongChannelRange) {
 				// show dialog again
-				System.out.println("show dialog again");
+				logger.info("show dialog again");
 				// TODO reset dialog when possible
 				gd = new GenericDialog("Range Selection");
 				gd.addMessage("Select frames and channels you want to read");
@@ -363,7 +363,7 @@
 				gd.addStringField("Channel selection (start:[step:]end): ",
 						Integer.toString(minChannelIndex) + ":" + Integer.toString(skipChannelIndex) + ":" + Integer.toString(maxChannelIndex));
 				gd.showDialog();
-				System.out.println("read ranges again");
+				logger.info("read ranges again");
 				frameRange = gd.getNextString();
 				channelRange = gd.getNextString();
 
@@ -385,7 +385,7 @@
 						varList.add((Dataset) ds);
 					}
 				} catch (Exception e) {
-					System.out.println("The file does not contain a variable " + "with name " + "`" + dsName + "`!");
+					logger.info("The file does not contain a variable " + "with name " + "`" + dsName + "`!");
 				}
 			}
 		} else {
@@ -395,11 +395,11 @@
 
 			} else if (varList.size() > 1000) {
 
-				System.out.println("#######");
+				logger.info("#######");
 				for(Dataset d: varList){
-					System.out.println(d.getFullName());
+					logger.info(d.getFullName());
 				}
-				System.out.println("#######");
+				logger.info("#######");
 
 				/*-----------------------------------------------------------------
 				 * FIXME: quick an dirty hack for files with more than 1000
@@ -448,7 +448,7 @@
 					varSelections[i] = title;
 					defaultValues[i] = false;
 				}
-				System.out.println("addcheckboxgroup with " + varList.size() + " rows");
+				logger.info("addcheckboxgroup with " + varList.size() + " rows");
 				gd.addCheckboxGroup(varList.size(), 1, varSelections, defaultValues);
 				addScrollBars(gd);
 				gd.showDialog();
@@ -478,12 +478,12 @@
 			try {
 				TimeFrame f = groupedVarnames.getFrame(0);
 				if (f == null)
-					System.out.println("frame is null");
+					logger.info("frame is null");
 				if (formatTokens.length == 2)
 					dsName = formatTokens[0] + Integer.toString(f.getFrameIndex()) + formatTokens[1] + Integer.toString(f.getChannelIndices()[0]);
 				else if (formatTokens.length == 3)
 					dsName = formatTokens[0] + Integer.toString(f.getFrameIndex()) + formatTokens[1] + Integer.toString(f.getChannelIndices()[0]) + formatTokens[2];
-				System.out.println("VarName: " + dsName);
+				logger.info("VarName: " + dsName);
 				HObject ds = inFile.get(dsName);
 				if (ds != null && ds instanceof Dataset) {
 					var = (Dataset) ds;
@@ -511,7 +511,7 @@
 					elem_sizes[1] = 1.0;
 					elem_sizes[2] = 1.0;
 				} else {
-					System.out.println("Reading element_size_um");
+					logger.info("Reading element_size_um");
 					float[] tmp = null;
 					try {
 						tmp = ((float[]) elemsize_att.getValue());
@@ -558,7 +558,7 @@
 
 			// create a new image stack and fill in the data
 			ImageStack stack = new ImageStack(nCols, nRows, nFrames * nSlices * nChannels);
-			System.out.println("stackSize: " + Integer.toString(stack.getSize()));
+			logger.info("stackSize: " + Integer.toString(stack.getSize()));
 
 			ImagePlus imp = new ImagePlus();
 			// to get getFrameIndex() working
@@ -575,7 +575,7 @@
 				// get current frame
 				TimeFrame f = groupedVarnames.getFrame(fIdx);
 				if (f == null)
-					System.out.println("frame is null");
+					logger.info("frame is null");
 				// get channel indices
 
 				// TODO: check if frame has same parameters as first,
@@ -588,7 +588,7 @@
 					else if (formatTokens.length == 3)
 						dsName = formatTokens[0] + Integer.toString(f.getFrameIndex()) + formatTokens[1] + Integer.toString(f.getChannelIndices()[cIdx]) + formatTokens[2];
 
-					System.out.println("VarName: " + dsName);
+					logger.info("VarName: " + dsName);
 
 					HObject ds = inFile.get(dsName);
 					if (ds != null && ds instanceof Dataset) {
@@ -648,7 +648,7 @@
 						short[] tmp = (short[]) extractSubarray(wholeDataset, startIdx, numElements);
 						stack.setPixels(tmp, imp.getStackIndex(cIdx + 1, lev + 1, fIdx + 1));
 					} else if (wholeDataset instanceof int[]) {
-						System.out.println("Datatype `int` is not supported. " + "Skipping whole frame!");
+						logger.info("Datatype `int` is not supported. " + "Skipping whole frame!");
 						// int[] tmp = (int[])
 						// extractSubarray(wholeDataset,
 						// startIdx,
@@ -666,7 +666,7 @@
 						// imp.getStackIndex(cIdx+1,lev+1,fIdx+1));
 						// }
 					} else if (wholeDataset instanceof long[]) {
-						System.out.println("Datatype `long` is not supported. " + "Skipping whole frame!");
+						logger.info("Datatype `long` is not supported. " + "Skipping whole frame!");
 						// long[] tmp = (long[])
 						// extractSubarray(wholeDataset,
 						// startIdx,
@@ -687,7 +687,7 @@
 						float[] tmp = (float[]) extractSubarray(wholeDataset, startIdx, numElements);
 						stack.setPixels(tmp, imp.getStackIndex(cIdx + 1, lev + 1, fIdx + 1));
 					} else if (wholeDataset instanceof double[]) {
-						System.out.println("Datatype `double` is not supported. " + "Converting whole frame to `float`!");
+						logger.info("Datatype `double` is not supported. " + "Converting whole frame to `float`!");
 						float[] tmp = convertDoubleToFloat((double[]) extractSubarray(wholeDataset, startIdx, numElements));
 						stack.setPixels(tmp, imp.getStackIndex(cIdx + 1, lev + 1, fIdx + 1));
 					} else {
@@ -699,32 +699,32 @@
 			}
 			IJ.showProgress(1.f);
 
-			System.out.println("Creating image plus");
+			logger.info("Creating image plus");
 			// stack.trim();
 			imp = new ImagePlus(directory + name + ": " + groupedVarnames.getFormatString(), stack);
 
 			imp.setDimensions(nChannels, nSlices, nFrames);
 
 			if (nChannels > 1) {
-				System.out.println("Creating composite hyperstack with " + Integer.toString(nChannels) + " channels.");
+				logger.info("Creating composite hyperstack with " + Integer.toString(nChannels) + " channels.");
 				imp = new CompositeImage(imp, CompositeImage.COMPOSITE);
 			} else {
-				System.out.println("Creating grayscale hyperstack.");
+				logger.info("Creating grayscale hyperstack.");
 				// imp = new CompositeImage(imp,
 				// CompositeImage.GRAYSCALE);
 			}
 
-			System.out.println("nFrames: " + Integer.toString(nFrames));
-			System.out.println("nSlices: " + Integer.toString(nSlices));
+			logger.info("nFrames: " + Integer.toString(nFrames));
+			logger.info("nSlices: " + Integer.toString(nSlices));
 
-			System.out.println("stackSize: " + Integer.toString(stack.getSize()));
+			logger.info("stackSize: " + Integer.toString(stack.getSize()));
 
 			// set element_size_um
 			imp.getCalibration().pixelDepth = elem_sizes[0];
 			imp.getCalibration().pixelHeight = elem_sizes[1];
 			imp.getCalibration().pixelWidth = elem_sizes[2];
 
-			// System.out.println(" Min = " + minMaxVal[0] +
+			// logger.info(" Min = " + minMaxVal[0] +
 			// ", Max = " + minMaxVal[1]);
 			// imp.setDisplayRange(1.5*minMaxVal[0], 0.5*minMaxVal[1]);
 			// imp.resetDisplayRange();
@@ -735,12 +735,12 @@
 				// channelsIJ[c]);
 				// imp.setSlice(c+1);
 				imp.setPosition(c + 1, 1, 1);
-				System.out.println("Current channel: " + Integer.toString(imp.getChannel() - 1));
+				logger.info("Current channel: " + Integer.toString(imp.getChannel() - 1));
 
 				imp.setDisplayRange(minValChannel[c], maxValChannel[c]);
 				// ,
 				// channelsIJ[c]);
-				System.out.println("Setting display range for channel " + Integer.toString(c) + " (ij idx: " + Integer.toString(channelsIJ[c]) + "): \n\t" + Double.toString(minValChannel[c])
+				logger.info("Setting display range for channel " + Integer.toString(c) + " (ij idx: " + Integer.toString(channelsIJ[c]) + "): \n\t" + Double.toString(minValChannel[c])
 						+ "/" + Double.toString(maxValChannel[c]));
 			}
 
@@ -763,12 +763,12 @@
 		Datatype datatypeIfUnsupported = null;
 		long[] extent = var.getDims();
 
-		System.out.println("Reading Variable: " + var.getName());
-		System.out.println(" Rank = " + rank + ", Data-type = " + datatype.getDatatypeDescription());
+		logger.info("Reading Variable: " + var.getName());
+		logger.info(" Rank = " + rank + ", Data-type = " + datatype.getDatatypeDescription());
 		System.out.print(" Extent in px (level,row,col):");
 		for (int d = 0; d < rank; ++d)
 			System.out.print(" " + extent[d]);
-		System.out.println("");
+		logger.info("");
 		IJ.showStatus("Reading Variable: " + var.getName() + " (" + extent[0] + " slices)");
 
 		Attribute elemsize_att = getAttribute(var, "element_size_um");
@@ -778,7 +778,7 @@
 			elem_sizes[1] = 1.0;
 			elem_sizes[2] = 1.0;
 		} else {
-			System.out.println("Reading element_size_um");
+			logger.info("Reading element_size_um");
 			Object tmp = elemsize_att.getValue();
 			if (tmp instanceof float[]) {
 				elem_sizes[0] = ((float[]) tmp)[0];
@@ -797,7 +797,7 @@
 				elem_sizes[2] = 1.0;
 			}
 		}
-		System.out.println(" Element-Size in um (level,row,col): " + elem_sizes[0] + ", " + elem_sizes[1] + ", " + elem_sizes[2]);
+		logger.info(" Element-Size in um (level,row,col): " + elem_sizes[0] + ", " + elem_sizes[1] + ", " + elem_sizes[2]);
 
 		// nice gadget to update the progress bar
 		long progressDivisor = extent[0] / 50; // we assume 50 process steps
@@ -806,7 +806,7 @@
 
 		// check if we have an unsupported datatype
 		if (datatype.getDatatypeClass() == Datatype.CLASS_INTEGER && (datatype.getDatatypeSize() == 4 || datatype.getDatatypeSize() == 8)) {
-			System.out.println("Datatype not supported by ImageJ");
+			logger.info("Datatype not supported by ImageJ");
 			GenericDialog typeSelDiag = new GenericDialog("Datatype Selection");
 			typeSelDiag.addMessage("The datatype `" + datatype.getDatatypeDescription() + "` is not supported by ImageJ.\n\n");
 			typeSelDiag.addMessage("Please select your wanted datatype.\n");
@@ -821,11 +821,11 @@
 			}
 			int selection = typeSelDiag.getNextChoiceIndex();
 			if (selection == 0) {
-				System.out.println("float selected");
+				logger.info("float selected");
 				datatypeIfUnsupported = new H5Datatype(Datatype.CLASS_FLOAT, Datatype.NATIVE, Datatype.NATIVE, -1);
 			}
 			if (selection == 1) {
-				System.out.println("short selected");
+				logger.info("short selected");
 				int typeSizeInByte = 2;
 				datatypeIfUnsupported = new H5Datatype(Datatype.CLASS_INTEGER, typeSizeInByte, Datatype.NATIVE, -1);
 			}
@@ -833,7 +833,7 @@
 
 		// read dataset
 		if (rank == 5 && extent[4] == 3) {
-			System.out.println(" Detected HyperVolume (type RGB).");
+			logger.info(" Detected HyperVolume (type RGB).");
 
 			// create a new image stack and fill in the data
 			ImageStack stack = new ImageStack((int) extent[3], (int) extent[2]);
@@ -1044,10 +1044,10 @@
 			int nChannels = 3;
 			int nSlices = (int) extent[1];
 			int nFrames = (int) extent[0];
-			System.out.println("nFrames: " + Integer.toString(nFrames));
-			System.out.println("nSlices: " + Integer.toString(nSlices));
+			logger.info("nFrames: " + Integer.toString(nFrames));
+			logger.info("nSlices: " + Integer.toString(nSlices));
 
-			System.out.println("stackSize: " + Integer.toString(stack.getSize()));
+			logger.info("stackSize: " + Integer.toString(stack.getSize()));
 
 			imp.setDimensions(nChannels, nSlices, nFrames);
 			imp = new CompositeImage(imp, CompositeImage.COMPOSITE);
@@ -1068,7 +1068,7 @@
 			imp.show();
 		} else if (rank == 4) {
 			if (extent[3] == 3) {
-				System.out.println(" Detected color Image (type RGB).");
+				logger.info(" Detected color Image (type RGB).");
 
 				// create a new image stack and fill in the data
 				ImageStack stack = new ImageStack((int) extent[2], (int) extent[1]);
@@ -1245,7 +1245,7 @@
 				imp.updateStatusbarValue();
 			} else // we have a HyperVolume
 			{
-				System.out.println(" Detected HyperVolume (type GREYSCALE).");
+				logger.info(" Detected HyperVolume (type GREYSCALE).");
 
 				// create a new image stack and fill in the data
 				ImageStack stack = new ImageStack((int) extent[3], (int) extent[2]);
@@ -1342,11 +1342,11 @@
 				int nFrames = (int) extent[0];
 				Integer nFramesI = new Integer(nFrames);
 				Integer nSlicesI = new Integer(nSlices);
-				System.out.println("nFrames: " + nFramesI.toString());
-				System.out.println("nSlices: " + nSlicesI.toString());
+				logger.info("nFrames: " + nFramesI.toString());
+				logger.info("nSlices: " + nSlicesI.toString());
 
 				Integer myStackSize = new Integer(stack.getSize());
-				System.out.println("stackSize: " + myStackSize.toString());
+				logger.info("stackSize: " + myStackSize.toString());
 
 				imp.setDimensions(nChannels, nSlices, nFrames);
 				imp.setOpenAsHyperStack(true);
@@ -1360,7 +1360,7 @@
 				imp.show();
 			}
 		} else if (rank == 3 && extent[2] == 3) {
-			System.out.println("This is an rgb image");
+			logger.info("This is an rgb image");
 			// create a new image stack and fill in the data
 			ImageStack stack = new ImageStack((int) extent[1], (int) extent[0]);
 
@@ -1515,7 +1515,7 @@
 			imp.show();
 			imp.updateStatusbarValue();
 		} else if (rank == 3) {
-			System.out.println("Rank is 3");
+			logger.info("Rank is 3");
 
 			// create a new image stack and fill in the data
 			ImageStack stack = new ImageStack((int) extent[2], (int) extent[1]);
@@ -1605,7 +1605,7 @@
 
 		// check if we have an unsupported datatype
 		if (datatype.getDatatypeClass() == Datatype.CLASS_INTEGER && (datatype.getDatatypeSize() == 4 || datatype.getDatatypeSize() == 8)) {
-			System.out.println("Datatype not supported by ImageJ");
+			logger.info("Datatype not supported by ImageJ");
 			GenericDialog typeSelDiag = new GenericDialog("Datatype Selection");
 			typeSelDiag.addMessage("The datatype `" + datatype.getDatatypeDescription() + "` is not supported by ImageJ.\n\n");
 			typeSelDiag.addMessage("Please select your wanted datatype.\n");
@@ -1620,11 +1620,11 @@
 			}
 			int selection = typeSelDiag.getNextChoiceIndex();
 			if (selection == 0) {
-				System.out.println("float selected");
+				logger.info("float selected");
 				datatypeIfUnsupported = new H5Datatype(Datatype.CLASS_FLOAT, Datatype.NATIVE, Datatype.NATIVE, -1);
 			}
 			if (selection == 1) {
-				System.out.println("short selected");
+				logger.info("short selected");
 				int typeSizeInByte = 2;
 				datatypeIfUnsupported = new H5Datatype(Datatype.CLASS_INTEGER, typeSizeInByte, Datatype.NATIVE, -1);
 			}
@@ -1693,7 +1693,7 @@
 			double imgMax = ips.getMax();
 			double imgMin = ips.getMin();
 
-			System.out.println(" Min = " + imgMin + ", Max = " + imgMax);
+			logger.info(" Min = " + imgMin + ", Max = " + imgMax);
 			ips.setMinAndMax(imgMin, imgMax);
 			imp.updateAndDraw();
 			imp.show();
@@ -1927,7 +1927,7 @@
 		} else if (unsignedConvSelec == 1) {
 			// convert to float
 			if (dataIn instanceof short[]) {
-				System.out.println("Converting to float");
+				logger.info("Converting to float");
 				short[] tmpIn = (short[]) dataIn;
 				float[] tmp = new float[tmpIn.length];
 				for (int i = 0; i < tmp.length; i++)
@ -25,9 +25,11 @@ import ij.io.*;
|
|||||||
import ij.plugin.filter.PlugInFilter;
|
import ij.plugin.filter.PlugInFilter;
|
||||||
import ij.process.*;
|
import ij.process.*;
|
||||||
import ij.gui.*;
|
import ij.gui.*;
|
||||||
|
|
||||||
import java.util.ArrayList;
|
import java.util.ArrayList;
|
||||||
import java.util.Iterator;
|
import java.util.Iterator;
|
||||||
import java.util.List;
|
import java.util.List;
|
||||||
|
import java.util.logging.Logger;
|
||||||
|
|
||||||
import javax.swing.tree.DefaultMutableTreeNode;
|
import javax.swing.tree.DefaultMutableTreeNode;
|
||||||
|
|
||||||
@ -36,6 +38,9 @@ import ncsa.hdf.object.h5.*; // the HDF5 implementation
|
|||||||
import ncsa.hdf.hdf5lib.exceptions.HDF5Exception;
|
import ncsa.hdf.hdf5lib.exceptions.HDF5Exception;
|
||||||
|
|
||||||
public class HDF5Writer implements PlugInFilter {
|
public class HDF5Writer implements PlugInFilter {
|
||||||
|
|
||||||
|
private static final Logger logger = Logger.getLogger(HDF5Writer.class.getName());
|
||||||
|
|
||||||
private Boolean _batchMode = false;
|
private Boolean _batchMode = false;
|
||||||
private String _batchFileName = null;
|
private String _batchFileName = null;
|
||||||
|
|
||||||
@ -96,7 +101,7 @@ public class HDF5Writer implements PlugInFilter {
|
|||||||
|
|
||||||
// check for hyperstack
|
// check for hyperstack
|
||||||
if (imp.getOpenAsHyperStack() || imp.isHyperStack()) {
|
if (imp.getOpenAsHyperStack() || imp.isHyperStack()) {
|
||||||
System.out.println("This is a hyperstack");
|
logger.info("This is a hyperstack");
|
||||||
boolean splitChannels = true;
|
boolean splitChannels = true;
|
||||||
gd.addCheckbox("Split frames and channels", splitChannels);
|
gd.addCheckbox("Split frames and channels", splitChannels);
|
||||||
gd.addStringField(imp.getTitle(), "/t$T/channel$C");
|
gd.addStringField(imp.getTitle(), "/t$T/channel$C");
|
||||||
@ -108,14 +113,14 @@ public class HDF5Writer implements PlugInFilter {
|
|||||||
int nRows = imp.getHeight();
|
int nRows = imp.getHeight();
|
||||||
int nCols = imp.getWidth();
|
int nCols = imp.getWidth();
|
||||||
boolean isComposite = imp.isComposite();
|
boolean isComposite = imp.isComposite();
|
||||||
System.out.println("isComposite: " + Boolean.toString(isComposite));
|
logger.info("isComposite: " + Boolean.toString(isComposite));
|
||||||
System.out.println("Saving image \"" + title + "\"");
|
logger.info("Saving image \"" + title + "\"");
|
||||||
System.out.println("nDims: " + Integer.toString(nDims));
|
logger.info("nDims: " + Integer.toString(nDims));
|
||||||
System.out.println("nFrames: " + Integer.toString(nFrames));
|
logger.info("nFrames: " + Integer.toString(nFrames));
|
||||||
System.out.println("nChannels: " + Integer.toString(nChannels));
|
logger.info("nChannels: " + Integer.toString(nChannels));
|
||||||
System.out.println("nSlices: " + Integer.toString(nLevs));
|
logger.info("nSlices: " + Integer.toString(nLevs));
|
||||||
System.out.println("nRows: " + Integer.toString(nRows));
|
logger.info("nRows: " + Integer.toString(nRows));
|
||||||
System.out.println("nCols: " + Integer.toString(nCols));
|
logger.info("nCols: " + Integer.toString(nCols));
|
||||||
gd.showDialog();
|
gd.showDialog();
|
||||||
if (gd.wasCanceled()) {
|
if (gd.wasCanceled()) {
|
||||||
IJ.error("Plugin canceled!");
|
IJ.error("Plugin canceled!");
|
||||||
@ -123,22 +128,22 @@ public class HDF5Writer implements PlugInFilter {
|
|||||||
}
|
}
|
||||||
splitChannels = gd.getNextBoolean();
|
splitChannels = gd.getNextBoolean();
|
||||||
String formatString = gd.getNextString();
|
String formatString = gd.getNextString();
|
||||||
System.out.println("formatString: " + formatString);
|
logger.info("formatString: " + formatString);
|
||||||
System.out.println("Bitdepth: " + imp.getBitDepth());
|
logger.info("Bitdepth: " + imp.getBitDepth());
|
||||||
System.out.println("Saving HDF5 File: " + filename);
|
logger.info("Saving HDF5 File: " + filename);
|
||||||
|
|
||||||
int imgColorDepth = imp.getBitDepth();
|
int imgColorDepth = imp.getBitDepth();
|
||||||
int imgColorType = imp.getType();
|
int imgColorType = imp.getType();
|
||||||
Datatype type = null;
|
Datatype type = null;
|
||||||
if (imgColorType == ImagePlus.GRAY8) {
|
if (imgColorType == ImagePlus.GRAY8) {
|
||||||
System.out.println(" bit depth: " + imgColorDepth + ", type: GRAY8");
|
logger.info(" bit depth: " + imgColorDepth + ", type: GRAY8");
|
||||||
type = new H5Datatype(Datatype.CLASS_CHAR, Datatype.NATIVE, Datatype.NATIVE, Datatype.SIGN_NONE);
|
type = new H5Datatype(Datatype.CLASS_CHAR, Datatype.NATIVE, Datatype.NATIVE, Datatype.SIGN_NONE);
|
||||||
} else if (imgColorType == ImagePlus.GRAY16) {
|
} else if (imgColorType == ImagePlus.GRAY16) {
|
||||||
System.out.println(" bit depth: " + imgColorDepth + ", type: GRAY16");
|
logger.info(" bit depth: " + imgColorDepth + ", type: GRAY16");
|
||||||
int typeSizeInByte = 2;
|
int typeSizeInByte = 2;
|
||||||
type = new H5Datatype(Datatype.CLASS_INTEGER, typeSizeInByte, Datatype.NATIVE, Datatype.SIGN_NONE);
|
type = new H5Datatype(Datatype.CLASS_INTEGER, typeSizeInByte, Datatype.NATIVE, Datatype.SIGN_NONE);
|
||||||
} else if (imgColorType == ImagePlus.GRAY32) {
|
} else if (imgColorType == ImagePlus.GRAY32) {
|
||||||
System.out.println(" bit depth: " + imgColorDepth + ", type: GRAY32");
|
logger.info(" bit depth: " + imgColorDepth + ", type: GRAY32");
|
||||||
// int typeSizeInByte = 4;
|
// int typeSizeInByte = 4;
|
||||||
type = new H5Datatype(Datatype.CLASS_FLOAT, Datatype.NATIVE, Datatype.NATIVE, -1);
|
type = new H5Datatype(Datatype.CLASS_FLOAT, Datatype.NATIVE, Datatype.NATIVE, -1);
|
||||||
}
|
}
|
||||||
@ -176,9 +181,9 @@ public class HDF5Writer implements PlugInFilter {
|
|||||||
for (int c = 0; c < nChannels; c++) {
|
for (int c = 0; c < nChannels; c++) {
|
||||||
String fullName = makeDataSetName(formatTokens, f, c);
|
String fullName = makeDataSetName(formatTokens, f, c);
|
||||||
String dataSetName = getDataSetDescriptor(fullName);
|
String dataSetName = getDataSetDescriptor(fullName);
|
||||||
System.out.println("dataset name: " + dataSetName);
|
logger.info("dataset name: " + dataSetName);
|
||||||
String groupName = getGroupDescriptor(fullName);
|
String groupName = getGroupDescriptor(fullName);
|
||||||
System.out.println("group name: " + groupName);
|
logger.info("group name: " + groupName);
|
||||||
// ensure group exists
|
// ensure group exists
|
||||||
Group group = createGroupRecursive(groupName, null, outFile);
|
Group group = createGroupRecursive(groupName, null, outFile);
|
||||||
// create data set
|
// create data set
|
||||||
@ -208,14 +213,14 @@ public class HDF5Writer implements PlugInFilter {
|
|||||||
// dataet
|
// dataet
|
||||||
// write levels
|
// write levels
|
||||||
|
|
||||||
System.out.println("selected.length: " + Integer.toString(selected.length));
|
logger.info("selected.length: " + Integer.toString(selected.length));
|
||||||
System.out.println("channelDims.length: " + Integer.toString(channelDims.length));
|
logger.info("channelDims.length: " + Integer.toString(channelDims.length));
|
||||||
if (nLevs == 1) {
|
if (nLevs == 1) {
|
||||||
for (int d = 0; d < selected.length; d++) {
|
for (int d = 0; d < selected.length; d++) {
|
||||||
selected[d] = channelDims[d];
|
selected[d] = channelDims[d];
|
||||||
}
|
}
|
||||||
int stackIndex = imp.getStackIndex(c + 1, 1, f + 1);
|
int stackIndex = imp.getStackIndex(c + 1, 1, f + 1);
|
||||||
System.out.println("Stackindex: " + Integer.toString(stackIndex));
|
logger.info("Stackindex: " + Integer.toString(stackIndex));
|
||||||
// get raw data
|
// get raw data
|
||||||
Object slice = stack.getPixels(stackIndex);
|
Object slice = stack.getPixels(stackIndex);
|
||||||
assert (slice != null);
|
assert (slice != null);
|
||||||
@ -263,7 +268,7 @@ public class HDF5Writer implements PlugInFilter {
|
|||||||
return;
|
return;
|
||||||
}
|
}
|
||||||
} else {
|
} else {
|
||||||
System.out.println("This is NO hyperstack");
|
logger.info("This is NO hyperstack");
|
||||||
// String title = imp.getTitle();
|
// String title = imp.getTitle();
|
||||||
// int nDims = imp.getNDimensions();
|
// int nDims = imp.getNDimensions();
|
||||||
// int nFrames = imp.getNFrames();
|
// int nFrames = imp.getNFrames();
|
||||||
@@ -272,14 +277,14 @@ public class HDF5Writer implements PlugInFilter {
 // int nRows = imp.getHeight();
 // int nCols = imp.getWidth();
 // boolean isComposite = imp.isComposite() ;
-// System.out.println("isComposite: "+Boolean.toString(isComposite));
-// System.out.println("Saving image \""+title+"\"");
-// System.out.println("nDims: "+Integer.toString(nDims));
-// System.out.println("nFrames: "+Integer.toString(nFrames));
-// System.out.println("nChannels: "+Integer.toString(nChannels));
-// System.out.println("nSlices: "+Integer.toString(nLevs));
-// System.out.println("nRows: "+Integer.toString(nRows));
-// System.out.println("nCols: "+Integer.toString(nCols));
+// logger.info("isComposite: "+Boolean.toString(isComposite));
+// logger.info("Saving image \""+title+"\"");
+// logger.info("nDims: "+Integer.toString(nDims));
+// logger.info("nFrames: "+Integer.toString(nFrames));
+// logger.info("nChannels: "+Integer.toString(nChannels));
+// logger.info("nSlices: "+Integer.toString(nLevs));
+// logger.info("nRows: "+Integer.toString(nRows));
+// logger.info("nCols: "+Integer.toString(nCols));

 gd.addStringField(imp.getTitle(), "");
 gd.showDialog();
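The dialog above only collects a dataset/variable name; reading it back follows GenericDialog's usual getNext* pattern. A short sketch (helper name is illustrative):

import ij.gui.GenericDialog;

// Prompt for the variable name the image should be written to; null on cancel.
static String promptForVarName(String imageTitle) {
    GenericDialog gd = new GenericDialog("Save to HDF5");
    gd.addStringField(imageTitle, "");
    gd.showDialog();
    if (gd.wasCanceled()) {
        return null;
    }
    return gd.getNextString(); // fields are read back in insertion order
}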
@@ -314,12 +319,12 @@ public class HDF5Writer implements PlugInFilter {
 int imgColorDepth;
 int imgColorType;

-System.out.println("writing data to variable: " + varName);
+logger.info("writing data to variable: " + varName);

 String dataSetName = getDataSetDescriptor(varName);
-System.out.println("dataset name: " + dataSetName);
+logger.info("dataset name: " + dataSetName);
 String groupName = getGroupDescriptor(varName);
-System.out.println("group name: " + groupName);
+logger.info("group name: " + groupName);

 // ensure group exists
 Group group = createGroupRecursive(groupName, null, outFile);
@@ -342,7 +347,7 @@ public class HDF5Writer implements PlugInFilter {
 } else {
 // color images have 4 dimensions, grey value images
 // have 3.
-System.out.println("adding 4 dimensions");
+logger.info("adding 4 dimensions");
 dims = new long[4];
 dims[0] = nLevels;
 dims[1] = nRows;
@@ -356,7 +361,7 @@ public class HDF5Writer implements PlugInFilter {
 dims[0] = nRows;
 dims[1] = nCols;
 } else {
-System.out.println("adding 3 dimensions");
+logger.info("adding 3 dimensions");
 dims = new long[3];
 dims[0] = nLevels;
 dims[1] = nRows;
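The two branches differ only in whether a color-component axis is added to the (level, row, column) shape. A hypothetical restatement (the trailing position of the RGB axis is an assumption, since the tail of the 4-D branch lies outside these hunks):

// Sketch only: shape for grey (3-D) vs. RGB (4-D, assumed trailing color axis) data.
static long[] shapeFor(int nLevels, int nRows, int nCols, boolean isRGB) {
    return isRGB
        ? new long[] { nLevels, nRows, nCols, 3 }
        : new long[] { nLevels, nRows, nCols };
}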
@@ -386,23 +391,23 @@ public class HDF5Writer implements PlugInFilter {
 // supported data types
 // FIXME: set the right signed and precision stuff
 if (imgColorType == ImagePlus.GRAY8) {
-System.out.println(" bit depth: " + imgColorDepth + ", type: GRAY8");
+logger.info(" bit depth: " + imgColorDepth + ", type: GRAY8");
 type = new H5Datatype(Datatype.CLASS_CHAR, Datatype.NATIVE, Datatype.NATIVE, Datatype.SIGN_NONE);
 } else if (imgColorType == ImagePlus.GRAY16) {
-System.out.println(" bit depth: " + imgColorDepth + ", type: GRAY16");
+logger.info(" bit depth: " + imgColorDepth + ", type: GRAY16");
 int typeSizeInByte = 2;
 type = new H5Datatype(Datatype.CLASS_INTEGER, typeSizeInByte, Datatype.NATIVE, Datatype.SIGN_NONE);
 } else if (imgColorType == ImagePlus.GRAY32) {
-System.out.println(" bit depth: " + imgColorDepth + ", type: GRAY32");
+logger.info(" bit depth: " + imgColorDepth + ", type: GRAY32");
 // int typeSizeInByte = 4;
 type = new H5Datatype(Datatype.CLASS_FLOAT, Datatype.NATIVE, Datatype.NATIVE, -1);
 } else if (imgColorType == ImagePlus.COLOR_RGB) {
-System.out.println(" bit depth: " + imgColorDepth + ", type: COLOR_RGB");
+logger.info(" bit depth: " + imgColorDepth + ", type: COLOR_RGB");
 type = new H5Datatype(Datatype.CLASS_CHAR, Datatype.NATIVE, Datatype.NATIVE, Datatype.SIGN_NONE);
 } else if (imgColorType == ImagePlus.COLOR_256) {
 // FIXME: not supported yet
-System.out.println(" bit depth: " + imgColorDepth + ", type: COLOR_256");
-System.out.println(" ERROR: untested, this might fail.");
+logger.info(" bit depth: " + imgColorDepth + ", type: COLOR_256");
+logger.info(" ERROR: untested, this might fail.");
 type = new H5Datatype(Datatype.CLASS_CHAR, Datatype.NATIVE, Datatype.NATIVE, Datatype.SIGN_NONE);
 }

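This chain maps each ImageJ pixel type onto an HDF5 datatype via the four-argument H5Datatype constructor (class, size in bytes, byte order, sign); -1 is the library's NATIVE constant. Restated as a hypothetical helper, not the plugin's code:

import hdf.object.Datatype;            // ncsa.hdf.object in releases of this era
import hdf.object.h5.H5Datatype;
import ij.ImagePlus;

static H5Datatype datatypeFor(int imgColorType) throws Exception {
    switch (imgColorType) {
        case ImagePlus.GRAY8:
        case ImagePlus.COLOR_RGB:      // unsigned bytes per component
        case ImagePlus.COLOR_256:      // untested above as well
            return new H5Datatype(Datatype.CLASS_CHAR, Datatype.NATIVE, Datatype.NATIVE, Datatype.SIGN_NONE);
        case ImagePlus.GRAY16:
            return new H5Datatype(Datatype.CLASS_INTEGER, 2, Datatype.NATIVE, Datatype.SIGN_NONE);
        case ImagePlus.GRAY32:
            return new H5Datatype(Datatype.CLASS_FLOAT, Datatype.NATIVE, Datatype.NATIVE, -1);
        default:
            throw new IllegalArgumentException("unsupported color type: " + imgColorType);
    }
}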
@@ -465,7 +470,7 @@ public class HDF5Writer implements PlugInFilter {
 }
 // get pixel sizes
 ij.measure.Calibration cal = imp.getCalibration();
-System.out.println(" Element-Size in um (level,row,col): " + cal.pixelDepth + ", " + cal.pixelHeight + ", " + cal.pixelWidth);
+logger.info(" Element-Size in um (level,row,col): " + cal.pixelDepth + ", " + cal.pixelHeight + ", " + cal.pixelWidth);

 float[] element_sizes = new float[3];
 element_sizes[0] = (float) cal.pixelDepth;
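The log line assumes micrometres, but an ImageJ Calibration carries its own unit string, so the label only holds when cal.getUnit() really is a micron unit. The extraction itself, in the (level, row, col) order used above:

import ij.ImagePlus;
import ij.measure.Calibration;

// Sketch: element sizes as (level, row, col); check cal.getUnit() before
// labelling them "um".
static float[] elementSizes(ImagePlus imp) {
    Calibration cal = imp.getCalibration();
    return new float[] {
        (float) cal.pixelDepth,   // spacing between levels/slices
        (float) cal.pixelHeight,  // row spacing
        (float) cal.pixelWidth    // column spacing
    };
}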
@@ -545,8 +550,8 @@ public class HDF5Writer implements PlugInFilter {
 } else {
 String subgroupRelativName = groupRelativName.substring(posOfSlash);
 String currentGroup = groupRelativName.substring(0, posOfSlash);
-System.out.println("Create: " + currentGroup);
-System.out.println("Call back for: " + subgroupRelativName);
+logger.info("Create: " + currentGroup);
+logger.info("Call back for: " + subgroupRelativName);
 try {
 Group newGroup;
 String newGroupName;
@@ -555,7 +560,7 @@ public class HDF5Writer implements PlugInFilter {
 else
 newGroupName = group.getFullName() + "/" + currentGroup;

-System.out.println("try opening: " + newGroupName);
+logger.info("try opening: " + newGroupName);
 newGroup = (Group) file.get(newGroupName);

 if (newGroup == null)
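createGroupRecursive walks the path one component at a time: open the group if it exists, create it otherwise, then recurse on the remainder. A condensed, iterative sketch using the same FileFormat calls visible above (error handling elided; not the plugin's exact code):

import hdf.object.FileFormat;   // ncsa.hdf.object in releases of this era
import hdf.object.Group;

static Group ensureGroup(String path, FileFormat file) throws Exception {
    Group parent = (Group) file.get("/");                // start at the root group
    for (String component : path.split("/")) {
        if (component.isEmpty()) continue;               // skip leading/double slashes
        String full = parent.isRoot() ? "/" + component
                                      : parent.getFullName() + "/" + component;
        Group child = (Group) file.get(full);            // open if already present
        if (child == null) {
            child = file.createGroup(component, parent); // otherwise create it
        }
        parent = child;
    }
    return parent;
}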
@@ -632,11 +637,11 @@ public class HDF5Writer implements PlugInFilter {
 obj = (Metadata) members.get(i);
 if (obj instanceof Attribute) {
 try {
-System.out.println(((Attribute) obj).getName());
+logger.info(((Attribute) obj).getName());
 attributes.add((Attribute) obj);
 } catch (java.lang.UnsupportedOperationException e) {
-System.out.println("Caught UnsupportedOperationException datasets2.add((Dataset) obj)");
-System.out.println(e.getMessage());
+logger.info("Caught UnsupportedOperationException datasets2.add((Dataset) obj)");
+logger.info(e.getMessage());
 }
 }
 }
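The surrounding loop is the usual pattern for the legacy HDF object API: getMetadata() returns a raw list mixing attributes with other metadata, and getName() can throw UnsupportedOperationException for some entries, hence the catch. A compact sketch of the same idea:

import java.util.ArrayList;
import java.util.List;
import hdf.object.Attribute;    // ncsa.hdf.object in releases of this era
import hdf.object.HObject;

// Collect the HDF5 attributes of any object, skipping entries whose
// accessors are unsupported (mirrors the try/catch above).
static List<Attribute> attributesOf(HObject obj) throws Exception {
    List<Attribute> attributes = new ArrayList<Attribute>();
    for (Object meta : obj.getMetadata()) {
        if (meta instanceof Attribute) {
            try {
                ((Attribute) meta).getName();            // may throw for odd entries
                attributes.add((Attribute) meta);
            } catch (UnsupportedOperationException e) {
                // skip metadata that cannot be materialized
            }
        }
    }
    return attributes;
}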