replaced System.outs with log messages

ebner 2014-03-06 13:41:53 +01:00
parent a5d175bd2e
commit 580c543a5d
4 changed files with 152 additions and 140 deletions
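The substitution follows the stock java.util.logging pattern: each class gains one static logger named after the class, and every System.out.println becomes logger.info with the same message. A minimal sketch of the pattern, with a hypothetical class name:

import java.util.logging.Logger;

public class Example {
    // one logger per class, named after the class itself
    private static final Logger logger = Logger.getLogger(Example.class.getName());

    void work() {
        // before: System.out.println("starting work");
        logger.info("starting work"); // INFO records reach the console by default
    }
}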

View File

@ -3,10 +3,14 @@ package ch.psi.imagej.hdf5;
import ij.Prefs;
import ij.gui.GenericDialog;
import ij.plugin.PlugIn;
import java.util.logging.Logger;
import java.util.regex.*;
import java.lang.String;
public class HDF5Config implements PlugIn {
private static final Logger logger = Logger.getLogger(HDF5Config.class.getName());
public static String GROUP_VARS_BY_NAME = "HDF5.groupVarsByName";
public static String SHOW_UNMATCHED_DATASET_NAMES = "HDF5.showUnmatchedDataSetNames";
@ -55,7 +59,7 @@ public class HDF5Config implements PlugIn {
}
if (!configDiag.wasOKed()) {
// reset button was pressed
System.out.println("reset button was pressed");
logger.info("reset button was pressed");
// reset all and return a new dialog
configDiag.setVisible(false);
this.run(arg);
@ -64,36 +68,36 @@ public class HDF5Config implements PlugIn {
// get parameters check if they are correct
groupVarsByName = configDiag.getNextBoolean();
System.out.println("groupVarsByName: " + Boolean.toString(groupVarsByName));
logger.info("groupVarsByName: " + Boolean.toString(groupVarsByName));
showUnmatchedDataSetNames = configDiag.getNextBoolean();
System.out.println("showUnmatchedDataSetNames: " + Boolean.toString(showUnmatchedDataSetNames));
logger.info("showUnmatchedDataSetNames: " + Boolean.toString(showUnmatchedDataSetNames));
groupVarsByNameFormatGroup = configDiag.getNextString();
System.out.println("groupVarsByNameFormatGroup: " + groupVarsByNameFormatGroup);
logger.info("groupVarsByNameFormatGroup: " + groupVarsByNameFormatGroup);
groupVarsByNameFormat = configDiag.getNextString();
System.out.println("groupVarsByNameFormat: " + groupVarsByNameFormat);
logger.info("groupVarsByNameFormat: " + groupVarsByNameFormat);
// dollarRegexpForGrouping = configDiag.getNextString();
// System.out.println("dollarRegexpForGrouping: " +
// logger.info("dollarRegexpForGrouping: " +
// dollarRegexpForGrouping);
try {
String[] formatTokens = HDF5GroupedVarnames.parseFormatString(groupVarsByNameFormat, dollarRegexpForGrouping);
for (int i = 0; i < formatTokens.length; i++) {
System.out.println("tok " + Integer.toString(i) + " : " + formatTokens[i]);
logger.info("tok " + Integer.toString(i) + " : " + formatTokens[i]);
}
} catch (PatternSyntaxException e) {
// produce an error dialog and start over
String errMsg = e.getMessage();
System.out.println(errMsg);
logger.info(errMsg);
// reset all and return a new dialog
configDiag.setVisible(false);
this.run(arg);
return;
}
System.out.println("Saving...");
logger.info("Saving...");
// all OK and "Save" was pressed, so save it...
Prefs.set(GROUP_VARS_BY_NAME, groupVarsByName);
@ -146,7 +150,7 @@ public class HDF5Config implements PlugIn {
String dollarRegexpForGrouping = "[0-9]+"; // default
return dollarRegexpForGrouping;
} else {
System.out.println("No default value for key: " + key);
logger.info("No default value for key: " + key);
return null;
}
}
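The one-for-one substitution preserves the old behaviour exactly: in the PatternSyntaxException handler above, only e.getMessage() is logged at INFO, so the stack trace is still discarded. java.util.logging can carry the exception object in the log record. A self-contained sketch of that variant (class name and message wording are illustrative, not from the plugin):

import java.util.logging.Level;
import java.util.logging.Logger;
import java.util.regex.Pattern;
import java.util.regex.PatternSyntaxException;

public class LogWithTrace {
    private static final Logger logger = Logger.getLogger(LogWithTrace.class.getName());

    static void compile(String regexp) {
        try {
            Pattern.compile(regexp);
        } catch (PatternSyntaxException e) {
            // WARNING suits an error path better than INFO, and passing the
            // exception itself keeps the stack trace in the log record
            logger.log(Level.WARNING, "Invalid format string: " + e.getMessage(), e);
        }
    }

    public static void main(String[] args) {
        compile("[unclosed"); // malformed on purpose, triggers the handler
    }
}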

View File

@ -1,5 +1,6 @@
package ch.psi.imagej.hdf5;
import java.util.logging.Logger;
import java.util.regex.*;
import java.util.ArrayList;
import java.util.Arrays;
@ -8,6 +9,8 @@ import java.util.List;
public class HDF5GroupedVarnames {
private static final Logger logger = Logger.getLogger(HDF5GroupedVarnames.class.getName());
private final List<String> matchedVarNames = new ArrayList<String>();
private final List<String> unMatchedVarNames = new ArrayList<String>();
private final List<TimeFrame> frameList = new ArrayList<TimeFrame>();
@ -26,7 +29,7 @@ public class HDF5GroupedVarnames {
boolean rightOrderOfFormatVars = groupVarsByNameFormat.indexOf("$T") < groupVarsByNameFormat.indexOf("$C");
for (int i = 0; i < formatTokens.length; i++) {
System.out.println("tok " + Integer.toString(i) + " : " + formatTokens[i]);
logger.info("tok " + Integer.toString(i) + " : " + formatTokens[i]);
}
if (formatTokens.length < 2 || !containsFormatVars || !rightOrderOfFormatVars) {
throw new PatternSyntaxException("Your format string has errors. " + "You must provide $T and $C and " + "also in correct order!", groupVarsByNameFormat, -1);
@ -34,7 +37,7 @@ public class HDF5GroupedVarnames {
String regexp = groupVarsByNameFormat;
regexp = regexp.replace("$T", dollarRegexpForGrouping);
regexp = regexp.replace("$C", dollarRegexpForGrouping);
System.out.println(regexp);
logger.info(regexp);
// check if we have a regexp;
Pattern.compile(regexp);
return formatTokens;
@ -48,14 +51,14 @@ public class HDF5GroupedVarnames {
} catch (PatternSyntaxException e) {
// produce an error dialog and start over
String errMsg = e.getMessage();
System.out.println(errMsg);
logger.info(errMsg);
return;
}
String regexp = groupVarsByNameFormat;
regexp = regexp.replace("$T", dollarRegexpForGrouping);
regexp = regexp.replace("$C", dollarRegexpForGrouping);
System.out.println(regexp);
logger.info(regexp);
// check if we have a regexp;
Pattern p = null;
p = Pattern.compile(regexp);
@ -66,7 +69,7 @@ public class HDF5GroupedVarnames {
Matcher m = p.matcher(varNames[i]);
boolean b = m.matches();
if (b) {
System.out.println(varNames[i]);
logger.info(varNames[i]);
matchedVarNames.add(varNames[i]);
} else {
unMatchedVarNames.add(varNames[i]);
@ -101,14 +104,14 @@ public class HDF5GroupedVarnames {
}
if (tokens.length < 2 || tokens.length > 3) {
System.out.println("Error parsing varname!");
logger.info("Error parsing varname!");
} else {
Integer channelIndex = new Integer(tokens[1]);
System.out.println("channelIndex: " + channelIndex.toString());
System.out.println("left token: " + tokens[0]);
logger.info("channelIndex: " + channelIndex.toString());
logger.info("left token: " + tokens[0]);
tokens = tokens[0].split("/t");
Integer frameIndex = new Integer(tokens[1]);
System.out.println("frameIndex: " + frameIndex.toString());
logger.info("frameIndex: " + frameIndex.toString());
if (minFrameIndex == -1)
minFrameIndex = frameIndex.intValue();
@ -135,7 +138,7 @@ public class HDF5GroupedVarnames {
frame.addChannel(channelIndex.intValue());
frameList.add(frame);
}
// System.out.println(frame.toString());
// logger.info(frame.toString());
}
}
}
@ -212,8 +215,8 @@ public class HDF5GroupedVarnames {
}
public void setFrameAndChannelRange(int minFrame, int skipFrame, int maxFrame, int minChannel, int skipChannel, int maxChannel) {
System.out.println("Setting frame range: " + Integer.toString(minFrame) + ":" + Integer.toString(skipFrame) + ":" + Integer.toString(maxFrame));
System.out.println("Setting channel range: " + Integer.toString(minChannel) + ":" + Integer.toString(skipChannel) + ":" + Integer.toString(maxChannel));
logger.info("Setting frame range: " + Integer.toString(minFrame) + ":" + Integer.toString(skipFrame) + ":" + Integer.toString(maxFrame));
logger.info("Setting channel range: " + Integer.toString(minChannel) + ":" + Integer.toString(skipChannel) + ":" + Integer.toString(maxChannel));
if (hasAllFramesInRange()) {
// copy frames
List<TimeFrame> completeFrameList = new ArrayList<TimeFrame>(frameList);
@ -225,7 +228,7 @@ public class HDF5GroupedVarnames {
TimeFrame frame = new TimeFrame(frameAllChannels.getFrameIndex());
// TODO remove unwanted channels
for (int c = minChannel; c < maxChannel + 1; c += skipChannel) {
// System.out.println("Adding channels: " +
// logger.info("Adding channels: " +
// Integer.toString(c));
frame.addChannel(c);
}
@ -235,9 +238,9 @@ public class HDF5GroupedVarnames {
}
// TODO update min/max of frames/channels
nChannels = ((maxChannel - minChannel) / skipChannel) + 1;
System.out.println("Adding nChannels: " + Integer.toString(nChannels));
logger.info("Adding nChannels: " + Integer.toString(nChannels));
} else {
System.out.println("-------------------------\n" + "hasAllFramesInRange==false\n" + "-------------------------");
logger.info("-------------------------\n" + "hasAllFramesInRange==false\n" + "-------------------------");
// copy frames
List<TimeFrame> completeFrameList = new ArrayList<TimeFrame>(frameList);
// clear frames
@ -246,12 +249,12 @@ public class HDF5GroupedVarnames {
for (int f = minFrame; f < maxFrame + 1; f += skipFrame) {
TimeFrame frame = new TimeFrame(f);
int idx = completeFrameList.indexOf(frame);
// System.out.println("index of frame in list: " +
// logger.info("index of frame in list: " +
// Integer.toString(idx));
if (idx != -1) {
// TODO remove unwanted channels
for (int c = minChannel; c < maxChannel + 1; c += skipChannel) {
// System.out.println("Adding channels: " +
// logger.info("Adding channels: " +
// Integer.toString(c));
frame.addChannel(c);
}
@ -259,12 +262,12 @@ public class HDF5GroupedVarnames {
// nChannels = frame.getNChannels();
frameList.add(frame);
} else {
System.out.println("Timestep " + Integer.toString(f) + " is missing!");
logger.info("Timestep " + Integer.toString(f) + " is missing!");
}
}
// TODO update min/max of frames/channels
nChannels = ((maxChannel - minChannel) / skipChannel) + 1;
System.out.println("Adding nChannels: " + Integer.toString(nChannels));
logger.info("Adding nChannels: " + Integer.toString(nChannels));
}
}
}
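Most converted calls still build their message by string concatenation (for example the frame-range lines above), which is evaluated even when INFO is disabled. java.util.logging also accepts MessageFormat-style placeholders that are only expanded if the record is actually published. A small sketch of that idiom, modelled on the setFrameAndChannelRange message:

import java.util.logging.Level;
import java.util.logging.Logger;

public class LazyFormatting {
    private static final Logger logger = Logger.getLogger(LazyFormatting.class.getName());

    public static void main(String[] args) {
        int minFrame = 0, skipFrame = 1, maxFrame = 9; // illustrative values
        // {0}, {1}, {2} are formatted only if an INFO record is published
        logger.log(Level.INFO, "Setting frame range: {0}:{1}:{2}",
                new Object[] { minFrame, skipFrame, maxFrame });
    }
}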

View File

@ -134,18 +134,18 @@ public class HDF5Reader implements PlugIn {
attrName = attrSelecD.getNextChoice();
}
System.out.println("Reading attribute");
logger.info("Reading attribute");
Attribute attr = getAttribute(gr, attrName);
System.out.println("Reading attribute is ok");
logger.info("Reading attribute is ok");
if (attr != null)
System.out.println("attr is not null");
System.out.println("attr.getName(): " + attr.getName());
logger.info("attr is not null");
logger.info("attr.getName(): " + attr.getName());
Datatype dType = attr.getType();
System.out.println(dType.getDatatypeDescription());
logger.info(dType.getDatatypeDescription());
Object tmp = attr.getValue();
if (tmp != null)
System.out.println("get value is ok");
logger.info("get value is ok");
if (tmp instanceof String) {
// we have a string
groupVarsByNameFormat = (String) tmp;
@ -156,14 +156,14 @@ public class HDF5Reader implements PlugIn {
for (int i = 0; i < sTmp.length; i++)
groupVarsByNameFormat = groupVarsByNameFormat + sTmp[i];
}
System.out.println("File has format string for grouping: " + groupVarsByNameFormat);
logger.info("File has format string for grouping: " + groupVarsByNameFormat);
} else {
System.out.println("File has no format string for grouping" + ", using default");
logger.info("File has no format string for grouping" + ", using default");
groupVarsByNameFormat = HDF5Config.getDefaultValue(HDF5Config.GROUP_VARS_BY_NAME_FORMAT);
groupVarsByNameFormat = Prefs.get(HDF5Config.GROUP_VARS_BY_NAME_FORMAT, groupVarsByNameFormat);
}
} catch (Exception e) {
System.out.println("Error occured read format string " + "for grouping, using default");
logger.info("Error occured read format string " + "for grouping, using default");
groupVarsByNameFormat = HDF5Config.getDefaultValue(HDF5Config.GROUP_VARS_BY_NAME_FORMAT);
groupVarsByNameFormat = Prefs.get(HDF5Config.GROUP_VARS_BY_NAME_FORMAT, groupVarsByNameFormat);
}
@ -208,7 +208,7 @@ public class HDF5Reader implements PlugIn {
varNames[i] = varList.get(i).getFullName();
}
groupedVarnames.parseVarNames(varNames, groupVarsByNameFormat, dollarRegexpForGrouping);
System.out.println(groupedVarnames.toString());
logger.info(groupedVarnames.toString());
// make the data set selection dialog
minFrameIndex = groupedVarnames.getMinFrameIndex();
@ -249,7 +249,7 @@ public class HDF5Reader implements PlugIn {
varSelections[i] = title;
defaultValues[i] = false;
}
System.out.println("addcheckboxgroup with " + unmatchedVarNames.size() + " rows");
logger.info("addcheckboxgroup with " + unmatchedVarNames.size() + " rows");
gd.addCheckboxGroup(unmatchedVarNames.size(), 1, varSelections, defaultValues);
addScrollBars(gd);
}
@ -282,7 +282,7 @@ public class HDF5Reader implements PlugIn {
if (frameRangeToks.length == 1) {
// single frame
try {
System.out.println("single frame");
logger.info("single frame");
minFrameIndex = Integer.parseInt(frameRangeToks[0]);
maxFrameIndex = minFrameIndex;
wrongFrameRange = false;
@ -292,7 +292,7 @@ public class HDF5Reader implements PlugIn {
} else if (frameRangeToks.length == 2) {
// frame range with skipFrameIndex=1
try {
System.out.println("frame range with skipFrameIndex=1");
logger.info("frame range with skipFrameIndex=1");
minFrameIndex = Integer.parseInt(frameRangeToks[0]);
maxFrameIndex = Integer.parseInt(frameRangeToks[1]);
wrongFrameRange = false;
@ -302,7 +302,7 @@ public class HDF5Reader implements PlugIn {
} else if (frameRangeToks.length == 3) {
// frame range with skipFrameIndex
try {
System.out.println("frame range with skipFrameIndex");
logger.info("frame range with skipFrameIndex");
minFrameIndex = Integer.parseInt(frameRangeToks[0]);
skipFrameIndex = Integer.parseInt(frameRangeToks[1]);
maxFrameIndex = Integer.parseInt(frameRangeToks[2]);
@ -312,7 +312,7 @@ public class HDF5Reader implements PlugIn {
}
} else {
// wrong format
System.out.println("wrong format");
logger.info("wrong format");
wrongFrameRange = true;
}
@ -352,7 +352,7 @@ public class HDF5Reader implements PlugIn {
}
if (wrongFrameRange || wrongChannelRange) {
// show dialog again
System.out.println("show dialog again");
logger.info("show dialog again");
// TODO reset dialog when possible
gd = new GenericDialog("Range Selection");
gd.addMessage("Select frames and channels you want to read");
@ -363,7 +363,7 @@ public class HDF5Reader implements PlugIn {
gd.addStringField("Channel selection (start:[step:]end): ",
Integer.toString(minChannelIndex) + ":" + Integer.toString(skipChannelIndex) + ":" + Integer.toString(maxChannelIndex));
gd.showDialog();
System.out.println("read ranges again");
logger.info("read ranges again");
frameRange = gd.getNextString();
channelRange = gd.getNextString();
@ -385,7 +385,7 @@ public class HDF5Reader implements PlugIn {
varList.add((Dataset) ds);
}
} catch (Exception e) {
System.out.println("The file does not contain a variable " + "with name " + "`" + dsName + "`!");
logger.info("The file does not contain a variable " + "with name " + "`" + dsName + "`!");
}
}
} else {
@ -395,11 +395,11 @@ public class HDF5Reader implements PlugIn {
} else if (varList.size() > 1000) {
System.out.println("#######");
logger.info("#######");
for(Dataset d: varList){
System.out.println(d.getFullName());
logger.info(d.getFullName());
}
System.out.println("#######");
logger.info("#######");
/*-----------------------------------------------------------------
* FIXME: quick and dirty hack for files with more than 1000
@ -448,7 +448,7 @@ public class HDF5Reader implements PlugIn {
varSelections[i] = title;
defaultValues[i] = false;
}
System.out.println("addcheckboxgroup with " + varList.size() + " rows");
logger.info("addcheckboxgroup with " + varList.size() + " rows");
gd.addCheckboxGroup(varList.size(), 1, varSelections, defaultValues);
addScrollBars(gd);
gd.showDialog();
@ -478,12 +478,12 @@ public class HDF5Reader implements PlugIn {
try {
TimeFrame f = groupedVarnames.getFrame(0);
if (f == null)
System.out.println("frame is null");
logger.info("frame is null");
if (formatTokens.length == 2)
dsName = formatTokens[0] + Integer.toString(f.getFrameIndex()) + formatTokens[1] + Integer.toString(f.getChannelIndices()[0]);
else if (formatTokens.length == 3)
dsName = formatTokens[0] + Integer.toString(f.getFrameIndex()) + formatTokens[1] + Integer.toString(f.getChannelIndices()[0]) + formatTokens[2];
System.out.println("VarName: " + dsName);
logger.info("VarName: " + dsName);
HObject ds = inFile.get(dsName);
if (ds != null && ds instanceof Dataset) {
var = (Dataset) ds;
@ -511,7 +511,7 @@ public class HDF5Reader implements PlugIn {
elem_sizes[1] = 1.0;
elem_sizes[2] = 1.0;
} else {
System.out.println("Reading element_size_um");
logger.info("Reading element_size_um");
float[] tmp = null;
try {
tmp = ((float[]) elemsize_att.getValue());
@ -558,7 +558,7 @@ public class HDF5Reader implements PlugIn {
// create a new image stack and fill in the data
ImageStack stack = new ImageStack(nCols, nRows, nFrames * nSlices * nChannels);
System.out.println("stackSize: " + Integer.toString(stack.getSize()));
logger.info("stackSize: " + Integer.toString(stack.getSize()));
ImagePlus imp = new ImagePlus();
// to get getFrameIndex() working
@ -575,7 +575,7 @@ public class HDF5Reader implements PlugIn {
// get current frame
TimeFrame f = groupedVarnames.getFrame(fIdx);
if (f == null)
System.out.println("frame is null");
logger.info("frame is null");
// get channel indices
// TODO: check if frame has same parameters as first,
@ -588,7 +588,7 @@ public class HDF5Reader implements PlugIn {
else if (formatTokens.length == 3)
dsName = formatTokens[0] + Integer.toString(f.getFrameIndex()) + formatTokens[1] + Integer.toString(f.getChannelIndices()[cIdx]) + formatTokens[2];
System.out.println("VarName: " + dsName);
logger.info("VarName: " + dsName);
HObject ds = inFile.get(dsName);
if (ds != null && ds instanceof Dataset) {
@ -648,7 +648,7 @@ public class HDF5Reader implements PlugIn {
short[] tmp = (short[]) extractSubarray(wholeDataset, startIdx, numElements);
stack.setPixels(tmp, imp.getStackIndex(cIdx + 1, lev + 1, fIdx + 1));
} else if (wholeDataset instanceof int[]) {
System.out.println("Datatype `int` is not supported. " + "Skipping whole frame!");
logger.info("Datatype `int` is not supported. " + "Skipping whole frame!");
// int[] tmp = (int[])
// extractSubarray(wholeDataset,
// startIdx,
@ -666,7 +666,7 @@ public class HDF5Reader implements PlugIn {
// imp.getStackIndex(cIdx+1,lev+1,fIdx+1));
// }
} else if (wholeDataset instanceof long[]) {
System.out.println("Datatype `long` is not supported. " + "Skipping whole frame!");
logger.info("Datatype `long` is not supported. " + "Skipping whole frame!");
// long[] tmp = (long[])
// extractSubarray(wholeDataset,
// startIdx,
@ -687,7 +687,7 @@ public class HDF5Reader implements PlugIn {
float[] tmp = (float[]) extractSubarray(wholeDataset, startIdx, numElements);
stack.setPixels(tmp, imp.getStackIndex(cIdx + 1, lev + 1, fIdx + 1));
} else if (wholeDataset instanceof double[]) {
System.out.println("Datatype `double` is not supported. " + "Converting whole frame to `float`!");
logger.info("Datatype `double` is not supported. " + "Converting whole frame to `float`!");
float[] tmp = convertDoubleToFloat((double[]) extractSubarray(wholeDataset, startIdx, numElements));
stack.setPixels(tmp, imp.getStackIndex(cIdx + 1, lev + 1, fIdx + 1));
} else {
@ -699,32 +699,32 @@ public class HDF5Reader implements PlugIn {
}
IJ.showProgress(1.f);
System.out.println("Creating image plus");
logger.info("Creating image plus");
// stack.trim();
imp = new ImagePlus(directory + name + ": " + groupedVarnames.getFormatString(), stack);
imp.setDimensions(nChannels, nSlices, nFrames);
if (nChannels > 1) {
System.out.println("Creating composite hyperstack with " + Integer.toString(nChannels) + " channels.");
logger.info("Creating composite hyperstack with " + Integer.toString(nChannels) + " channels.");
imp = new CompositeImage(imp, CompositeImage.COMPOSITE);
} else {
System.out.println("Creating grayscale hyperstack.");
logger.info("Creating grayscale hyperstack.");
// imp = new CompositeImage(imp,
// CompositeImage.GRAYSCALE);
}
System.out.println("nFrames: " + Integer.toString(nFrames));
System.out.println("nSlices: " + Integer.toString(nSlices));
logger.info("nFrames: " + Integer.toString(nFrames));
logger.info("nSlices: " + Integer.toString(nSlices));
System.out.println("stackSize: " + Integer.toString(stack.getSize()));
logger.info("stackSize: " + Integer.toString(stack.getSize()));
// set element_size_um
imp.getCalibration().pixelDepth = elem_sizes[0];
imp.getCalibration().pixelHeight = elem_sizes[1];
imp.getCalibration().pixelWidth = elem_sizes[2];
// System.out.println(" Min = " + minMaxVal[0] +
// logger.info(" Min = " + minMaxVal[0] +
// ", Max = " + minMaxVal[1]);
// imp.setDisplayRange(1.5*minMaxVal[0], 0.5*minMaxVal[1]);
// imp.resetDisplayRange();
@ -735,12 +735,12 @@ public class HDF5Reader implements PlugIn {
// channelsIJ[c]);
// imp.setSlice(c+1);
imp.setPosition(c + 1, 1, 1);
System.out.println("Current channel: " + Integer.toString(imp.getChannel() - 1));
logger.info("Current channel: " + Integer.toString(imp.getChannel() - 1));
imp.setDisplayRange(minValChannel[c], maxValChannel[c]);
// ,
// channelsIJ[c]);
System.out.println("Setting display range for channel " + Integer.toString(c) + " (ij idx: " + Integer.toString(channelsIJ[c]) + "): \n\t" + Double.toString(minValChannel[c])
logger.info("Setting display range for channel " + Integer.toString(c) + " (ij idx: " + Integer.toString(channelsIJ[c]) + "): \n\t" + Double.toString(minValChannel[c])
+ "/" + Double.toString(maxValChannel[c]));
}
@ -763,12 +763,12 @@ public class HDF5Reader implements PlugIn {
Datatype datatypeIfUnsupported = null;
long[] extent = var.getDims();
System.out.println("Reading Variable: " + var.getName());
System.out.println(" Rank = " + rank + ", Data-type = " + datatype.getDatatypeDescription());
logger.info("Reading Variable: " + var.getName());
logger.info(" Rank = " + rank + ", Data-type = " + datatype.getDatatypeDescription());
System.out.print(" Extent in px (level,row,col):");
for (int d = 0; d < rank; ++d)
System.out.print(" " + extent[d]);
System.out.println("");
logger.info("");
IJ.showStatus("Reading Variable: " + var.getName() + " (" + extent[0] + " slices)");
Attribute elemsize_att = getAttribute(var, "element_size_um");
@ -778,7 +778,7 @@ public class HDF5Reader implements PlugIn {
elem_sizes[1] = 1.0;
elem_sizes[2] = 1.0;
} else {
System.out.println("Reading element_size_um");
logger.info("Reading element_size_um");
Object tmp = elemsize_att.getValue();
if (tmp instanceof float[]) {
elem_sizes[0] = ((float[]) tmp)[0];
@ -797,7 +797,7 @@ public class HDF5Reader implements PlugIn {
elem_sizes[2] = 1.0;
}
}
System.out.println(" Element-Size in um (level,row,col): " + elem_sizes[0] + ", " + elem_sizes[1] + ", " + elem_sizes[2]);
logger.info(" Element-Size in um (level,row,col): " + elem_sizes[0] + ", " + elem_sizes[1] + ", " + elem_sizes[2]);
// nice gadget to update the progress bar
long progressDivisor = extent[0] / 50; // we assume 50 process steps
@ -806,7 +806,7 @@ public class HDF5Reader implements PlugIn {
// check if we have an unsupported datatype
if (datatype.getDatatypeClass() == Datatype.CLASS_INTEGER && (datatype.getDatatypeSize() == 4 || datatype.getDatatypeSize() == 8)) {
System.out.println("Datatype not supported by ImageJ");
logger.info("Datatype not supported by ImageJ");
GenericDialog typeSelDiag = new GenericDialog("Datatype Selection");
typeSelDiag.addMessage("The datatype `" + datatype.getDatatypeDescription() + "` is not supported by ImageJ.\n\n");
typeSelDiag.addMessage("Please select your wanted datatype.\n");
@ -821,11 +821,11 @@ public class HDF5Reader implements PlugIn {
}
int selection = typeSelDiag.getNextChoiceIndex();
if (selection == 0) {
System.out.println("float selected");
logger.info("float selected");
datatypeIfUnsupported = new H5Datatype(Datatype.CLASS_FLOAT, Datatype.NATIVE, Datatype.NATIVE, -1);
}
if (selection == 1) {
System.out.println("short selected");
logger.info("short selected");
int typeSizeInByte = 2;
datatypeIfUnsupported = new H5Datatype(Datatype.CLASS_INTEGER, typeSizeInByte, Datatype.NATIVE, -1);
}
@ -833,7 +833,7 @@ public class HDF5Reader implements PlugIn {
// read dataset
if (rank == 5 && extent[4] == 3) {
System.out.println(" Detected HyperVolume (type RGB).");
logger.info(" Detected HyperVolume (type RGB).");
// create a new image stack and fill in the data
ImageStack stack = new ImageStack((int) extent[3], (int) extent[2]);
@ -1044,10 +1044,10 @@ public class HDF5Reader implements PlugIn {
int nChannels = 3;
int nSlices = (int) extent[1];
int nFrames = (int) extent[0];
System.out.println("nFrames: " + Integer.toString(nFrames));
System.out.println("nSlices: " + Integer.toString(nSlices));
logger.info("nFrames: " + Integer.toString(nFrames));
logger.info("nSlices: " + Integer.toString(nSlices));
System.out.println("stackSize: " + Integer.toString(stack.getSize()));
logger.info("stackSize: " + Integer.toString(stack.getSize()));
imp.setDimensions(nChannels, nSlices, nFrames);
imp = new CompositeImage(imp, CompositeImage.COMPOSITE);
@ -1068,7 +1068,7 @@ public class HDF5Reader implements PlugIn {
imp.show();
} else if (rank == 4) {
if (extent[3] == 3) {
System.out.println(" Detected color Image (type RGB).");
logger.info(" Detected color Image (type RGB).");
// create a new image stack and fill in the data
ImageStack stack = new ImageStack((int) extent[2], (int) extent[1]);
@ -1245,7 +1245,7 @@ public class HDF5Reader implements PlugIn {
imp.updateStatusbarValue();
} else // we have a HyperVolume
{
System.out.println(" Detected HyperVolume (type GREYSCALE).");
logger.info(" Detected HyperVolume (type GREYSCALE).");
// create a new image stack and fill in the data
ImageStack stack = new ImageStack((int) extent[3], (int) extent[2]);
@ -1342,11 +1342,11 @@ public class HDF5Reader implements PlugIn {
int nFrames = (int) extent[0];
Integer nFramesI = new Integer(nFrames);
Integer nSlicesI = new Integer(nSlices);
System.out.println("nFrames: " + nFramesI.toString());
System.out.println("nSlices: " + nSlicesI.toString());
logger.info("nFrames: " + nFramesI.toString());
logger.info("nSlices: " + nSlicesI.toString());
Integer myStackSize = new Integer(stack.getSize());
System.out.println("stackSize: " + myStackSize.toString());
logger.info("stackSize: " + myStackSize.toString());
imp.setDimensions(nChannels, nSlices, nFrames);
imp.setOpenAsHyperStack(true);
@ -1360,7 +1360,7 @@ public class HDF5Reader implements PlugIn {
imp.show();
}
} else if (rank == 3 && extent[2] == 3) {
System.out.println("This is an rgb image");
logger.info("This is an rgb image");
// create a new image stack and fill in the data
ImageStack stack = new ImageStack((int) extent[1], (int) extent[0]);
@ -1515,7 +1515,7 @@ public class HDF5Reader implements PlugIn {
imp.show();
imp.updateStatusbarValue();
} else if (rank == 3) {
System.out.println("Rank is 3");
logger.info("Rank is 3");
// create a new image stack and fill in the data
ImageStack stack = new ImageStack((int) extent[2], (int) extent[1]);
@ -1605,7 +1605,7 @@ public class HDF5Reader implements PlugIn {
// check if we have an unsupported datatype
if (datatype.getDatatypeClass() == Datatype.CLASS_INTEGER && (datatype.getDatatypeSize() == 4 || datatype.getDatatypeSize() == 8)) {
System.out.println("Datatype not supported by ImageJ");
logger.info("Datatype not supported by ImageJ");
GenericDialog typeSelDiag = new GenericDialog("Datatype Selection");
typeSelDiag.addMessage("The datatype `" + datatype.getDatatypeDescription() + "` is not supported by ImageJ.\n\n");
typeSelDiag.addMessage("Please select your wanted datatype.\n");
@ -1620,11 +1620,11 @@ public class HDF5Reader implements PlugIn {
}
int selection = typeSelDiag.getNextChoiceIndex();
if (selection == 0) {
System.out.println("float selected");
logger.info("float selected");
datatypeIfUnsupported = new H5Datatype(Datatype.CLASS_FLOAT, Datatype.NATIVE, Datatype.NATIVE, -1);
}
if (selection == 1) {
System.out.println("short selected");
logger.info("short selected");
int typeSizeInByte = 2;
datatypeIfUnsupported = new H5Datatype(Datatype.CLASS_INTEGER, typeSizeInByte, Datatype.NATIVE, -1);
}
@ -1693,7 +1693,7 @@ public class HDF5Reader implements PlugIn {
double imgMax = ips.getMax();
double imgMin = ips.getMin();
System.out.println(" Min = " + imgMin + ", Max = " + imgMax);
logger.info(" Min = " + imgMin + ", Max = " + imgMax);
ips.setMinAndMax(imgMin, imgMax);
imp.updateAndDraw();
imp.show();
@ -1927,7 +1927,7 @@ public class HDF5Reader implements PlugIn {
} else if (unsignedConvSelec == 1) {
// convert to float
if (dataIn instanceof short[]) {
System.out.println("Converting to float");
logger.info("Converting to float");
short[] tmpIn = (short[]) dataIn;
float[] tmp = new float[tmpIn.length];
for (int i = 0; i < tmp.length; i++)
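One spot in this file resists the mechanical rewrite: in the hunk at -763 above, the variable's extent is printed piecewise with System.out.print inside a loop, and only the closing println("") became logger.info(""), which now just emits an empty record. A logger has no print-without-newline, so the usual fix is to assemble the line first and log it once. A hedged sketch, reusing the rank and extent names from the hunk with illustrative values:

import java.util.logging.Logger;

public class ExtentLogging {
    private static final Logger logger = Logger.getLogger(ExtentLogging.class.getName());

    public static void main(String[] args) {
        int rank = 3;
        long[] extent = { 10, 512, 512 }; // illustrative values
        // build the whole line, then emit a single log record
        StringBuilder msg = new StringBuilder(" Extent in px (level,row,col):");
        for (int d = 0; d < rank; ++d)
            msg.append(' ').append(extent[d]);
        logger.info(msg.toString());
    }
}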

View File

@ -25,9 +25,11 @@ import ij.io.*;
import ij.plugin.filter.PlugInFilter;
import ij.process.*;
import ij.gui.*;
import java.util.ArrayList;
import java.util.Iterator;
import java.util.List;
import java.util.logging.Logger;
import javax.swing.tree.DefaultMutableTreeNode;
@ -36,6 +38,9 @@ import ncsa.hdf.object.h5.*; // the HDF5 implementation
import ncsa.hdf.hdf5lib.exceptions.HDF5Exception;
public class HDF5Writer implements PlugInFilter {
private static final Logger logger = Logger.getLogger(HDF5Writer.class.getName());
private Boolean _batchMode = false;
private String _batchFileName = null;
@ -96,7 +101,7 @@ public class HDF5Writer implements PlugInFilter {
// check for hyperstack
if (imp.getOpenAsHyperStack() || imp.isHyperStack()) {
System.out.println("This is a hyperstack");
logger.info("This is a hyperstack");
boolean splitChannels = true;
gd.addCheckbox("Split frames and channels", splitChannels);
gd.addStringField(imp.getTitle(), "/t$T/channel$C");
@ -108,14 +113,14 @@ public class HDF5Writer implements PlugInFilter {
int nRows = imp.getHeight();
int nCols = imp.getWidth();
boolean isComposite = imp.isComposite();
System.out.println("isComposite: " + Boolean.toString(isComposite));
System.out.println("Saving image \"" + title + "\"");
System.out.println("nDims: " + Integer.toString(nDims));
System.out.println("nFrames: " + Integer.toString(nFrames));
System.out.println("nChannels: " + Integer.toString(nChannels));
System.out.println("nSlices: " + Integer.toString(nLevs));
System.out.println("nRows: " + Integer.toString(nRows));
System.out.println("nCols: " + Integer.toString(nCols));
logger.info("isComposite: " + Boolean.toString(isComposite));
logger.info("Saving image \"" + title + "\"");
logger.info("nDims: " + Integer.toString(nDims));
logger.info("nFrames: " + Integer.toString(nFrames));
logger.info("nChannels: " + Integer.toString(nChannels));
logger.info("nSlices: " + Integer.toString(nLevs));
logger.info("nRows: " + Integer.toString(nRows));
logger.info("nCols: " + Integer.toString(nCols));
gd.showDialog();
if (gd.wasCanceled()) {
IJ.error("Plugin canceled!");
@ -123,22 +128,22 @@ public class HDF5Writer implements PlugInFilter {
}
splitChannels = gd.getNextBoolean();
String formatString = gd.getNextString();
System.out.println("formatString: " + formatString);
System.out.println("Bitdepth: " + imp.getBitDepth());
System.out.println("Saving HDF5 File: " + filename);
logger.info("formatString: " + formatString);
logger.info("Bitdepth: " + imp.getBitDepth());
logger.info("Saving HDF5 File: " + filename);
int imgColorDepth = imp.getBitDepth();
int imgColorType = imp.getType();
Datatype type = null;
if (imgColorType == ImagePlus.GRAY8) {
System.out.println(" bit depth: " + imgColorDepth + ", type: GRAY8");
logger.info(" bit depth: " + imgColorDepth + ", type: GRAY8");
type = new H5Datatype(Datatype.CLASS_CHAR, Datatype.NATIVE, Datatype.NATIVE, Datatype.SIGN_NONE);
} else if (imgColorType == ImagePlus.GRAY16) {
System.out.println(" bit depth: " + imgColorDepth + ", type: GRAY16");
logger.info(" bit depth: " + imgColorDepth + ", type: GRAY16");
int typeSizeInByte = 2;
type = new H5Datatype(Datatype.CLASS_INTEGER, typeSizeInByte, Datatype.NATIVE, Datatype.SIGN_NONE);
} else if (imgColorType == ImagePlus.GRAY32) {
System.out.println(" bit depth: " + imgColorDepth + ", type: GRAY32");
logger.info(" bit depth: " + imgColorDepth + ", type: GRAY32");
// int typeSizeInByte = 4;
type = new H5Datatype(Datatype.CLASS_FLOAT, Datatype.NATIVE, Datatype.NATIVE, -1);
}
@ -176,9 +181,9 @@ public class HDF5Writer implements PlugInFilter {
for (int c = 0; c < nChannels; c++) {
String fullName = makeDataSetName(formatTokens, f, c);
String dataSetName = getDataSetDescriptor(fullName);
System.out.println("dataset name: " + dataSetName);
logger.info("dataset name: " + dataSetName);
String groupName = getGroupDescriptor(fullName);
System.out.println("group name: " + groupName);
logger.info("group name: " + groupName);
// ensure group exists
Group group = createGroupRecursive(groupName, null, outFile);
// create data set
@ -208,14 +213,14 @@ public class HDF5Writer implements PlugInFilter {
// dataset
// write levels
System.out.println("selected.length: " + Integer.toString(selected.length));
System.out.println("channelDims.length: " + Integer.toString(channelDims.length));
logger.info("selected.length: " + Integer.toString(selected.length));
logger.info("channelDims.length: " + Integer.toString(channelDims.length));
if (nLevs == 1) {
for (int d = 0; d < selected.length; d++) {
selected[d] = channelDims[d];
}
int stackIndex = imp.getStackIndex(c + 1, 1, f + 1);
System.out.println("Stackindex: " + Integer.toString(stackIndex));
logger.info("Stackindex: " + Integer.toString(stackIndex));
// get raw data
Object slice = stack.getPixels(stackIndex);
assert (slice != null);
@ -263,7 +268,7 @@ public class HDF5Writer implements PlugInFilter {
return;
}
} else {
System.out.println("This is NO hyperstack");
logger.info("This is NO hyperstack");
// String title = imp.getTitle();
// int nDims = imp.getNDimensions();
// int nFrames = imp.getNFrames();
@ -272,14 +277,14 @@ public class HDF5Writer implements PlugInFilter {
// int nRows = imp.getHeight();
// int nCols = imp.getWidth();
// boolean isComposite = imp.isComposite() ;
// System.out.println("isComposite: "+Boolean.toString(isComposite));
// System.out.println("Saving image \""+title+"\"");
// System.out.println("nDims: "+Integer.toString(nDims));
// System.out.println("nFrames: "+Integer.toString(nFrames));
// System.out.println("nChannels: "+Integer.toString(nChannels));
// System.out.println("nSlices: "+Integer.toString(nLevs));
// System.out.println("nRows: "+Integer.toString(nRows));
// System.out.println("nCols: "+Integer.toString(nCols));
// logger.info("isComposite: "+Boolean.toString(isComposite));
// logger.info("Saving image \""+title+"\"");
// logger.info("nDims: "+Integer.toString(nDims));
// logger.info("nFrames: "+Integer.toString(nFrames));
// logger.info("nChannels: "+Integer.toString(nChannels));
// logger.info("nSlices: "+Integer.toString(nLevs));
// logger.info("nRows: "+Integer.toString(nRows));
// logger.info("nCols: "+Integer.toString(nCols));
gd.addStringField(imp.getTitle(), "");
gd.showDialog();
@ -314,12 +319,12 @@ public class HDF5Writer implements PlugInFilter {
int imgColorDepth;
int imgColorType;
System.out.println("writing data to variable: " + varName);
logger.info("writing data to variable: " + varName);
String dataSetName = getDataSetDescriptor(varName);
System.out.println("dataset name: " + dataSetName);
logger.info("dataset name: " + dataSetName);
String groupName = getGroupDescriptor(varName);
System.out.println("group name: " + groupName);
logger.info("group name: " + groupName);
// ensure group exists
Group group = createGroupRecursive(groupName, null, outFile);
@ -342,7 +347,7 @@ public class HDF5Writer implements PlugInFilter {
} else {
// color images have 4 dimensions, grey value images
// have 3.
System.out.println("adding 4 dimensions");
logger.info("adding 4 dimensions");
dims = new long[4];
dims[0] = nLevels;
dims[1] = nRows;
@ -356,7 +361,7 @@ public class HDF5Writer implements PlugInFilter {
dims[0] = nRows;
dims[1] = nCols;
} else {
System.out.println("adding 3 dimensions");
logger.info("adding 3 dimensions");
dims = new long[3];
dims[0] = nLevels;
dims[1] = nRows;
@ -386,23 +391,23 @@ public class HDF5Writer implements PlugInFilter {
// supported data types
// FIXME: set the right signed and precision stuff
if (imgColorType == ImagePlus.GRAY8) {
System.out.println(" bit depth: " + imgColorDepth + ", type: GRAY8");
logger.info(" bit depth: " + imgColorDepth + ", type: GRAY8");
type = new H5Datatype(Datatype.CLASS_CHAR, Datatype.NATIVE, Datatype.NATIVE, Datatype.SIGN_NONE);
} else if (imgColorType == ImagePlus.GRAY16) {
System.out.println(" bit depth: " + imgColorDepth + ", type: GRAY16");
logger.info(" bit depth: " + imgColorDepth + ", type: GRAY16");
int typeSizeInByte = 2;
type = new H5Datatype(Datatype.CLASS_INTEGER, typeSizeInByte, Datatype.NATIVE, Datatype.SIGN_NONE);
} else if (imgColorType == ImagePlus.GRAY32) {
System.out.println(" bit depth: " + imgColorDepth + ", type: GRAY32");
logger.info(" bit depth: " + imgColorDepth + ", type: GRAY32");
// int typeSizeInByte = 4;
type = new H5Datatype(Datatype.CLASS_FLOAT, Datatype.NATIVE, Datatype.NATIVE, -1);
} else if (imgColorType == ImagePlus.COLOR_RGB) {
System.out.println(" bit depth: " + imgColorDepth + ", type: COLOR_RGB");
logger.info(" bit depth: " + imgColorDepth + ", type: COLOR_RGB");
type = new H5Datatype(Datatype.CLASS_CHAR, Datatype.NATIVE, Datatype.NATIVE, Datatype.SIGN_NONE);
} else if (imgColorType == ImagePlus.COLOR_256) {
// FIXME: not supported yet
System.out.println(" bit depth: " + imgColorDepth + ", type: COLOR_256");
System.out.println(" ERROR: untested, this might fail.");
logger.info(" bit depth: " + imgColorDepth + ", type: COLOR_256");
logger.info(" ERROR: untested, this might fail.");
type = new H5Datatype(Datatype.CLASS_CHAR, Datatype.NATIVE, Datatype.NATIVE, Datatype.SIGN_NONE);
}
@ -465,7 +470,7 @@ public class HDF5Writer implements PlugInFilter {
}
// get pixel sizes
ij.measure.Calibration cal = imp.getCalibration();
System.out.println(" Element-Size in um (level,row,col): " + cal.pixelDepth + ", " + cal.pixelHeight + ", " + cal.pixelWidth);
logger.info(" Element-Size in um (level,row,col): " + cal.pixelDepth + ", " + cal.pixelHeight + ", " + cal.pixelWidth);
float[] element_sizes = new float[3];
element_sizes[0] = (float) cal.pixelDepth;
@ -545,8 +550,8 @@ public class HDF5Writer implements PlugInFilter {
} else {
String subgroupRelativName = groupRelativName.substring(posOfSlash);
String currentGroup = groupRelativName.substring(0, posOfSlash);
System.out.println("Create: " + currentGroup);
System.out.println("Call back for: " + subgroupRelativName);
logger.info("Create: " + currentGroup);
logger.info("Call back for: " + subgroupRelativName);
try {
Group newGroup;
String newGroupName;
@ -555,7 +560,7 @@ public class HDF5Writer implements PlugInFilter {
else
newGroupName = group.getFullName() + "/" + currentGroup;
System.out.println("try opening: " + newGroupName);
logger.info("try opening: " + newGroupName);
newGroup = (Group) file.get(newGroupName);
if (newGroup == null)
@ -632,11 +637,11 @@ public class HDF5Writer implements PlugInFilter {
obj = (Metadata) members.get(i);
if (obj instanceof Attribute) {
try {
System.out.println(((Attribute) obj).getName());
logger.info(((Attribute) obj).getName());
attributes.add((Attribute) obj);
} catch (java.lang.UnsupportedOperationException e) {
System.out.println("Caught UnsupportedOperationException datasets2.add((Dataset) obj)");
System.out.println(e.getMessage());
logger.info("Caught UnsupportedOperationException datasets2.add((Dataset) obj)");
logger.info(e.getMessage());
}
}
}
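The practical payoff of the commit is that, unlike System.out output, these records can be filtered without touching the code, since all four loggers live under the ch.psi.imagej.hdf5 namespace. A sketch of how a host application might quiet or restore the plugin's output; note the strong reference, since java.util.logging holds loggers weakly and an unreferenced logger's level setting can be lost to garbage collection:

import java.util.logging.Level;
import java.util.logging.Logger;

public class PluginLogControl {
    // strong reference so the configured level survives garbage collection
    private static final Logger PLUGIN_LOG = Logger.getLogger("ch.psi.imagej.hdf5");

    public static void main(String[] args) {
        PLUGIN_LOG.setLevel(Level.WARNING); // silence the plugin's INFO chatter
        // ... run the plugin ...
        PLUGIN_LOG.setLevel(Level.INFO);    // restore it while debugging
    }
}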