Removed unnecessary writer code

ebner 2014-09-30 13:48:04 +02:00
parent 10b0052c6f
commit 11e891e8d4
5 changed files with 61 additions and 452 deletions

HDF5GroupedVarnames.java View File

@@ -1,273 +0,0 @@
package ch.psi.imagej.hdf5;
import java.util.logging.Logger;
import java.util.regex.*;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Iterator;
import java.util.List;
public class HDF5GroupedVarnames {
private static final Logger logger = Logger.getLogger(HDF5GroupedVarnames.class.getName());
private final List<String> matchedVarNames = new ArrayList<String>();
private final List<String> unMatchedVarNames = new ArrayList<String>();
private final List<TimeFrame> frameList = new ArrayList<TimeFrame>();
private String[] formatTokens = null;
private String formatString = null;
private int minFrameIndex = -1;
private int maxFrameIndex = -1;
private int minChannelIndex = -1;
private int maxChannelIndex = -1;
private int nChannels = -1;
public static String[] parseFormatString(String groupVarsByNameFormat, String dollarRegexpForGrouping) throws PatternSyntaxException {
String[] formatTokens = null;
formatTokens = groupVarsByNameFormat.split("([$]T|[$]C)");
boolean containsFormatVars = groupVarsByNameFormat.contains("$T") && groupVarsByNameFormat.contains("$C");
boolean rightOrderOfFormatVars = groupVarsByNameFormat.indexOf("$T") < groupVarsByNameFormat.indexOf("$C");
for (int i = 0; i < formatTokens.length; i++) {
logger.info("tok " + Integer.toString(i) + " : " + formatTokens[i]);
}
if (formatTokens.length < 2 || !containsFormatVars || !rightOrderOfFormatVars) {
throw new PatternSyntaxException("Your format string has errors. " + "You must provide $T and $C and " + "also in correct order!", groupVarsByNameFormat, -1);
}
String regexp = groupVarsByNameFormat;
regexp = regexp.replace("$T", dollarRegexpForGrouping);
regexp = regexp.replace("$C", dollarRegexpForGrouping);
logger.info(regexp);
// check if we have a regexp;
Pattern.compile(regexp);
return formatTokens;
}
public void parseVarNames(String[] varNames, String groupVarsByNameFormat, String dollarRegexpForGrouping) {
// save format string
formatString = groupVarsByNameFormat;
try {
formatTokens = parseFormatString(groupVarsByNameFormat, dollarRegexpForGrouping);
} catch (PatternSyntaxException e) {
// produce an error dialog and start over
String errMsg = e.getMessage();
logger.info(errMsg);
return;
}
String regexp = groupVarsByNameFormat;
regexp = regexp.replace("$T", dollarRegexpForGrouping);
regexp = regexp.replace("$C", dollarRegexpForGrouping);
logger.info(regexp);
// check if we have a regexp;
Pattern p = null;
p = Pattern.compile(regexp);
/*---------------------------------------------------------------------
* parse var names
*---------------------------------------------------------------------*/
for (int i = 0; i < varNames.length; i++) {
Matcher m = p.matcher(varNames[i]);
boolean b = m.matches();
if (b) {
logger.info(varNames[i]);
matchedVarNames.add(varNames[i]);
} else {
unMatchedVarNames.add(varNames[i]);
}
}
splitGroupedVarnames();
// ugly hack for sorting ArrayList
Object[] frameListAsArray = frameList.toArray();
Arrays.sort(frameListAsArray);
for (int i = 0; i < frameListAsArray.length; i++)
frameList.set(i, (TimeFrame) frameListAsArray[i]);
}
public TimeFrame getFrame(int i) {
if (i < frameList.size() && i > -1)
return frameList.get(i);
else
return null;
}
private void splitGroupedVarnames() {
Iterator<String> vars = matchedVarNames.iterator();
while (vars.hasNext()) {
String varName = vars.next();
String[] tokens = null;
if (formatTokens.length == 2) {
tokens = varName.split(formatTokens[1]);
} else if (formatTokens.length == 3) {
tokens = varName.split(formatTokens[2]);
varName = tokens[0];
tokens = varName.split(formatTokens[1]);
}
if (tokens.length < 2 || tokens.length > 3) {
logger.info("Error parsing varname!");
} else {
Integer channelIndex = new Integer(tokens[1]);
logger.info("channelIndex: " + channelIndex.toString());
logger.info("left token: " + tokens[0]);
tokens = tokens[0].split("/t");
Integer frameIndex = new Integer(tokens[1]);
logger.info("frameIndex: " + frameIndex.toString());
if (minFrameIndex == -1)
minFrameIndex = frameIndex.intValue();
minFrameIndex = Math.min(minFrameIndex, frameIndex.intValue());
if (maxFrameIndex == -1)
maxFrameIndex = frameIndex.intValue();
maxFrameIndex = Math.max(maxFrameIndex, frameIndex.intValue());
if (minChannelIndex == -1)
minChannelIndex = channelIndex.intValue();
minChannelIndex = Math.min(minChannelIndex, channelIndex.intValue());
if (maxChannelIndex == -1)
maxChannelIndex = channelIndex.intValue();
maxChannelIndex = Math.max(maxChannelIndex, channelIndex.intValue());
TimeFrame frame = new TimeFrame(frameIndex.intValue());
int idx = frameList.indexOf(frame);
if (idx != -1) {
frame = frameList.get(idx);
frame.addChannel(channelIndex.intValue());
} else {
frame.addChannel(channelIndex.intValue());
frameList.add(frame);
}
// logger.info(frame.toString());
}
}
}
public int getMinFrameIndex() {
return minFrameIndex;
}
public int getMaxFrameIndex() {
return maxFrameIndex;
}
public int getMinChannelIndex() {
return minChannelIndex;
}
public int getMaxChannelIndex() {
return maxChannelIndex;
}
public int getNFrames() {
return frameList.size();
}
public int getNChannels() {
// TODO: check all frames for min/max of channels not index
if (nChannels == -1)
return maxChannelIndex - minChannelIndex + 1;
else
return nChannels;
}
public boolean hasAllFramesInRange() {
return frameList.size() == (maxFrameIndex - minFrameIndex + 1);
}
public String toString() {
String s = "Data set statistics\n";
s = s + "----------------------------------\n";
s = s + "nFrames: " + Integer.toString(frameList.size()) + "\n";
s = s + "minFrameIndex: " + Integer.toString(minFrameIndex) + "\n";
s = s + "maxFrameIndex: " + Integer.toString(maxFrameIndex) + "\n";
s = s + "hasAllFramesInRange: " + Boolean.toString(hasAllFramesInRange()) + "\n";
s = s + "minChannelIndex: " + Integer.toString(minChannelIndex) + "\n";
s = s + "maxChannelIndex: " + Integer.toString(maxChannelIndex) + "\n";
// String[] toks = getFormatTokens();
Iterator<TimeFrame> frames = frameList.iterator();
while (frames.hasNext()) {
TimeFrame f = frames.next();
s = s + f.toString() + "\n";
// s = s + "(" + toks[0] +
// Integer.toString(f.getFrameIndex())
// + toks[1] + "$C";
// if(toks.length>2)
// s = s + toks[2] + "\n";
// else
// s = s + "\n";
}
s = s + "----------------------------------";
return s;
}
public List<String> getUnmatchedVarNames() {
return unMatchedVarNames;
}
public String[] getFormatTokens() {
return formatTokens;
}
public String getFormatString() {
return formatString;
}
public void setFrameAndChannelRange(int minFrame, int skipFrame, int maxFrame, int minChannel, int skipChannel, int maxChannel) {
logger.info("Setting frame range: " + Integer.toString(minFrame) + ":" + Integer.toString(skipFrame) + ":" + Integer.toString(maxFrame));
logger.info("Setting channel range: " + Integer.toString(minChannel) + ":" + Integer.toString(skipChannel) + ":" + Integer.toString(maxChannel));
if (hasAllFramesInRange()) {
// copy frames
List<TimeFrame> completeFrameList = new ArrayList<TimeFrame>(frameList);
// clear frames
frameList.clear();
// insert wanted frames and channels
for (int f = minFrame; f < maxFrame + 1; f += skipFrame) {
TimeFrame frameAllChannels = completeFrameList.get(f);
TimeFrame frame = new TimeFrame(frameAllChannels.getFrameIndex());
// TODO remove unwanted channels
for (int c = minChannel; c < maxChannel + 1; c += skipChannel) {
// logger.info("Adding channels: " +
// Integer.toString(c));
frame.addChannel(c);
}
// if(nChannels == -1)
// nChannels = frame.getNChannels();
frameList.add(frame);
}
// TODO update min/max of frames/channels
nChannels = ((maxChannel - minChannel) / skipChannel) + 1;
logger.info("Adding nChannels: " + Integer.toString(nChannels));
} else {
logger.info("-------------------------\n" + "hasAllFramesInRange==false\n" + "-------------------------");
// copy frames
List<TimeFrame> completeFrameList = new ArrayList<TimeFrame>(frameList);
// clear frames
frameList.clear();
// insert wanted frames and channels
for (int f = minFrame; f < maxFrame + 1; f += skipFrame) {
TimeFrame frame = new TimeFrame(f);
int idx = completeFrameList.indexOf(frame);
// logger.info("index of frame in list: " +
// Integer.toString(idx));
if (idx != -1) {
// TODO remove unwanted channels
for (int c = minChannel; c < maxChannel + 1; c += skipChannel) {
// logger.info("Adding channels: " +
// Integer.toString(c));
frame.addChannel(c);
}
// if(nChannels == -1)
// nChannels = frame.getNChannels();
frameList.add(frame);
} else {
logger.info("Timestep " + Integer.toString(f) + " is missing!");
}
}
// TODO update min/max of frames/channels
nChannels = ((maxChannel - minChannel) / skipChannel) + 1;
logger.info("Adding nChannels: " + Integer.toString(nChannels));
}
}
}
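
For context, a minimal sketch of how the removed HDF5GroupedVarnames class was driven. The dataset names and the "[0-9]+" grouping regexp are illustrative; the "/t$T/channel$C" format matches the writer's previous default shown further down.

package ch.psi.imagej.hdf5;

// Illustrative only: exercises the removed class with hypothetical dataset names.
public class GroupedVarnamesExample {
    public static void main(String[] args) {
        String[] names = {"/t0/channel0", "/t0/channel1", "/t1/channel0", "/t1/channel1"};
        HDF5GroupedVarnames grouped = new HDF5GroupedVarnames();
        // "$T" marks the frame index, "$C" the channel index; both placeholders are
        // replaced by the "[0-9]+" regexp before the names are matched and grouped.
        grouped.parseVarNames(names, "/t$T/channel$C", "[0-9]+");
        System.out.println(grouped);              // frame/channel statistics via toString()
        TimeFrame first = grouped.getFrame(0);    // frames are kept sorted by frame index
        System.out.println(first.getNChannels()); // 2
    }
}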

HDF5Reader.java View File

@@ -51,10 +51,7 @@ public class HDF5Reader implements PlugIn {
public void run(String arg) {
// make sure default values for config are written
// HDF5_Config.setDefaultsIfNoValueExists();
// Run plugin
String directory = "";
String name = "";
boolean tryAgain;
@@ -195,7 +192,7 @@ public class HDF5Reader implements PlugIn {
for (int lev = 0; lev < dimensions[1]; ++lev) {
int startIdx = (int) ((volIDX * singleVolumeSize) + (lev * stackSize));
int endIdx = (int) (startIdx + stackSize);
convertDatatypesAndSlice(datatypeIfUnsupported, stack, wholeDataset, startIdx, endIdx);
addSlice(datatypeIfUnsupported, stack, wholeDataset, startIdx, endIdx);
}
}
@@ -245,7 +242,7 @@ public class HDF5Reader implements PlugIn {
for (int lev = 0; lev < dimensions[0]; ++lev) {
int startIdx = lev * stackSize;
int endIdx = startIdx + stackSize;
convertDatatypesAndSlice(datatypeIfUnsupported, stack, wholeDataset, startIdx, endIdx);
addSlice(datatypeIfUnsupported, stack, wholeDataset, startIdx, endIdx);
}
ImagePlus imp = new ImagePlus(directory + name + " " + datasetName, stack);
@@ -259,38 +256,7 @@ public class HDF5Reader implements PlugIn {
wholeDataset = checkUnsigned(datatype, wholeDataset);
ImageStack stack = new ImageStack((int) dimensions[1], (int) dimensions[0]);
if (wholeDataset instanceof byte[]) {
byte[] tmp = (byte[]) wholeDataset;
stack.addSlice(null, tmp);
} else if (wholeDataset instanceof short[]) {
short[] tmp = (short[]) wholeDataset;
stack.addSlice(null, tmp);
} else if (wholeDataset instanceof int[]) {
int[] tmp = (int[]) wholeDataset;
if (datatypeIfUnsupported.getDatatypeClass() == Datatype.CLASS_FLOAT) {
stack.addSlice(null, HDF5Utilities.convertToFloat(tmp));
}
if (datatypeIfUnsupported.getDatatypeClass() == Datatype.CLASS_INTEGER) {
stack.addSlice(null, HDF5Utilities.convertToShort(tmp));
}
} else if (wholeDataset instanceof long[]) {
long[] tmp = (long[]) wholeDataset;
if (datatypeIfUnsupported.getDatatypeClass() == Datatype.CLASS_FLOAT) {
stack.addSlice(null, HDF5Utilities.convertToFloat(tmp));
}
if (datatypeIfUnsupported.getDatatypeClass() == Datatype.CLASS_INTEGER) {
stack.addSlice(null, HDF5Utilities.convertToShort(tmp));
}
} else if (wholeDataset instanceof float[]) {
float[] tmp = (float[]) wholeDataset;
stack.addSlice(null, tmp);
} else if (wholeDataset instanceof double[]) {
float[] tmp = HDF5Utilities.convertToFloat((double[]) wholeDataset);
stack.addSlice(null, tmp);
} else {
// try to put pixels on stack
stack.addSlice(null, wholeDataset);
}
addSlice(datatypeIfUnsupported, stack, wholeDataset);
ImagePlus imp = new ImagePlus(directory + name + " " + datasetName, stack);
imp.resetDisplayRange();
@@ -460,13 +426,14 @@ public class HDF5Reader implements PlugIn {
/**
* Add slice to image stack
* @param datatypeIfUnsupported
* @param stack
* @param wholeDataset
* @param startIdx
* @param endIdx
*/
private void convertDatatypesAndSlice(Datatype datatypeIfUnsupported, ImageStack stack, Object wholeDataset, int startIdx, int endIdx) {
private void addSlice(Datatype datatypeIfUnsupported, ImageStack stack, Object wholeDataset, int startIdx, int endIdx) {
if (wholeDataset instanceof byte[]) {
byte[] tmp = Arrays.copyOfRange((byte[]) wholeDataset, startIdx, endIdx);
stack.addSlice(null, tmp);
@@ -496,9 +463,50 @@ public class HDF5Reader implements PlugIn {
float[] tmp = HDF5Utilities.convertToFloat(Arrays.copyOfRange((double[]) wholeDataset, startIdx, endIdx));
stack.addSlice(null, tmp);
} else {
logger.warning("Not supported array type");
logger.warning("Datatype not supported");
}
}
/**
* Add slice to image stack
* @param datatypeIfUnsupported
* @param stack
* @param wholeDataset
*/
private void addSlice(Datatype datatypeIfUnsupported, ImageStack stack, Object wholeDataset){
if (wholeDataset instanceof byte[]) {
byte[] tmp = (byte[]) wholeDataset;
stack.addSlice(null, tmp);
} else if (wholeDataset instanceof short[]) {
short[] tmp = (short[]) wholeDataset;
stack.addSlice(null, tmp);
} else if (wholeDataset instanceof int[]) {
int[] tmp = (int[]) wholeDataset;
if (datatypeIfUnsupported.getDatatypeClass() == Datatype.CLASS_FLOAT) {
stack.addSlice(null, HDF5Utilities.convertToFloat(tmp));
}
if (datatypeIfUnsupported.getDatatypeClass() == Datatype.CLASS_INTEGER) {
stack.addSlice(null, HDF5Utilities.convertToShort(tmp));
}
} else if (wholeDataset instanceof long[]) {
long[] tmp = (long[]) wholeDataset;
if (datatypeIfUnsupported.getDatatypeClass() == Datatype.CLASS_FLOAT) {
stack.addSlice(null, HDF5Utilities.convertToFloat(tmp));
}
if (datatypeIfUnsupported.getDatatypeClass() == Datatype.CLASS_INTEGER) {
stack.addSlice(null, HDF5Utilities.convertToShort(tmp));
}
} else if (wholeDataset instanceof float[]) {
float[] tmp = (float[]) wholeDataset;
stack.addSlice(null, tmp);
} else if (wholeDataset instanceof double[]) {
float[] tmp = HDF5Utilities.convertToFloat((double[]) wholeDataset);
stack.addSlice(null, tmp);
} else {
logger.warning("Datatype not supported");
}
}
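
As a reference for the consolidated helper above, a minimal standalone sketch of the same slice-copy pattern, assuming ij.jar on the classpath; dimensions and pixel data are illustrative and not taken from the plugin.

import ij.ImageStack;
import java.util.Arrays;

// Illustrative only: copy one plane at a time out of a flattened array and
// push it onto an ImageStack, as addSlice(..., startIdx, endIdx) does.
public class AddSliceExample {
    public static void main(String[] args) {
        int width = 4, height = 3, planes = 2;
        float[] wholeDataset = new float[planes * width * height]; // hypothetical pixel data
        ImageStack stack = new ImageStack(width, height);
        int stackSize = width * height;
        for (int lev = 0; lev < planes; ++lev) {
            int startIdx = lev * stackSize;
            int endIdx = startIdx + stackSize;
            // float[] needs no conversion; int[], long[] and double[] go through HDF5Utilities
            stack.addSlice(null, Arrays.copyOfRange(wholeDataset, startIdx, endIdx));
        }
        System.out.println(stack.getSize()); // 2
    }
}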
/**
* @param datatypeIfUnsupported
@@ -891,7 +899,9 @@ public class HDF5Reader implements PlugIn {
/** Adds AWT scroll bars to the given container. */
/**
* Add AWT scroll bars to the given container.
*/
public static void addScrollBars(Container pane) {
GridBagLayout layout = (GridBagLayout) pane.getLayout();
@@ -917,10 +927,6 @@ public class HDF5Reader implements PlugIn {
newPane.add(c[i]);
}
// HACK - get preferred size for container panel
// NB: don't know a better way:
// - newPane.getPreferredSize() doesn't work
// - newLayout.preferredLayoutSize(newPane) doesn't work
Frame f = new Frame();
f.setLayout(new BorderLayout());
f.add(newPane, BorderLayout.WEST);
@@ -935,10 +941,12 @@ public class HDF5Reader implements PlugIn {
Dimension screen = Toolkit.getDefaultToolkit().getScreenSize();
int maxWidth = 3 * screen.width / 4;
int maxHeight = 3 * screen.height / 4;
if (size.width > maxWidth)
if (size.width > maxWidth){
size.width = maxWidth;
if (size.height > maxHeight)
}
if (size.height > maxHeight){
size.height = maxHeight;
}
// create scroll pane
ScrollPane scroll = new ScrollPane() {

HDF5Writer.java View File

@@ -37,13 +37,11 @@ public class HDF5Writer implements PlugInFilter {
private static final Logger logger = Logger.getLogger(HDF5Writer.class.getName());
public int setup(String arg, ImagePlus imp) {
// FIXME: set DOES_xx for image type here:
// currently RGB-Types are still missing
// see
// http://rsb.info.nih.gov/ij/developer/api/ij/plugin/filter/PlugInFilter.html
// see http://rsb.info.nih.gov/ij/developer/api/ij/plugin/filter/PlugInFilter.html
return DOES_8G + DOES_16 + DOES_32 + DOES_RGB + NO_CHANGES;
}
public void run(ImageProcessor ip) {
// Check whether windows are open
@@ -52,14 +50,13 @@ public class HDF5Writer implements PlugInFilter {
return;
}
// Query for filename to save datat to
// Query for filename to save data
SaveDialog sd = new SaveDialog("Save HDF5 ...", "", ".h5");
String directory = sd.getDirectory();
String name = sd.getFileName();
if (name == null || name.equals("")){
return;
}
String filename = directory + name;
// Retrieve an instance of the implementing class for the HDF5 format
@@ -103,7 +100,7 @@ public class HDF5Writer implements PlugInFilter {
// check for hyperstack
if (imp.getOpenAsHyperStack() || imp.isHyperStack()) {
logger.info("This is a hyperstack");
gd.addStringField(imp.getTitle(), "/t$T/channel$C");
gd.addStringField(imp.getTitle(), "/t$F/channel$C");
gd.showDialog();
if (gd.wasCanceled()) {
@@ -123,7 +120,6 @@ public class HDF5Writer implements PlugInFilter {
// Split frames and channels
// parse format string
String[] formatTokens = HDF5GroupedVarnames.parseFormatString(formatString, "[0-9]+"); // dummy
// regexp
long[] channelDims = null;
if (nSlices > 1) {
@@ -141,7 +137,9 @@ public class HDF5Writer implements PlugInFilter {
for (int f = 0; f < nFrames; f++) {
IJ.showProgress(f, nFrames);
for (int c = 0; c < nChannels; c++) {
String fullName = makeDataSetName(formatTokens, f, c);
String fullName = formatString;
fullName = fullName.replaceAll("$F", f+"");
fullName = fullName.replaceAll("$C", c+"");
String dataSetName = HDF5Utilities.getDataSetDescriptor(fullName);
String groupName = HDF5Utilities.getGroupDescriptor(fullName);
@@ -389,20 +387,6 @@ public class HDF5Writer implements PlugInFilter {
}
long[] findOptimalChunksize(int Rank, long[] dataDims) {
long[] best_chunksize = new long[Rank];
int maxChunkVol = 262144;
// small sanity check first:
int data_volume = 1;
for (int d = 0; d < Rank; ++d)
data_volume *= dataDims[d];
if (data_volume < maxChunkVol) {
System.arraycopy(dataDims, 0, best_chunksize, 0, Rank);
return best_chunksize;
} else
return null;
}
private Object computeRgbSlice(Object pixels) {
byte rgbslice[];
int size = ((int[]) pixels).length;
@@ -417,11 +401,4 @@ public class HDF5Writer implements PlugInFilter {
}
return rgbslice;
}
private String makeDataSetName(String[] toks, int frame, int channel) {
String dName = toks[0] + Integer.toString(frame) + toks[1] + Integer.toString(channel);
if (toks.length > 2)
dName = dName + toks[2];
return dName;
}
}
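
Since the writer now expands the $F/$C placeholders inline, here is a hedged sketch of the same substitution as a standalone helper; the method is hypothetical and not part of this commit. Note that String.replaceAll treats its first argument as a regular expression, in which $ is an anchor and would need escaping as \\$F, whereas String.replace substitutes literally.

// Hypothetical helper illustrating literal placeholder expansion for dataset
// names such as "/t$F/channel$C"; not part of HDF5Writer in this commit.
public static String expandPlaceholders(String format, int frame, int channel) {
    String name = format.replace("$F", Integer.toString(frame)); // literal, no regex involved
    return name.replace("$C", Integer.toString(channel));
}

// Example: expandPlaceholders("/t$F/channel$C", 3, 1) returns "/t3/channel1"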

TimeFrame.java View File

@@ -1,78 +0,0 @@
package ch.psi.imagej.hdf5;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
public class TimeFrame implements Comparable<TimeFrame> {
private final int frameIndex;
private final List<Integer> channels = new ArrayList<Integer>();
public TimeFrame(int index) {
frameIndex = index;
}
public TimeFrame(String index) {
frameIndex = Integer.parseInt(index);
}
public void addChannel(Integer index) {
if (!channels.contains(index)){
channels.add(index);
}
}
public void addChannel(String index) {
addChannel(Integer.parseInt(index));
}
public boolean equals(Object o) {
return (((TimeFrame)o).frameIndex == frameIndex);
}
public String toString() {
StringBuffer b = new StringBuffer();
b.append("FrameIdx: ");
b.append(frameIndex);
b.append("; nChannels: ");
b.append(channels.size());
b.append("; Channels: ");
for(Integer c: channels){
b.append(c);
b.append(";");
}
return b.toString();
}
public int getNChannels() {
return channels.size();
}
public int getFrameIndex() {
return frameIndex;
}
public int[] getChannelIndices() {
Object[] channelsAsArray = channels.toArray();
Arrays.sort(channelsAsArray);
int[] channelsIdx = new int[channelsAsArray.length];
for (int i = 0; i < channelsAsArray.length; i++){
channelsIdx[i] = ((Integer) channelsAsArray[i]).intValue();
}
return channelsIdx;
}
public int compareTo(TimeFrame f) {
if (frameIndex < f.frameIndex){
return -1;
}
else if (frameIndex > f.frameIndex){
return 1;
}
else{
return 0;
}
}
}
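
For context, a minimal sketch of how the removed TimeFrame class behaves; the frame and channel indices are illustrative.

package ch.psi.imagej.hdf5;

import java.util.Arrays;

// Illustrative only: exercises the removed TimeFrame class.
public class TimeFrameExample {
    public static void main(String[] args) {
        TimeFrame frame = new TimeFrame(3);
        frame.addChannel(2);
        frame.addChannel(0);
        frame.addChannel(2); // duplicate channel indices are ignored
        System.out.println(frame); // channels are listed in insertion order: 2;0;
        System.out.println(Arrays.toString(frame.getChannelIndices())); // sorted copy: [0, 2]
        System.out.println(new TimeFrame(1).compareTo(frame)); // -1, frames order by index
    }
}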

TimeFrameTest.java View File

@@ -1,25 +0,0 @@
package ch.psi.imagej.hdf5;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;
public class TimeFrameTest {
private TimeFrame timeframe;
@Before
public void setUp() throws Exception {
timeframe = new TimeFrame(1);
}
@After
public void tearDown() throws Exception {
}
@Test
public void test() {
System.out.println(timeframe.toString());
}
}