Some more refactoring ... hoping that things get in shape some time ...

ebner 2014-08-28 14:16:50 +02:00
parent 7ff965bc08
commit c049510d70
6 changed files with 521 additions and 539 deletions

HDF5GroupedVarnames.java

@@ -132,7 +132,7 @@ public class HDF5GroupedVarnames {
 		TimeFrame frame = new TimeFrame(frameIndex.intValue());
 		int idx = frameList.indexOf(frame);
 		if (idx != -1) {
-			frame = (TimeFrame) frameList.get(idx);
+			frame = frameList.get(idx);
 			frame.addChannel(channelIndex.intValue());
 		} else {
 			frame.addChannel(channelIndex.intValue());

(File diff suppressed because it is too large.)

HDF5Utilities.java (new file)

@@ -0,0 +1,39 @@
+package ch.psi.imagej.hdf5;
+
+import java.util.HashMap;
+import java.util.Map;
+import java.util.logging.Logger;
+
+import ncsa.hdf.object.Attribute;
+import ncsa.hdf.object.HObject;
+
+public class HDF5Utilities {
+
+	private static final Logger logger = Logger.getLogger(HDF5Utilities.class.getName());
+
+	/**
+	 * Get all attributes of an HDF5 object.
+	 *
+	 * @param object Object to retrieve the attributes from
+	 * @return Map of attribute names to attributes, or null if the passed
+	 *         object is null or an error occurred while retrieving metadata
+	 */
+	public static Map<String, Attribute> getAttributes(HObject object) {
+		if (object == null) {
+			return null;
+		}
+
+		Map<String, Attribute> attributes = new HashMap<>();
+		try {
+			for (Object m : object.getMetadata()) {
+				if (m instanceof Attribute) {
+					attributes.put(((Attribute) m).getName(), (Attribute) m);
+				}
+			}
+		} catch (Exception e) {
+			logger.warning("Unable to retrieve metadata from object");
+			return null;
+		}
+		return attributes;
+	}
+}
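
Since getAttributes() returns null both for a null object and when metadata retrieval fails, callers should guard the map lookup. A minimal usage sketch, assuming it sits alongside the class above in ch.psi.imagej.hdf5 (the lookup helper and its Dataset parameter are illustrative, not part of this commit):

package ch.psi.imagej.hdf5;

import java.util.Map;

import ncsa.hdf.object.Attribute;
import ncsa.hdf.object.Dataset;

public class AttributeLookupSketch {

	// Returns the named attribute, or null if the dataset has no readable
	// metadata or no attribute with that name.
	static Attribute lookup(Dataset dataset, String name) {
		Map<String, Attribute> attributes = HDF5Utilities.getAttributes(dataset);
		return (attributes == null) ? null : attributes.get(name);
	}
}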

HDF5Writer.java

@@ -26,9 +26,6 @@ import ij.plugin.filter.PlugInFilter;
 import ij.process.*;
 import ij.gui.*;
 
-import java.util.ArrayList;
-import java.util.Iterator;
-import java.util.List;
 import java.util.logging.Logger;
 
 import javax.swing.tree.DefaultMutableTreeNode;
@@ -41,14 +38,7 @@ public class HDF5Writer implements PlugInFilter {
 	private static final Logger logger = Logger.getLogger(HDF5Writer.class.getName());
 
-	private Boolean _batchMode = false;
-	private String _batchFileName = null;
-
 	public int setup(String arg, ImagePlus imp) {
-		if (arg.equals("about")) {
-			showAbout();
-			return DONE;
-		}
 		// FIXME: set DOES_xx for image type here:
 		// currently RGB-Types are still missing
 		// see
@@ -56,34 +46,24 @@ public class HDF5Writer implements PlugInFilter {
 		return DOES_8G + DOES_16 + DOES_32 + DOES_RGB + NO_CHANGES;
 	}
 
-	public void setToBatchMode(String filename, String[] varnames) {
-		_batchMode = true;
-		_batchFileName = filename;
-	}
-
 	public void run(ImageProcessor ip) {
-		int[] wList = WindowManager.getIDList();
-		if (wList == null) {
+		// Check whether windows are open
+		if (WindowManager.getIDList() == null) {
 			IJ.error("No windows are open.");
 			return;
 		}
 
-		String filename = null;
-		if (_batchMode) {
-			filename = _batchFileName;
-		} else {
-			SaveDialog sd = new SaveDialog("Save HDF5 ...", "", ".h5");
-			String directory = sd.getDirectory();
-			String name = sd.getFileName();
-			filename = directory + name;
-			if (name == null)
-				return;
-			if (name == "")
-				return;
-		}
+		// Query for filename to save data to
+		SaveDialog sd = new SaveDialog("Save HDF5 ...", "", ".h5");
+		String directory = sd.getDirectory();
+		String name = sd.getFileName();
+		if (name == null || name.equals("")) {
+			return;
+		}
+		String filename = directory + name;
 
 		// Retrieve an instance of the implementing class for the HDF5 format
 		FileFormat fileFormat = FileFormat.getFileFormat(FileFormat.FILE_TYPE_HDF5);
@@ -216,9 +196,7 @@ public class HDF5Writer implements PlugInFilter {
 				logger.info("selected.length: " + Integer.toString(selected.length));
 				logger.info("channelDims.length: " + Integer.toString(channelDims.length));
 				if (nLevs == 1) {
-					for (int d = 0; d < selected.length; d++) {
-						selected[d] = channelDims[d];
-					}
+					System.arraycopy(channelDims, 0, selected, 0, selected.length);
 					int stackIndex = imp.getStackIndex(c + 1, 1, f + 1);
 					logger.info("Stackindex: " + Integer.toString(stackIndex));
 					// get raw data
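This and the following hunks replace hand-written copy loops with System.arraycopy, which performs the same element-wise copy in a single call. A self-contained sketch of the equivalence; the dimension values here are made up:

import java.util.Arrays;

public class ArrayCopyDemo {
	public static void main(String[] args) {
		long[] channelDims = { 4, 512, 512 };
		long[] selected = new long[channelDims.length];

		// Old form: manual element-by-element copy.
		for (int d = 0; d < selected.length; d++) {
			selected[d] = channelDims[d];
		}
		long[] viaLoop = selected.clone();

		// New form: one call, same result.
		Arrays.fill(selected, 0L);
		System.arraycopy(channelDims, 0, selected, 0, selected.length);

		System.out.println(Arrays.equals(viaLoop, selected)); // true
	}
}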
@@ -232,9 +210,7 @@ public class HDF5Writer implements PlugInFilter {
 					}
 				} else {
 					selected[0] = 1;
-					for (int d = 1; d < selected.length; d++) {
-						selected[d] = channelDims[d];
-					}
+					System.arraycopy(channelDims, 1, selected, 1, selected.length - 1);
 					long[] start = dataset.getStartDims(); // the offset
@@ -438,9 +414,7 @@ public class HDF5Writer implements PlugInFilter {
 			// dataset
 			ImageStack stack = imp.getStack();
 			if (nLevels == 1) {
-				for (int d = 0; d < selected.length; d++) {
-					selected[d] = dims[d];
-				}
+				System.arraycopy(dims, 0, selected, 0, selected.length);
 				// get raw data
 				Object slice = stack.getPixels(nLevels);
 				if (imgColorType == ImagePlus.COLOR_RGB)
@@ -450,9 +424,7 @@ public class HDF5Writer implements PlugInFilter {
 			} else {
 				selected[0] = 1;
-				for (int d = 1; d < selected.length; d++) {
-					selected[d] = dims[d];
-				}
+				System.arraycopy(dims, 1, selected, 1, selected.length - 1);
 				long[] start = dataset.getStartDims(); // the offset of the selection
 				for (int lvl = 0; lvl < nLevels; ++lvl) {
@@ -480,7 +452,7 @@ public class HDF5Writer implements PlugInFilter {
 			long[] attrDims = { 3 };
 			Attribute element_size_um = null;
 			try {
-				element_size_um = getAttribute(dataset, "element_size_um");
+				element_size_um = HDF5Utilities.getAttributes(dataset).get("element_size_um");
 			} catch (Exception e) {
 				element_size_um = null;
 			}
@@ -504,16 +476,6 @@ public class HDF5Writer implements PlugInFilter {
 	}
 
-	int byteToUnsignedByte(int n) {
-		if (n < 0)
-			return (256 + n);
-		return n;
-	}
-
-	void showAbout() {
-		IJ.showMessage("About HDF5 Writer:", "Written by Matthias Schlachter\n" + "University of Freiburg, 2010");
-	}
-
 	private static Group createGroupRecursive(String groupRelativName, Group group, FileFormat file) {
 		if (groupRelativName == null || file == null)
 			return null;
@@ -618,49 +580,12 @@ public class HDF5Writer implements PlugInFilter {
 		for (int d = 0; d < Rank; ++d)
 			data_volume *= dataDims[d];
 		if (data_volume < maxChunkVol) {
-			for (int d = 0; d < Rank; ++d)
-				best_chunksize[d] = dataDims[d];
+			System.arraycopy(dataDims, 0, best_chunksize, 0, Rank);
 			return best_chunksize;
 		} else
 			return null;
 	}
 
-	private static List<Attribute> getAttrList(Dataset ds) throws Exception {
-		if (ds == null)
-			return null;
-
-		List<Attribute> attributes = new ArrayList<Attribute>();
-		List<?> members = ds.getMetadata();
-		int n = members.size();
-		Metadata obj = null;
-		for (int i = 0; i < n; i++) {
-			obj = (Metadata) members.get(i);
-			if (obj instanceof Attribute) {
-				try {
-					logger.info(((Attribute) obj).getName());
-					attributes.add((Attribute) obj);
-				} catch (java.lang.UnsupportedOperationException e) {
-					logger.info("Caught UnsupportedOperationException datasets2.add((Dataset) obj)");
-					logger.info(e.getMessage());
-				}
-			}
-		}
-		return attributes;
-	}
-
-	private static Attribute getAttribute(Dataset ds, String attrName) throws Exception {
-		List<Attribute> attrList = getAttrList(ds);
-		Iterator<Attribute> attrIter = attrList.iterator();
-		while (attrIter.hasNext()) {
-			Attribute attr = attrIter.next();
-			if (attr.getName().equals(attrName)) {
-				return attr;
-			}
-		}
-		return null;
-	}
-
 	private Object computeRgbSlice(Object pixels) {
 		byte rgbslice[];
 		int size = ((int[]) pixels).length;

TimeFrame.java

@@ -19,7 +19,7 @@ public class TimeFrame implements Comparable<TimeFrame> {
 
 	public void addChannel(Integer index) {
 		if (!channels.contains(index)) {
-			channels.add(new Integer(index));
+			channels.add(index);
 		}
 	}
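
The change above removes a redundant allocation: the parameter is already an Integer, so wrapping it in new Integer(...) creates a needless copy (the Integer(int) constructor is deprecated as of Java 9 in favor of Integer.valueOf()). A small self-contained sketch; the channel value is made up:

import java.util.ArrayList;
import java.util.List;

public class BoxingDemo {
	public static void main(String[] args) {
		List<Integer> channels = new ArrayList<>();
		Integer index = 42;

		// contains() compares with equals(), so duplicate detection works
		// the same whether or not the value is re-wrapped.
		if (!channels.contains(index)) {
			channels.add(index); // no new Integer(...) needed
		}
		System.out.println(channels); // [42]
	}
}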

TimeFrameTest.java (new file)

@@ -0,0 +1,25 @@
+package ch.psi.imagej.hdf5;
+
+import org.junit.After;
+import org.junit.Before;
+import org.junit.Test;
+
+public class TimeFrameTest {
+
+	private TimeFrame timeframe;
+
+	@Before
+	public void setUp() throws Exception {
+		timeframe = new TimeFrame(1);
+	}
+
+	@After
+	public void tearDown() throws Exception {
+	}
+
+	@Test
+	public void test() {
+		System.out.println(timeframe.toString());
+	}
+}
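
The new test only prints the frame's string form and asserts nothing. A variant with a real assertion might look like the following; it assumes TimeFrame.equals() compares frame indices, which the frameList.indexOf(frame) lookup in HDF5GroupedVarnames suggests but this diff does not show:

package ch.psi.imagej.hdf5;

import static org.junit.Assert.assertEquals;

import org.junit.Test;

public class TimeFrameEqualityTest {

	@Test
	public void framesWithSameIndexAreEqual() {
		// Assumption: TimeFrame equality is based on the frame index.
		assertEquals(new TimeFrame(1), new TimeFrame(1));
	}
}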