Merge pull request #2 in IM/ch.psi.imagej.hdf5 from virtualstack to master

# By ebner
# Via ebner
* commit 'e5331e0c2d5d33dffee6906b5db5bda33f144e5a':
  Updated version CTRLHA-109
  Fixed memory leak that was introduced with the VirtualStack workaround - it's still a workaround, so CTRLHA-109
  tried to implement close of file CTRLHA-109
  implemented a HDF5 Virtual Stack CTRLHA-109
This commit is contained in:
ebner 2015-04-14 14:18:09 +02:00
commit fa892bc7fe
6 changed files with 317 additions and 54 deletions

View File

@ -3,7 +3,7 @@
<modelVersion>4.0.0</modelVersion>
<groupId>ch.psi</groupId>
<artifactId>imagej.hdf5</artifactId>
<version>0.8.0</version>
<version>0.9.0</version>
<dependencies>
<dependency>

View File

@ -12,6 +12,7 @@ public class DatasetSelection {
private Integer slice;
// Intervall to read images
private Integer modulo;
private boolean virtualStack;
public List<Dataset> getDatasets() {
return datasets;
@ -37,4 +38,10 @@ public class DatasetSelection {
/**
 * @return interval (modulo) at which images are read out, or {@code null} if unset
 */
public Integer getModulo() {
return modulo;
}
/**
 * @param virtualStack whether the selected datasets should be opened as a
 *                     lazily-loaded virtual stack instead of being read into memory
 */
public void setVirtualStack(boolean virtualStack) {
this.virtualStack = virtualStack;
}
/**
 * @return true if the selected datasets should be opened as a virtual stack
 */
public boolean isVirtualStack(){
return this.virtualStack;
}
}

View File

@ -51,6 +51,7 @@ public class HDF5Reader implements PlugIn {
// Read HDF5 file
H5File file = null;
boolean close = true;
try {
file = new H5File(filename, H5File.READ);
file.setMaxMembers(Integer.MAX_VALUE);
@ -58,6 +59,8 @@ public class HDF5Reader implements PlugIn {
List<Dataset> datasets = HDF5Utilities.getDatasets(file);
DatasetSelection selectedDatasets = selectDatasets(datasets);
// TODO to be removed - Workaround virtual stack - keep HDF5 file open at the end
close=!selectedDatasets.isVirtualStack();
// TODO Remove
@ -208,6 +211,11 @@ public class HDF5Reader implements PlugIn {
ImageStack stack;
if(selectedDatasets.isVirtualStack()){
logger.info("Use virtual stack");
stack = new VirtualStackHDF5(file, var);
}
else{
if(selectedDatasets.getSlice()!=null){
// Select what to readout
@ -268,8 +276,9 @@ public class HDF5Reader implements PlugIn {
addSlice(stack, wholeDataset, startIdx, size);
}
}
}
ImagePlus imp = new ImagePlus(filename + " " + datasetName, stack);
ImagePlus imp = new ImagePlusHDF5(filename + " " + datasetName, stack);
imp.resetDisplayRange();
imp.show();
@ -297,9 +306,12 @@ public class HDF5Reader implements PlugIn {
IJ.outOfMemory("Out of memory while loading file: " + filename);
} finally {
try {
// TODO workaround - to be removed
if(close){
if (file != null) {
file.close();
}
}
} catch (HDF5Exception e) {
logger.log(Level.WARNING, "Error while closing: " + filename, e);
IJ.showStatus("Error while closing: " + filename);
@ -335,6 +347,7 @@ public class HDF5Reader implements PlugIn {
selectedDatasets.setGroup(panel.groupValues());
selectedDatasets.setSlice(panel.getSlice());
selectedDatasets.setModulo(panel.getModulo());
selectedDatasets.setVirtualStack(panel.useVirtualStack());
}
return selectedDatasets;

View File

@ -0,0 +1,64 @@
package ch.psi.imagej.hdf5;
import java.awt.event.WindowEvent;
import java.awt.event.WindowListener;
import java.util.logging.Logger;
import ij.ImagePlus;
import ij.ImageStack;
/**
 * ImagePlus variant that knows about HDF5-backed virtual stacks: when the
 * image window is closed, the underlying HDF5 file held by a
 * {@link VirtualStackHDF5} is closed as well, so the file handle does not leak.
 */
public class ImagePlusHDF5 extends ImagePlus {

	private static final Logger logger = Logger.getLogger(ImagePlusHDF5.class.getName());

	/**
	 * @param title window title
	 * @param stack image stack, possibly a {@link VirtualStackHDF5}
	 */
	public ImagePlusHDF5(String title, ImageStack stack) {
		super(title, stack);
	}

	@Override
	public void show() {
		super.show();

		// getWindow() can return null (e.g. headless/batch mode). Without this
		// guard we would throw a NullPointerException here and never get the
		// chance to close the HDF5 file on window close.
		if (getWindow() == null) {
			return;
		}

		// WindowAdapter instead of a full WindowListener: only the close
		// events matter, the other five callbacks were empty boilerplate.
		getWindow().addWindowListener(new WindowAdapter() {

			@Override
			public void windowClosing(WindowEvent e) {
				logger.info("Closing");
			}

			@Override
			public void windowClosed(WindowEvent e) {
				logger.info("Closed");
				// Release the HDF5 file handle held open by the virtual stack.
				ImageStack stack = getStack();
				if (stack instanceof VirtualStackHDF5) {
					((VirtualStackHDF5) stack).close();
				}
			}
		});
	}
}

View File

@ -24,7 +24,7 @@ public class SelectionPanel extends JPanel {
private final JList<Dataset> list;
private JCheckBox checkbox;
private JCheckBox chckbxNewCheckBox;
private JCheckBox checkBoxVirtualStack;
private JLabel lblSlice;
private JPanel panel;
private JTextField textField;
@ -63,8 +63,9 @@ public class SelectionPanel extends JPanel {
checkbox = new JCheckBox("Group Datasets (2D datasets only)");
add(checkbox);
chckbxNewCheckBox = new JCheckBox("Virtual Stack");
add(chckbxNewCheckBox);
checkBoxVirtualStack = new JCheckBox("Virtual Stack");
checkBoxVirtualStack.setSelected(true);
add(checkBoxVirtualStack);
panel = new JPanel();
FlowLayout flowLayout = (FlowLayout) panel.getLayout();
@ -102,4 +103,8 @@ public class SelectionPanel extends JPanel {
}
return null;
}
/**
 * @return true if the user ticked the "Virtual Stack" checkbox (selected by default)
 */
public boolean useVirtualStack(){
return checkBoxVirtualStack.isSelected();
}
}

View File

@ -0,0 +1,174 @@
package ch.psi.imagej.hdf5;
import java.util.logging.Level;
import java.util.logging.Logger;
import ncsa.hdf.object.Dataset;
import ncsa.hdf.object.h5.H5File;
import ij.ImageStack;
import ij.process.ByteProcessor;
import ij.process.ColorProcessor;
import ij.process.FloatProcessor;
import ij.process.ImageProcessor;
import ij.process.ShortProcessor;
/**
 * Read-only ImageJ stack backed by a 3D HDF5 dataset. Slices are read lazily
 * from the file on each {@link #getPixels(int)} call, so the H5File must stay
 * open for the lifetime of the stack; call {@link #close()} when done.
 *
 * Assumes dataset dimension order [slice, height, width] (see constructor) —
 * TODO confirm against the writer of the files being read.
 */
public class VirtualStackHDF5 extends ImageStack {

	private static final Logger logger = Logger.getLogger(VirtualStackHDF5.class.getName());

	// Never assigned after initialization, so getBitDepth() always reports
	// 0 ("unknown") — NOTE(review): consider deriving this from the dataset type.
	private int bitDepth = 0;
	// Backing 3D dataset; its selection state is mutated on every slice read.
	private Dataset dataset;
	// Open file handle kept alive for lazy reads; released in close().
	private H5File file;

	/**
	 * @param file    open HDF5 file owning the dataset (kept open; closed via close())
	 * @param dataset 3D dataset with dims [slice, height, width]:
	 *                dims[2] is used as width, dims[1] as height
	 */
	public VirtualStackHDF5(H5File file, Dataset dataset){
		super((int) dataset.getDims()[2], (int) dataset.getDims()[1]);
		this.dataset = dataset;
		this.file = file;
	}

	/** Does nothing — the stack is read-only. */
	public void addSlice(String sliceLabel, Object pixels) {
	}

	/** Does nothing — the stack is read-only. */
	public void addSlice(String sliceLabel, ImageProcessor ip) {
	}

	/** Does nothing — the stack is read-only. */
	public void addSlice(String sliceLabel, ImageProcessor ip, int n) {
	}

	/** Does nothing — the stack is read-only. */
	public void deleteSlice(int n) {
	}

	/** Does nothing — the stack is read-only. */
	public void deleteLastSlice() {
	}

	/**
	 * Reads one slice from the HDF5 file.
	 *
	 * Integer, long and double data are converted to float[] because ImageJ has
	 * no processor for those element types; byte/short/float pass through as-is.
	 *
	 * @param slice 1-based slice index (ImageJ convention)
	 * @return pixel array of the slice, or null if the read failed or the
	 *         element type is unsupported
	 */
	public Object getPixels(int slice) {

		try {
			long[] dimensions = dataset.getDims();

			// Narrow the dataset's (shared, stateful) selection to a single
			// slice; dataset.read() below honors this selection.
			long[] selected = dataset.getSelectedDims();
			selected[0] = 1;
			selected[1] = dimensions[1];
			selected[2] = dimensions[2];

			long[] start = dataset.getStartDims();
			start[0] = slice-1; // Indexing at image J starts at 1

			Object wholeDataset = dataset.read();

			if (wholeDataset instanceof byte[]) {
				return (byte[]) wholeDataset;
			} else if (wholeDataset instanceof short[]) {
				return (short[]) wholeDataset;
			} else if (wholeDataset instanceof int[]) {
				// No 32-bit integer processor in ImageJ - convert to float.
				return HDF5Utilities.convertToFloat((int[]) wholeDataset);
			} else if (wholeDataset instanceof long[]) {
				return HDF5Utilities.convertToFloat((long[]) wholeDataset);
			} else if (wholeDataset instanceof float[]) {
				return (float[]) wholeDataset;
			} else if (wholeDataset instanceof double[]) {
				return HDF5Utilities.convertToFloat((double[]) wholeDataset);
			} else {
				logger.warning("Datatype not supported");
			}
		} catch (OutOfMemoryError | Exception e) {
			// Best effort: a failed slice read yields null rather than crashing ImageJ.
			logger.log(Level.WARNING, "Unable to open slice", e);
		}

		return null;
	}

	/**
	 * Does nothing — pixels cannot be assigned to a read-only stack
	 * (1&lt;=n&lt;=nslices in the ImageStack contract).
	 */
	public void setPixels(Object pixels, int n) {
	}

	/**
	 * Returns an ImageProcessor for the specified slice, were 1&lt;=n&lt;=nslices.
	 * Returns null if the stack is empty.
	 *
	 * NOTE(review): the int[] branch appears unreachable because getPixels()
	 * converts int[] data to float[] — verify before relying on ColorProcessor here.
	 */
	public ImageProcessor getProcessor(int slice) {

		long[] dimensions = dataset.getDims();

		final Object pixels = getPixels(slice);

		// Todo support more ImageProcessor types
		ImageProcessor ip;
		if (pixels instanceof byte[]){
			ip = new ByteProcessor((int) dimensions[2], (int) dimensions[1]);
		}
		else if (pixels instanceof short[]){
			ip = new ShortProcessor((int) dimensions[2], (int) dimensions[1]);
		}
		else if (pixels instanceof int[]){
			ip = new ColorProcessor((int) dimensions[2], (int) dimensions[1]);
		}
		else if (pixels instanceof float[]){
			ip = new FloatProcessor((int) dimensions[2], (int) dimensions[1]);
		}
		else {
			// Also reached when getPixels() returned null (failed read).
			throw new IllegalArgumentException("Unknown stack type");
		}

		ip.setPixels(pixels);
		return ip;
	}

	/** Returns the number of slices in this stack (first dataset dimension). */
	public int getSize() {
		return (int) this.dataset.getDims()[0];
	}

	/** Returns the label of the Nth image. */
	public String getSliceLabel(int slice) {
		return "Slice: "+slice;
	}

	/** Returns null — slices are not kept in memory. */
	public Object[] getImageArray() {
		return null;
	}

	/** Does nothing — the stack is read-only. */
	public void setSliceLabel(String label, int n) {
	}

	/** Always return true. */
	public boolean isVirtual() {
		return true;
	}

	/** Does nothing — there is no in-memory array to trim. */
	public void trim() {
	}

	/**
	 * Returns the bit depth (8, 16, 24 or 32), or 0 if the bit depth is not
	 * known. bitDepth is never set in this class, so this currently always
	 * returns 0.
	 */
	public int getBitDepth() {
		return bitDepth;
	}

	/**
	 * Close HDF5 file. After this call getPixels()/getProcessor() will fail;
	 * intended to be invoked once the image window is closed.
	 */
	public void close() {
		logger.info("Closing HDF5 file");
		try{
			file.close();
		}
		catch(Exception e){
			// Best effort: nothing the caller can do about a failed close.
			logger.log(Level.WARNING, "Unable to close HDF5 file", e);
		}
	}
}