Mirror of https://github.com/paulscherrerinstitute/ch.psi.imagej.hdf5.git (synced 2025-04-20)
Merge pull request #2 in IM/ch.psi.imagej.hdf5 from virtualstack to master

By ebner, via ebner. Squashed commits from 'e5331e0c2d5d33dffee6906b5db5bda33f144e5a':

* Updated version (CTRLHA-109)
* Fixed memory leak that was introduced with the VirtualStack workaround - it is still a workaround (CTRLHA-109)
* Tried to implement close of file (CTRLHA-109)
* Implemented a HDF5 Virtual Stack (CTRLHA-109)

This commit is contained in: commit fa892bc7fe
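In outline, the change routes image loading through a lazily evaluated stack. A condensed sketch of the new read path inside HDF5Reader, assembled from the diffs below (file is the open H5File, var the selected Dataset):

    if (selectedDatasets.isVirtualStack()) {
        // Lazy: VirtualStackHDF5 reads single slices on demand, so the
        // HDF5 file must stay open after run() returns.
        stack = new VirtualStackHDF5(file, var);
    } else {
        // Eager (unchanged behaviour): read the slice/modulo/full selection
        // into memory, then close the file in the finally block.
        ...
    }
    // ImagePlusHDF5 closes the file once its window is closed
    ImagePlus imp = new ImagePlusHDF5(filename + " " + datasetName, stack);
    imp.show();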
pom.xml (2 changed lines)

@@ -3,7 +3,7 @@
     <modelVersion>4.0.0</modelVersion>
     <groupId>ch.psi</groupId>
     <artifactId>imagej.hdf5</artifactId>
-    <version>0.8.0</version>
+    <version>0.9.0</version>

    <dependencies>
        <dependency>
DatasetSelection.java

@@ -12,6 +12,7 @@ public class DatasetSelection {
     private Integer slice;
     // Interval to read images
     private Integer modulo;
+    private boolean virtualStack;

     public List<Dataset> getDatasets() {
         return datasets;
@@ -37,4 +38,10 @@ public class DatasetSelection {
     public Integer getModulo() {
         return modulo;
     }
+    public void setVirtualStack(boolean virtualStack) {
+        this.virtualStack = virtualStack;
+    }
+    public boolean isVirtualStack(){
+        return this.virtualStack;
+    }
 }
HDF5Reader.java

@@ -51,6 +51,7 @@ public class HDF5Reader implements PlugIn {

        // Read HDF5 file
        H5File file = null;
+       boolean close = true;
        try {
            file = new H5File(filename, H5File.READ);
            file.setMaxMembers(Integer.MAX_VALUE);
@@ -58,6 +59,8 @@ public class HDF5Reader implements PlugIn {

        List<Dataset> datasets = HDF5Utilities.getDatasets(file);
        DatasetSelection selectedDatasets = selectDatasets(datasets);
+       // TODO to be removed - Workaround virtual stack - keep HDF5 file open at the end
+       close = !selectedDatasets.isVirtualStack();


        // TODO Remove
@@ -208,68 +211,74 @@ public class HDF5Reader implements PlugIn {

        ImageStack stack;

-       if(selectedDatasets.getSlice()!=null){
-           // Select what to readout
-           long[] selected = var.getSelectedDims();
-           selected[0] = 1;
-           selected[1] = dimensions[1];
-           selected[2] = dimensions[2];
-
-           long[] start = var.getStartDims();
-           start[0] = selectedDatasets.getSlice();
-
-           Object wholeDataset = var.read();
-
-           stack = new ImageStack((int) dimensions[2], (int) dimensions[1]);
-           int size = (int) (dimensions[1] * dimensions[2]);
-
-           // int startIdx = selectedDatasets.getSlice() * size;
-           addSlice(stack, wholeDataset, 0, size);
-       }
-       else if(selectedDatasets.getModulo()!=null){
-           logger.info("Read every "+selectedDatasets.getModulo()+" image");
-           // Select what to readout
-
-           stack = new ImageStack((int) dimensions[2], (int) dimensions[1]);
-
-           for(int indexToRead=0;indexToRead<dimensions[0]; indexToRead=indexToRead+selectedDatasets.getModulo()){
-
-               long[] selected = var.getSelectedDims();
-               selected[0] = 1;
-               selected[1] = dimensions[1];
-               selected[2] = dimensions[2];
-
-               long[] start = var.getStartDims();
-               start[0] = indexToRead;
-
-               Object wholeDataset = var.read();
-
-               int size = (int) (dimensions[1] * dimensions[2]);
-               // int startIdx = selectedDatasets.getSlice() * size;
-               addSlice(stack, wholeDataset, 0, size);
-           }
-       }
-       else{
-           // Select what to readout
-           long[] selected = var.getSelectedDims();
-           selected[0] = dimensions[0];
-           selected[1] = dimensions[1];
-           selected[2] = dimensions[2];
-
-           Object wholeDataset = var.read();
-
-           stack = new ImageStack((int) dimensions[2], (int) dimensions[1]);
-           int size = (int) (dimensions[1] * dimensions[2]);
-
-           for (int lev = 0; lev < dimensions[0]; ++lev) {
-               int startIdx = lev * size;
-               addSlice(stack, wholeDataset, startIdx, size);
-           }
-       }
+       if(selectedDatasets.isVirtualStack()){
+           logger.info("Use virtual stack");
+           stack = new VirtualStackHDF5(file, var);
+       }
+       else{
+           if(selectedDatasets.getSlice()!=null){
+               // Select what to readout
+               long[] selected = var.getSelectedDims();
+               selected[0] = 1;
+               selected[1] = dimensions[1];
+               selected[2] = dimensions[2];
+
+               long[] start = var.getStartDims();
+               start[0] = selectedDatasets.getSlice();
+
+               Object wholeDataset = var.read();
+
+               stack = new ImageStack((int) dimensions[2], (int) dimensions[1]);
+               int size = (int) (dimensions[1] * dimensions[2]);
+
+               // int startIdx = selectedDatasets.getSlice() * size;
+               addSlice(stack, wholeDataset, 0, size);
+           }
+           else if(selectedDatasets.getModulo()!=null){
+               logger.info("Read every "+selectedDatasets.getModulo()+" image");
+               // Select what to readout
+
+               stack = new ImageStack((int) dimensions[2], (int) dimensions[1]);
+
+               for(int indexToRead=0;indexToRead<dimensions[0]; indexToRead=indexToRead+selectedDatasets.getModulo()){
+
+                   long[] selected = var.getSelectedDims();
+                   selected[0] = 1;
+                   selected[1] = dimensions[1];
+                   selected[2] = dimensions[2];
+
+                   long[] start = var.getStartDims();
+                   start[0] = indexToRead;
+
+                   Object wholeDataset = var.read();
+
+                   int size = (int) (dimensions[1] * dimensions[2]);
+                   // int startIdx = selectedDatasets.getSlice() * size;
+                   addSlice(stack, wholeDataset, 0, size);
+               }
+           }
+           else{
+               // Select what to readout
+               long[] selected = var.getSelectedDims();
+               selected[0] = dimensions[0];
+               selected[1] = dimensions[1];
+               selected[2] = dimensions[2];
+
+               Object wholeDataset = var.read();
+
+               stack = new ImageStack((int) dimensions[2], (int) dimensions[1]);
+               int size = (int) (dimensions[1] * dimensions[2]);
+
+               for (int lev = 0; lev < dimensions[0]; ++lev) {
+                   int startIdx = lev * size;
+                   addSlice(stack, wholeDataset, startIdx, size);
+               }
+           }
+       }

-       ImagePlus imp = new ImagePlus(filename + " " + datasetName, stack);
+       ImagePlus imp = new ImagePlusHDF5(filename + " " + datasetName, stack);
        imp.resetDisplayRange();
        imp.show();

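The three pre-existing branches (single slice, modulo, full read) are unchanged apart from being re-indented into the new else block; only the virtual-stack branch is new. For the modulo branch, e.g. getModulo() == 5 and dimensions[0] == 100 reads slices 0, 5, 10, ..., 95, i.e. every fifth of the 100 images.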
@@ -297,8 +306,11 @@ public class HDF5Reader implements PlugIn {
            IJ.outOfMemory("Out of memory while loading file: " + filename);
        } finally {
            try {
-               if (file != null) {
-                   file.close();
+               // TODO workaround - to be removed
+               if(close){
+                   if (file != null) {
+                       file.close();
+                   }
                }
            } catch (HDF5Exception e) {
                logger.log(Level.WARNING, "Error while closing: " + filename, e);
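The close flag stays true for in-memory stacks and becomes false for virtual stacks: VirtualStackHDF5 needs the file handle to outlive run(), and the file is closed later by ImagePlusHDF5's windowClosed handler (see the new files below).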
@@ -335,6 +347,7 @@ public class HDF5Reader implements PlugIn {
            selectedDatasets.setGroup(panel.groupValues());
            selectedDatasets.setSlice(panel.getSlice());
            selectedDatasets.setModulo(panel.getModulo());
+           selectedDatasets.setVirtualStack(panel.useVirtualStack());
        }

        return selectedDatasets;
src/main/java/ch/psi/imagej/hdf5/ImagePlusHDF5.java (new file, 64 lines)

@@ -0,0 +1,64 @@
+package ch.psi.imagej.hdf5;
+
+import java.awt.event.WindowEvent;
+import java.awt.event.WindowListener;
+import java.util.logging.Logger;
+
+import ij.ImagePlus;
+import ij.ImageStack;
+
+public class ImagePlusHDF5 extends ImagePlus {
+
+
+   private static final Logger logger = Logger.getLogger(ImagePlusHDF5.class.getName());
+
+   public ImagePlusHDF5(String title, ImageStack stack) {
+       super(title, stack);
+   }
+
+   @Override
+   public void show() {
+       super.show();
+       getWindow().addWindowListener(new WindowListener() {
+
+           @Override
+           public void windowOpened(WindowEvent e) {
+               logger.info("");
+           }
+
+           @Override
+           public void windowIconified(WindowEvent e) {
+               logger.info("");
+           }
+
+           @Override
+           public void windowDeiconified(WindowEvent e) {
+               logger.info("");
+           }
+
+           @Override
+           public void windowDeactivated(WindowEvent e) {
+               logger.info("");
+           }
+
+           @Override
+           public void windowClosing(WindowEvent e) {
+               logger.info("Closing");
+           }
+
+           @Override
+           public void windowClosed(WindowEvent e) {
+               logger.info("Closed");
+               ImageStack stack = getStack();
+               if(stack instanceof VirtualStackHDF5){
+                   ((VirtualStackHDF5) stack).close();
+               }
+           }
+
+           @Override
+           public void windowActivated(WindowEvent e) {
+               logger.info("");
+           }
+       });
+   }
+}
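Only windowClosed carries logic; the other six methods are empty stubs required by the WindowListener interface. An equivalent, more compact variant (a sketch, not what the commit does) would extend java.awt.event.WindowAdapter and override only the method that matters:

    getWindow().addWindowListener(new java.awt.event.WindowAdapter() {
        @Override
        public void windowClosed(WindowEvent e) {
            // Release the HDF5 file handle held by the virtual stack
            ImageStack stack = getStack();
            if (stack instanceof VirtualStackHDF5) {
                ((VirtualStackHDF5) stack).close();
            }
        }
    });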
SelectionPanel.java

@@ -24,7 +24,7 @@ public class SelectionPanel extends JPanel {

    private final JList<Dataset> list;
    private JCheckBox checkbox;
-   private JCheckBox chckbxNewCheckBox;
+   private JCheckBox checkBoxVirtualStack;
    private JLabel lblSlice;
    private JPanel panel;
    private JTextField textField;
@@ -63,8 +63,9 @@ public class SelectionPanel extends JPanel {
        checkbox = new JCheckBox("Group Datasets (2D datasets only)");
        add(checkbox);

-       chckbxNewCheckBox = new JCheckBox("Virtual Stack");
-       add(chckbxNewCheckBox);
+       checkBoxVirtualStack = new JCheckBox("Virtual Stack");
+       checkBoxVirtualStack.setSelected(true);
+       add(checkBoxVirtualStack);

        panel = new JPanel();
        FlowLayout flowLayout = (FlowLayout) panel.getLayout();
@@ -102,4 +103,8 @@ public class SelectionPanel extends JPanel {
        }
        return null;
    }
+
+   public boolean useVirtualStack(){
+       return checkBoxVirtualStack.isSelected();
+   }
 }
src/main/java/ch/psi/imagej/hdf5/VirtualStackHDF5.java (new file, 174 lines)

@@ -0,0 +1,174 @@
+package ch.psi.imagej.hdf5;
+
+import java.util.logging.Level;
+import java.util.logging.Logger;
+
+import ncsa.hdf.object.Dataset;
+import ncsa.hdf.object.h5.H5File;
+import ij.ImageStack;
+import ij.process.ByteProcessor;
+import ij.process.ColorProcessor;
+import ij.process.FloatProcessor;
+import ij.process.ImageProcessor;
+import ij.process.ShortProcessor;
+
+public class VirtualStackHDF5 extends ImageStack {
+
+
+   private static final Logger logger = Logger.getLogger(VirtualStackHDF5.class.getName());
+
+   private int bitDepth = 0;
+   private Dataset dataset;
+   private H5File file;
+
+   public VirtualStackHDF5(H5File file, Dataset dataset){
+       super((int) dataset.getDims()[2], (int) dataset.getDims()[1]);
+       this.dataset = dataset;
+       this.file = file;
+   }
+
+   /** Does nothing. */
+   public void addSlice(String sliceLabel, Object pixels) {
+   }
+
+   /** Does nothing. */
+   public void addSlice(String sliceLabel, ImageProcessor ip) {
+   }
+
+   /** Does nothing. */
+   public void addSlice(String sliceLabel, ImageProcessor ip, int n) {
+   }
+
+   /** Does nothing. */
+   public void deleteSlice(int n) {
+   }
+
+   /** Does nothing. */
+   public void deleteLastSlice() {
+   }
+
+   public Object getPixels(int slice) {
+       try {
+           long[] dimensions = dataset.getDims();
+
+           // Select what to readout
+           long[] selected = dataset.getSelectedDims();
+           selected[0] = 1;
+           selected[1] = dimensions[1];
+           selected[2] = dimensions[2];
+
+           long[] start = dataset.getStartDims();
+           start[0] = slice - 1; // Indexing in ImageJ starts at 1
+
+           Object wholeDataset = dataset.read();
+
+           if (wholeDataset instanceof byte[]) {
+               return (byte[]) wholeDataset;
+           } else if (wholeDataset instanceof short[]) {
+               return (short[]) wholeDataset;
+           } else if (wholeDataset instanceof int[]) {
+               return HDF5Utilities.convertToFloat((int[]) wholeDataset);
+           } else if (wholeDataset instanceof long[]) {
+               return HDF5Utilities.convertToFloat((long[]) wholeDataset);
+           } else if (wholeDataset instanceof float[]) {
+               return (float[]) wholeDataset;
+           } else if (wholeDataset instanceof double[]) {
+               return HDF5Utilities.convertToFloat((double[]) wholeDataset);
+           } else {
+               logger.warning("Datatype not supported");
+           }
+       } catch (OutOfMemoryError | Exception e) {
+           logger.log(Level.WARNING, "Unable to open slice", e);
+       }
+
+       return null;
+   }
+
+   /**
+    * Assigns a pixel array to the specified slice, where 1<=n<=nslices.
+    */
+   public void setPixels(Object pixels, int n) {
+   }
+
+   /**
+    * Returns an ImageProcessor for the specified slice, where 1<=n<=nslices.
+    * Returns null if the stack is empty.
+    */
+   public ImageProcessor getProcessor(int slice) {
+
+       long[] dimensions = dataset.getDims();
+       final Object pixels = getPixels(slice);
+
+       // Todo support more ImageProcessor types
+       ImageProcessor ip;
+
+       if (pixels instanceof byte[]){
+           ip = new ByteProcessor((int) dimensions[2], (int) dimensions[1]);
+       }
+       else if (pixels instanceof short[]){
+           ip = new ShortProcessor((int) dimensions[2], (int) dimensions[1]);
+       }
+       else if (pixels instanceof int[]){
+           ip = new ColorProcessor((int) dimensions[2], (int) dimensions[1]);
+       }
+       else if (pixels instanceof float[]){
+           ip = new FloatProcessor((int) dimensions[2], (int) dimensions[1]);
+       }
+       else {
+           throw new IllegalArgumentException("Unknown stack type");
+       }
+
+       ip.setPixels(pixels);
+       return ip;
+   }
+
+   /** Returns the number of slices in this stack. */
+   public int getSize() {
+       return (int) this.dataset.getDims()[0];
+   }
+
+   /** Returns the label of the Nth image. */
+   public String getSliceLabel(int slice) {
+       return "Slice: "+slice;
+   }
+
+   /** Returns null. */
+   public Object[] getImageArray() {
+       return null;
+   }
+
+   /** Does nothing. */
+   public void setSliceLabel(String label, int n) {
+   }
+
+   /** Always returns true. */
+   public boolean isVirtual() {
+       return true;
+   }
+
+   /** Does nothing. */
+   public void trim() {
+   }
+
+   /**
+    * Returns the bit depth (8, 16, 24 or 32), or 0 if the bit depth is not
+    * known.
+    */
+   public int getBitDepth() {
+       return bitDepth;
+   }
+
+   /**
+    * Close HDF5 file
+    */
+   public void close() {
+       logger.info("Closing HDF5 file");
+       try{
+           file.close();
+       }
+       catch(Exception e){
+           logger.log(Level.WARNING, "Unable to close HDF5 file", e);
+       }
+
+   }
+}
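For reference, a minimal usage sketch of the new class outside the plugin (file name and dataset path are hypothetical; exception handling omitted; dataset.init() populates the dims and selection arrays the constructor and getPixels rely on):

    H5File file = new H5File("/tmp/example.h5", H5File.READ); // hypothetical file
    Dataset dataset = (Dataset) file.get("/exchange/data");   // hypothetical dataset path
    dataset.init();

    VirtualStackHDF5 stack = new VirtualStackHDF5(file, dataset);
    System.out.println("Slices: " + stack.getSize());
    ImageProcessor ip = stack.getProcessor(1); // reads only slice 1 from disk
    stack.close();                             // releases the HDF5 file handle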