diff --git a/pom.xml b/pom.xml
index 4852c43..023f899 100644
--- a/pom.xml
+++ b/pom.xml
@@ -3,7 +3,7 @@
4.0.0
ch.psi
imagej.hdf5
- 0.8.0
+ 0.9.0
diff --git a/src/main/java/ch/psi/imagej/hdf5/DatasetSelection.java b/src/main/java/ch/psi/imagej/hdf5/DatasetSelection.java
index 7baf3cd..f2313d3 100644
--- a/src/main/java/ch/psi/imagej/hdf5/DatasetSelection.java
+++ b/src/main/java/ch/psi/imagej/hdf5/DatasetSelection.java
@@ -12,6 +12,7 @@ public class DatasetSelection {
private Integer slice;
// Intervall to read images
private Integer modulo;
+ private boolean virtualStack;
public List getDatasets() {
return datasets;
@@ -37,4 +38,10 @@ public class DatasetSelection {
public Integer getModulo() {
return modulo;
}
+ public void setVirtualStack(boolean virtualStack) {
+ this.virtualStack = virtualStack;
+ }
+ public boolean isVirtualStack(){
+ return this.virtualStack;
+ }
}
diff --git a/src/main/java/ch/psi/imagej/hdf5/HDF5Reader.java b/src/main/java/ch/psi/imagej/hdf5/HDF5Reader.java
index a9e435b..d32a777 100644
--- a/src/main/java/ch/psi/imagej/hdf5/HDF5Reader.java
+++ b/src/main/java/ch/psi/imagej/hdf5/HDF5Reader.java
@@ -51,6 +51,7 @@ public class HDF5Reader implements PlugIn {
// Read HDF5 file
H5File file = null;
+ boolean close = true;
try {
file = new H5File(filename, H5File.READ);
file.setMaxMembers(Integer.MAX_VALUE);
@@ -58,6 +59,8 @@ public class HDF5Reader implements PlugIn {
List datasets = HDF5Utilities.getDatasets(file);
DatasetSelection selectedDatasets = selectDatasets(datasets);
+ // TODO to be removed - Workaround virtual stack - keep HDF5 file open at the end
+ close=!selectedDatasets.isVirtualStack();
// TODO Remove
@@ -208,68 +211,74 @@ public class HDF5Reader implements PlugIn {
ImageStack stack;
- if(selectedDatasets.getSlice()!=null){
-
- // Select what to readout
- long[] selected = var.getSelectedDims();
- selected[0] = 1;
- selected[1] = dimensions[1];
- selected[2] = dimensions[2];
-
- long[] start = var.getStartDims();
- start[0] = selectedDatasets.getSlice();
-
- Object wholeDataset = var.read();
-
- stack = new ImageStack((int) dimensions[2], (int) dimensions[1]);
- int size = (int) (dimensions[1] * dimensions[2]);
-
-// int startIdx = selectedDatasets.getSlice() * size;
- addSlice(stack, wholeDataset, 0, size);
+ if(selectedDatasets.isVirtualStack()){
+ logger.info("Use virtual stack");
+ stack = new VirtualStackHDF5(file, var);
}
- else if(selectedDatasets.getModulo()!=null){
- logger.info("Read every "+selectedDatasets.getModulo()+" image");
- // Select what to readout
-
- stack = new ImageStack((int) dimensions[2], (int) dimensions[1]);
-
- for(int indexToRead=0;indexToRead list;
private JCheckBox checkbox;
- private JCheckBox chckbxNewCheckBox;
+ private JCheckBox checkBoxVirtualStack;
private JLabel lblSlice;
private JPanel panel;
private JTextField textField;
@@ -63,8 +63,9 @@ public class SelectionPanel extends JPanel {
checkbox = new JCheckBox("Group Datasets (2D datasets only)");
add(checkbox);
- chckbxNewCheckBox = new JCheckBox("Virtual Stack");
- add(chckbxNewCheckBox);
+ checkBoxVirtualStack = new JCheckBox("Virtual Stack");
+ checkBoxVirtualStack.setSelected(true);
+ add(checkBoxVirtualStack);
panel = new JPanel();
FlowLayout flowLayout = (FlowLayout) panel.getLayout();
@@ -102,4 +103,8 @@ public class SelectionPanel extends JPanel {
}
return null;
}
+
+ public boolean useVirtualStack(){
+ return checkBoxVirtualStack.isSelected();
+ }
}
diff --git a/src/main/java/ch/psi/imagej/hdf5/VirtualStackHDF5.java b/src/main/java/ch/psi/imagej/hdf5/VirtualStackHDF5.java
new file mode 100644
index 0000000..ed648a6
--- /dev/null
+++ b/src/main/java/ch/psi/imagej/hdf5/VirtualStackHDF5.java
@@ -0,0 +1,174 @@
+package ch.psi.imagej.hdf5;
+
+import java.util.logging.Level;
+import java.util.logging.Logger;
+
+import ncsa.hdf.object.Dataset;
+import ncsa.hdf.object.h5.H5File;
+import ij.ImageStack;
+import ij.process.ByteProcessor;
+import ij.process.ColorProcessor;
+import ij.process.FloatProcessor;
+import ij.process.ImageProcessor;
+import ij.process.ShortProcessor;
+
+public class VirtualStackHDF5 extends ImageStack {
+
+
+ private static final Logger logger = Logger.getLogger(VirtualStackHDF5.class.getName());
+
+ private int bitDepth = 0;
+ private Dataset dataset;
+ private H5File file;
+
+ public VirtualStackHDF5(H5File file, Dataset dataset){
+ super((int) dataset.getDims()[2], (int) dataset.getDims()[1]);
+ this.dataset = dataset;
+ this.file = file;
+ }
+
+ /** Does nothing. */
+ public void addSlice(String sliceLabel, Object pixels) {
+ }
+
+ /** Does nothing. */
+ public void addSlice(String sliceLabel, ImageProcessor ip) {
+ }
+
+ /** Does nothing. */
+ public void addSlice(String sliceLabel, ImageProcessor ip, int n) {
+ }
+
+ /** Does nothing. */
+ public void deleteSlice(int n) {
+ }
+
+ /** Does nothing. */
+ public void deleteLastSlice() {
+ }
+
+ public Object getPixels(int slice) {
+ try {
+ long[] dimensions = dataset.getDims();
+
+ // Select what to readout
+ long[] selected = dataset.getSelectedDims();
+ selected[0] = 1;
+ selected[1] = dimensions[1];
+ selected[2] = dimensions[2];
+
+ long[] start = dataset.getStartDims();
+ start[0] = slice-1; // Slice indexing in ImageJ starts at 1
+
+ Object wholeDataset = dataset.read();
+
+ if (wholeDataset instanceof byte[]) {
+ return (byte[]) wholeDataset;
+ } else if (wholeDataset instanceof short[]) {
+ return (short[]) wholeDataset;
+ } else if (wholeDataset instanceof int[]) {
+ return HDF5Utilities.convertToFloat((int[]) wholeDataset);
+ } else if (wholeDataset instanceof long[]) {
+ return HDF5Utilities.convertToFloat((long[]) wholeDataset);
+ } else if (wholeDataset instanceof float[]) {
+ return (float[]) wholeDataset;
+ } else if (wholeDataset instanceof double[]) {
+ return HDF5Utilities.convertToFloat((double[]) wholeDataset);
+ } else {
+ logger.warning("Datatype not supported");
+ }
+ } catch (OutOfMemoryError | Exception e) {
+ logger.log(Level.WARNING, "Unable to open slice", e);
+ }
+
+ return null;
+ }
+
+ /**
+ * Assigns a pixel array to the specified slice, where 1<=n<=nslices.
+ */
+ public void setPixels(Object pixels, int n) {
+ }
+
+ /**
+ * Returns an ImageProcessor for the specified slice, where 1<=n<=nslices.
+ * Returns null if the stack is empty.
+ */
+ public ImageProcessor getProcessor(int slice) {
+
+ long[] dimensions = dataset.getDims();
+ final Object pixels = getPixels(slice);
+
+ // TODO: support more ImageProcessor types
+ ImageProcessor ip;
+
+ if (pixels instanceof byte[]){
+ ip = new ByteProcessor((int) dimensions[2], (int) dimensions[1]);
+ }
+ else if (pixels instanceof short[]){
+ ip = new ShortProcessor((int) dimensions[2], (int) dimensions[1]);
+ }
+ else if (pixels instanceof int[]){
+ ip = new ColorProcessor((int) dimensions[2], (int) dimensions[1]);
+ }
+ else if (pixels instanceof float[]){
+ ip = new FloatProcessor((int) dimensions[2], (int) dimensions[1]);
+ }
+ else {
+ throw new IllegalArgumentException("Unknown stack type");
+ }
+
+ ip.setPixels(pixels);
+ return ip;
+ }
+
+ /** Returns the number of slices in this stack. */
+ public int getSize() {
+ return (int) this.dataset.getDims()[0];
+ }
+
+ /** Returns the label of the Nth image. */
+ public String getSliceLabel(int slice) {
+ return "Slice: "+slice;
+ }
+
+ /** Returns null. */
+ public Object[] getImageArray() {
+ return null;
+ }
+
+ /** Does nothing. */
+ public void setSliceLabel(String label, int n) {
+ }
+
+ /** Always returns true. */
+ public boolean isVirtual() {
+ return true;
+ }
+
+ /** Does nothing. */
+ public void trim() {
+ }
+
+ /**
+ * Returns the bit depth (8, 16, 24 or 32), or 0 if the bit depth is not
+ * known.
+ */
+ public int getBitDepth() {
+ return bitDepth;
+ }
+
+ /**
+ * Close HDF5 file
+ */
+ public void close() {
+ logger.info("Closing HDF5 file");
+ try{
+ file.close();
+ }
+ catch(Exception e){
+ logger.log(Level.WARNING, "Unable to close HDF5 file", e);
+ }
+
+ }
+}