Mirror of https://github.com/paulscherrerinstitute/ch.psi.imagej.hdf5.git (synced 2025-06-07 02:50:42 +02:00)
Exchanges lots of code with standard constructs
This commit is contained in:

parent e1910dcd7f
commit 7ff965bc08

Changed files: .project (6 changes)
@@ -15,9 +15,15 @@
 			<arguments>
 			</arguments>
 		</buildCommand>
+		<buildCommand>
+			<name>ch.acanda.eclipse.pmd.builder.PMDBuilder</name>
+			<arguments>
+			</arguments>
+		</buildCommand>
 	</buildSpec>
 	<natures>
 		<nature>org.eclipse.jdt.core.javanature</nature>
 		<nature>org.eclipse.m2e.core.maven2Nature</nature>
+		<nature>ch.acanda.eclipse.pmd.builder.PMDNature</nature>
 	</natures>
 </projectDescription>
@@ -75,6 +75,8 @@ cd <FIJI_HOME>
 <fiji> --java-home /usr/lib/jvm/jre-1.7.0-openjdk.x86_64 -Djava.library.path=lib/linux64
 ```
 
+Starting with Java 8, just the LD_LIBRARY_PATH variable needs to be set. For MacOSX it is: export `DYLD_LIBRARY_PATH=lib/mac64/:$DYLD_LIBRARY_PATH`.
+
 # Development
 To create an all in one zip file for installation in a ImageJ installation use:
 `mvn clean compile assembly:assembly`
@@ -31,7 +31,10 @@ import ij.process.ImageProcessor;
 
 import java.io.File;
 import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.HashMap;
 import java.util.List;
+import java.util.Map;
 import java.util.logging.Level;
 import java.util.logging.Logger;
 import java.awt.*;
@@ -42,19 +45,19 @@ import ncsa.hdf.hdf5lib.exceptions.HDF5Exception;
 
 public class HDF5Reader implements PlugIn {
 
+	private static final Logger logger = Logger.getLogger(HDF5Reader.class.getName());
+
 	public static void main(String[] args){
 		HDF5Reader r = new HDF5Reader();
 		r.run("");
 	}
 
-	private static final Logger logger = Logger.getLogger(HDF5Reader.class.getName());
-
 	public void run(String arg) {
 		// make sure default values for config are written
 		// HDF5_Config.setDefaultsIfNoValueExists();
 
-		// run plugin
+		// Run plugin
 		String directory = "";
 		String name = "";
 		boolean tryAgain;
@@ -71,7 +74,7 @@ public class HDF5Reader implements PlugIn {
 			name = od.getFileName();
 			if (name == null)
 				return;
-			if (name == "")
+			if (name.equals(""))
 				return;
 
 			File testFile = new File(directory + name);
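The `name == ""` fix in the hunk above is worth a second look: `==` compares object identity, while `equals` compares contents, and the string returned by `OpenDialog.getFileName()` is in general not the same object as the `""` literal, so the identity check can miss an empty name. A minimal, self-contained illustration (not part of the plugin):

```java
public class StringEqualityDemo {
    public static void main(String[] args) {
        // Build an empty string at runtime so it is not the interned "" literal.
        String name = new StringBuilder().toString();

        System.out.println(name == "");      // false: different object identity
        System.out.println(name.equals("")); // true: same contents
        System.out.println(name.isEmpty());  // true: the idiomatic emptiness check
    }
}
```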
@@ -93,11 +96,7 @@ public class HDF5Reader implements PlugIn {
 			inFile = new H5File(directory + name, H5File.READ);
 			inFile.open();
 
-			/*-------------------------------------------------------------------
-			 * parse the file
-			 *-------------------------------------------------------------------*/
-
-
+			// Parse the file
 			Group rootNode = (Group) ((javax.swing.tree.DefaultMutableTreeNode) inFile.getRootNode()).getUserObject();
 			List<Dataset> varList = getDataSetList(rootNode, new ArrayList<Dataset>());
 
@@ -191,7 +190,7 @@ public class HDF5Reader implements PlugIn {
 			logger.info("");
 			IJ.showStatus("Reading Variable: " + var.getName() + " (" + extent[0] + " slices)");
 
-			Attribute elemsize_att = getAttribute(var, "element_size_um");
+			Attribute elemsize_att = getAttributes(var).get("element_size_um");
 			double[] elem_sizes = new double[3];
 			if (elemsize_att == null) {
 				elem_sizes[0] = 1.0;
@@ -219,6 +218,7 @@ public class HDF5Reader implements PlugIn {
 			}
 			logger.info("   Element-Size in um (level,row,col): " + elem_sizes[0] + ", " + elem_sizes[1] + ", " + elem_sizes[2]);
 
+
 			// nice gadget to update the progress bar
 			long progressDivisor = extent[0] / 50; // we assume 50 process steps
 			if (progressDivisor < 1)
@@ -296,6 +296,7 @@ public class HDF5Reader implements PlugIn {
 
 				long stackSize = extent[2] * extent[3];
 				long singleVolumeSize = extent[1] * stackSize;
+				int size = (int) stackSize;
 
 				for (int volIDX = 0; volIDX < extent[0]; ++volIDX) {
 					if ((volIDX % progressDivisor) == 0)
@@ -306,14 +307,15 @@ public class HDF5Reader implements PlugIn {
 						// select hyperslab for lev
 						// start[1] = lev;
 						// Object slice = var.read();
-						long startIdx = (volIDX * singleVolumeSize * 3) + (lev * stackSize * 3);
-						long numElements = stackSize * 3;
+						int startIdx = (int)((volIDX * singleVolumeSize * 3) + (lev * stackSize * 3));
+						// long numElements = stackSize * 3;
+						int endIdx = (int)(startIdx+stackSize*3-1);
 
 						if (wholeDataset instanceof byte[]) {
-							byte[] tmp = (byte[]) extractSubarray(wholeDataset, startIdx, numElements);
-							byte[] rChannel = new byte[(int) stackSize];
-							byte[] gChannel = new byte[(int) stackSize];
-							byte[] bChannel = new byte[(int) stackSize];
+							byte[] tmp = Arrays.copyOfRange((byte[]) wholeDataset, startIdx, endIdx);
+							byte[] rChannel = new byte[size];
+							byte[] gChannel = new byte[size];
+							byte[] bChannel = new byte[size];
 							for (int row = 0; row < extent[2]; ++row) {
 								for (int col = 0; col < extent[3]; ++col) {
 									int offsetRGB = (row * (int) extent[2] * 3) + (col * 3);
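For readers following the `extractSubarray` → `Arrays.copyOfRange` conversion in this and the following hunks: `Arrays.copyOfRange(src, from, to)` treats `to` as exclusive, copying indices `from` through `to - 1`. Since the commit passes `endIdx = startIdx + stackSize*3 - 1` as that exclusive bound, each copy is one element shorter than the `numElements` the old helper returned. A small sketch with made-up data showing both variants:

```java
import java.util.Arrays;

public class CopyOfRangeDemo {
    public static void main(String[] args) {
        byte[] data = {10, 20, 30, 40, 50, 60};
        int startIdx = 2;
        int numElements = 3;

        // Exclusive upper bound: copies indices 2, 3 and 4 -> {30, 40, 50}
        byte[] full = Arrays.copyOfRange(data, startIdx, startIdx + numElements);

        // Passing startIdx + numElements - 1, as in this commit,
        // copies indices 2 and 3 only -> {30, 40}: one element short.
        byte[] short1 = Arrays.copyOfRange(data, startIdx, startIdx + numElements - 1);

        System.out.println(Arrays.toString(full));   // [30, 40, 50]
        System.out.println(Arrays.toString(short1)); // [30, 40]
    }
}
```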
@@ -327,10 +329,10 @@ public class HDF5Reader implements PlugIn {
 							stack.addSlice(null, gChannel);
 							stack.addSlice(null, bChannel);
 						} else if (wholeDataset instanceof short[]) {
-							short[] tmp = (short[]) extractSubarray(wholeDataset, startIdx, numElements);
-							short[] rChannel = new short[(int) stackSize];
-							short[] gChannel = new short[(int) stackSize];
-							short[] bChannel = new short[(int) stackSize];
+							short[] tmp = Arrays.copyOfRange((short[]) wholeDataset, startIdx, endIdx);
+							short[] rChannel = new short[size];
+							short[] gChannel = new short[size];
+							short[] bChannel = new short[size];
 							for (int row = 0; row < extent[2]; ++row) {
 								for (int col = 0; col < extent[3]; ++col) {
 									int offsetRGB = (row * (int) extent[2] * 3) + (col * 3);
@@ -345,10 +347,10 @@ public class HDF5Reader implements PlugIn {
 							stack.addSlice(null, bChannel);
 						} else if (wholeDataset instanceof int[]) {
 							if (datatypeIfUnsupported.getDatatypeClass() == Datatype.CLASS_FLOAT) {
-								float[] tmp = convertInt32ToFloat((int[]) extractSubarray(wholeDataset, startIdx, numElements));
-								float[] rChannel = new float[(int) stackSize];
-								float[] gChannel = new float[(int) stackSize];
-								float[] bChannel = new float[(int) stackSize];
+								float[] tmp = convertInt32ToFloat(Arrays.copyOfRange((int[]) wholeDataset, startIdx, endIdx));
+								float[] rChannel = new float[size];
+								float[] gChannel = new float[size];
+								float[] bChannel = new float[size];
 								for (int row = 0; row < extent[2]; ++row) {
 									for (int col = 0; col < extent[3]; ++col) {
 										int offsetRGB = (row * (int) extent[2] * 3) + (col * 3);
@@ -363,10 +365,10 @@ public class HDF5Reader implements PlugIn {
 								stack.addSlice(null, bChannel);
 							}
 							if (datatypeIfUnsupported.getDatatypeClass() == Datatype.CLASS_INTEGER) {
-								short[] tmp = convertInt32ToShort((int[]) extractSubarray(wholeDataset, startIdx, numElements));
-								short[] rChannel = new short[(int) stackSize];
-								short[] gChannel = new short[(int) stackSize];
-								short[] bChannel = new short[(int) stackSize];
+								short[] tmp = convertInt32ToShort(Arrays.copyOfRange((int[]) wholeDataset, startIdx, endIdx));
+								short[] rChannel = new short[size];
+								short[] gChannel = new short[size];
+								short[] bChannel = new short[size];
 								for (int row = 0; row < extent[2]; ++row) {
 									for (int col = 0; col < extent[3]; ++col) {
 										int offsetRGB = (row * (int) extent[2] * 3) + (col * 3);
@@ -382,10 +384,10 @@ public class HDF5Reader implements PlugIn {
 							}
 						} else if (wholeDataset instanceof long[]) {
 							if (datatypeIfUnsupported.getDatatypeClass() == Datatype.CLASS_FLOAT) {
-								float[] tmp = convertInt64ToFloat((long[]) extractSubarray(wholeDataset, startIdx, numElements));
-								float[] rChannel = new float[(int) stackSize];
-								float[] gChannel = new float[(int) stackSize];
-								float[] bChannel = new float[(int) stackSize];
+								float[] tmp = convertInt64ToFloat(Arrays.copyOfRange((long[]) wholeDataset, startIdx, endIdx));
+								float[] rChannel = new float[size];
+								float[] gChannel = new float[size];
+								float[] bChannel = new float[size];
 								for (int row = 0; row < extent[2]; ++row) {
 									for (int col = 0; col < extent[3]; ++col) {
 										int offsetRGB = (row * (int) extent[2] * 3) + (col * 3);
@@ -400,10 +402,10 @@ public class HDF5Reader implements PlugIn {
 								stack.addSlice(null, bChannel);
 							}
 							if (datatypeIfUnsupported.getDatatypeClass() == Datatype.CLASS_INTEGER) {
-								short[] tmp = convertInt64ToShort((long[]) extractSubarray(wholeDataset, startIdx, numElements));
-								short[] rChannel = new short[(int) stackSize];
-								short[] gChannel = new short[(int) stackSize];
-								short[] bChannel = new short[(int) stackSize];
+								short[] tmp = convertInt64ToShort(Arrays.copyOfRange((long[]) wholeDataset, startIdx, endIdx));
+								short[] rChannel = new short[size];
+								short[] gChannel = new short[size];
+								short[] bChannel = new short[size];
 								for (int row = 0; row < extent[2]; ++row) {
 									for (int col = 0; col < extent[3]; ++col) {
 										int offsetRGB = (row * (int) extent[2] * 3) + (col * 3);
@@ -418,10 +420,10 @@ public class HDF5Reader implements PlugIn {
 								stack.addSlice(null, bChannel);
 							}
 						} else if (wholeDataset instanceof float[]) {
-							float[] tmp = (float[]) extractSubarray(wholeDataset, startIdx, numElements);
-							float[] rChannel = new float[(int) stackSize];
-							float[] gChannel = new float[(int) stackSize];
-							float[] bChannel = new float[(int) stackSize];
+							float[] tmp = Arrays.copyOfRange((float[]) wholeDataset, startIdx, endIdx);
+							float[] rChannel = new float[size];
+							float[] gChannel = new float[size];
+							float[] bChannel = new float[size];
 							for (int row = 0; row < extent[2]; ++row) {
 								for (int col = 0; col < extent[3]; ++col) {
 									int offsetRGB = (row * (int) extent[2] * 3) + (col * 3);
@@ -435,10 +437,10 @@ public class HDF5Reader implements PlugIn {
 							stack.addSlice(null, gChannel);
 							stack.addSlice(null, bChannel);
 						} else if (wholeDataset instanceof double[]) {
-							float[] tmp = convertDoubleToFloat((double[]) extractSubarray(wholeDataset, startIdx, numElements));
-							float[] rChannel = new float[(int) stackSize];
-							float[] gChannel = new float[(int) stackSize];
-							float[] bChannel = new float[(int) stackSize];
+							float[] tmp = convertDoubleToFloat(Arrays.copyOfRange((double[]) wholeDataset, startIdx, endIdx));
+							float[] rChannel = new float[size];
+							float[] gChannel = new float[size];
+							float[] bChannel = new float[size];
 							for (int row = 0; row < extent[2]; ++row) {
 								for (int col = 0; col < extent[3]; ++col) {
 									int offsetRGB = (row * (int) extent[2] * 3) + (col * 3);
@@ -452,8 +454,7 @@ public class HDF5Reader implements PlugIn {
 							stack.addSlice(null, gChannel);
 							stack.addSlice(null, bChannel);
 						} else {
-							// try to put pixels on stack
-							stack.addSlice(null, extractSubarray(wholeDataset, startIdx, numElements));
+							logger.warning("Datatype not supported");
 						}
 					}
 				}
@@ -537,13 +538,14 @@ public class HDF5Reader implements PlugIn {
 					// start[0] = lev;
 					// Object slice = var.read();
 
-					long startIdx = lev * stackSize;
-					long numElements = stackSize;
-					Object slice = extractSubarray(wholeDataset, startIdx, numElements);
+					int startIdx = (int)(lev * stackSize);
+					// long numElements = stackSize;
+					int endIdx = (int)(startIdx+stackSize-1);
+					// Object slice = extractSubarray(wholeDataset, startIdx, numElements);
 
 					int size = (int) (extent[2] * extent[1]);
-					if (slice instanceof byte[]) {
-						byte[] tmp = (byte[]) slice;
+					if (wholeDataset instanceof byte[]) {
+						byte[] tmp = Arrays.copyOfRange((byte[]) wholeDataset, startIdx, endIdx);
 						byte[] rChannel = new byte[size];
 						byte[] gChannel = new byte[size];
 						byte[] bChannel = new byte[size];
@@ -559,8 +561,8 @@ public class HDF5Reader implements PlugIn {
 						stack.addSlice(null, rChannel);
 						stack.addSlice(null, gChannel);
 						stack.addSlice(null, bChannel);
-					} else if (slice instanceof short[]) {
-						short[] tmp = (short[]) slice;
+					} else if (wholeDataset instanceof short[]) {
+						short[] tmp = Arrays.copyOfRange((short[]) wholeDataset, startIdx, endIdx);
 						short[] rChannel = new short[size];
 						short[] gChannel = new short[size];
 						short[] bChannel = new short[size];
@@ -576,8 +578,8 @@ public class HDF5Reader implements PlugIn {
 						stack.addSlice(null, rChannel);
 						stack.addSlice(null, gChannel);
 						stack.addSlice(null, bChannel);
-					} else if (slice instanceof int[]) {
-						int[] tmp = (int[]) slice;
+					} else if (wholeDataset instanceof int[]) {
+						int[] tmp = Arrays.copyOfRange((int[]) wholeDataset, startIdx, endIdx);
 						int[] rChannel = new int[size];
 						int[] gChannel = new int[size];
 						int[] bChannel = new int[size];
@@ -593,8 +595,8 @@ public class HDF5Reader implements PlugIn {
 						stack.addSlice(null, rChannel);
 						stack.addSlice(null, gChannel);
 						stack.addSlice(null, bChannel);
-					} else if (slice instanceof long[]) {
-						long[] tmp = (long[]) slice;
+					} else if (wholeDataset instanceof long[]) {
+						long[] tmp = Arrays.copyOfRange((long[]) wholeDataset, startIdx, endIdx);
 						long[] rChannel = new long[size];
 						long[] gChannel = new long[size];
 						long[] bChannel = new long[size];
@@ -610,8 +612,8 @@ public class HDF5Reader implements PlugIn {
 						stack.addSlice(null, rChannel);
 						stack.addSlice(null, gChannel);
 						stack.addSlice(null, bChannel);
-					} else if (slice instanceof float[]) {
-						float[] tmp = (float[]) slice;
+					} else if (wholeDataset instanceof float[]) {
+						float[] tmp = Arrays.copyOfRange((float[]) wholeDataset, startIdx, endIdx);
 						float[] rChannel = new float[size];
 						float[] gChannel = new float[size];
 						float[] bChannel = new float[size];
@@ -627,8 +629,8 @@ public class HDF5Reader implements PlugIn {
 						stack.addSlice(null, rChannel);
 						stack.addSlice(null, gChannel);
 						stack.addSlice(null, bChannel);
-					} else if (slice instanceof double[]) {
-						double[] tmp = (double[]) slice;
+					} else if (wholeDataset instanceof double[]) {
+						double[] tmp = Arrays.copyOfRange((double[]) wholeDataset, startIdx, endIdx);
 						double[] rChannel = new double[size];
 						double[] gChannel = new double[size];
 						double[] bChannel = new double[size];
@@ -716,17 +718,18 @@ public class HDF5Reader implements PlugIn {
 						// select hyperslab for lev
 						// start[1] = lev;
 						// Object slice = var.read();
-						long startIdx = (volIDX * singleVolumeSize) + (lev * stackSize);
-						long numElements = stackSize;
+						int startIdx = (int)((volIDX * singleVolumeSize) + (lev * stackSize));
+						int endIdx = (int)(startIdx+stackSize-1);
+						// long numElements = stackSize;
 
 						if (wholeDataset instanceof byte[]) {
-							byte[] tmp = (byte[]) extractSubarray(wholeDataset, startIdx, numElements);
+							byte[] tmp = Arrays.copyOfRange((byte[]) wholeDataset, startIdx, endIdx);
 							stack.addSlice(null, tmp);
 						} else if (wholeDataset instanceof short[]) {
-							short[] tmp = (short[]) extractSubarray(wholeDataset, startIdx, numElements);
+							short[] tmp = Arrays.copyOfRange((short[]) wholeDataset, startIdx, endIdx);
 							stack.addSlice(null, tmp);
 						} else if (wholeDataset instanceof int[]) {
-							int[] tmp = (int[]) extractSubarray(wholeDataset, startIdx, numElements);
+							int[] tmp = Arrays.copyOfRange((int[]) wholeDataset, startIdx, endIdx);
 							if (datatypeIfUnsupported.getDatatypeClass() == Datatype.CLASS_FLOAT) {
 								stack.addSlice(null, convertInt32ToFloat(tmp));
 							}
@@ -734,7 +737,7 @@ public class HDF5Reader implements PlugIn {
 								stack.addSlice(null, convertInt32ToShort(tmp));
 							}
 						} else if (wholeDataset instanceof long[]) {
-							long[] tmp = (long[]) extractSubarray(wholeDataset, startIdx, numElements);
+							long[] tmp = Arrays.copyOfRange((long[]) wholeDataset, startIdx, endIdx);
 							if (datatypeIfUnsupported.getDatatypeClass() == Datatype.CLASS_FLOAT) {
 								stack.addSlice(null, convertInt64ToFloat(tmp));
 							}
@@ -742,14 +745,13 @@ public class HDF5Reader implements PlugIn {
 								stack.addSlice(null, convertInt64ToShort(tmp));
 							}
 						} else if (wholeDataset instanceof float[]) {
-							float[] tmp = (float[]) extractSubarray(wholeDataset, startIdx, numElements);
+							float[] tmp = Arrays.copyOfRange((float[]) wholeDataset, startIdx, endIdx);
 							stack.addSlice(null, tmp);
 						} else if (wholeDataset instanceof double[]) {
-							float[] tmp = convertDoubleToFloat((double[]) extractSubarray(wholeDataset, startIdx, numElements));
+							float[] tmp = convertDoubleToFloat(Arrays.copyOfRange((double[]) wholeDataset, startIdx, endIdx));
 							stack.addSlice(null, tmp);
 						} else {
-							// try to put pixels on stack
-							stack.addSlice(null, extractSubarray(wholeDataset, startIdx, numElements));
+							logger.warning("Datatype not supported");
 						}
 					}
 				}
@@ -760,12 +762,12 @@ public class HDF5Reader implements PlugIn {
 			int nChannels = 1;
 			int nSlices = (int) extent[1];
 			int nFrames = (int) extent[0];
-			Integer nFramesI = new Integer(nFrames);
-			Integer nSlicesI = new Integer(nSlices);
+			Integer nFramesI = nFrames;
+			Integer nSlicesI = nSlices;
 			logger.info("nFrames: " + nFramesI.toString());
 			logger.info("nSlices: " + nSlicesI.toString());
 
-			Integer myStackSize = new Integer(stack.getSize());
+			Integer myStackSize = stack.getSize();
 			logger.info("stackSize: " + myStackSize.toString());
 
 			imp.setDimensions(nChannels, nSlices, nFrames);
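The `new Integer(...)` removals above lean on autoboxing: an assignment like `Integer nFramesI = nFrames;` compiles to `Integer.valueOf(nFrames)`, which may return a cached instance for small values, whereas the `Integer(int)` constructor always allocates a fresh object (and was later deprecated in Java 9). A small demonstration:

```java
public class AutoboxDemo {
    public static void main(String[] args) {
        int nFrames = 42;

        Integer boxed = nFrames;              // autoboxing: compiles to Integer.valueOf(42)
        Integer cached = Integer.valueOf(42); // values in -128..127 come from a shared cache

        System.out.println(boxed.equals(cached)); // true: same value
        System.out.println(boxed.toString());     // "42", as used in the logger calls above
    }
}
```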
@@ -977,16 +979,17 @@ public class HDF5Reader implements PlugIn {
 					// start[0] = lev;
 					// Object slice = var.read();
 
-					long startIdx = lev * stackSize;
-					long numElements = stackSize;
+					int startIdx = (int)(lev * stackSize);
+					int endIdx = (int)(startIdx+stackSize-1);
+					// long numElements = stackSize;
 					if (wholeDataset instanceof byte[]) {
-						byte[] tmp = (byte[]) extractSubarray(wholeDataset, startIdx, numElements);
+						byte[] tmp = Arrays.copyOfRange((byte[]) wholeDataset, startIdx, endIdx);
 						stack.addSlice(null, tmp);
 					} else if (wholeDataset instanceof short[]) {
-						short[] tmp = (short[]) extractSubarray(wholeDataset, startIdx, numElements);
+						short[] tmp = Arrays.copyOfRange((short[]) wholeDataset, startIdx, endIdx);
 						stack.addSlice(null, tmp);
 					} else if (wholeDataset instanceof int[]) {
-						int[] tmp = (int[]) extractSubarray(wholeDataset, startIdx, numElements);
+						int[] tmp = Arrays.copyOfRange((int[]) wholeDataset, startIdx, endIdx);
 						if (datatypeIfUnsupported.getDatatypeClass() == Datatype.CLASS_FLOAT) {
 							stack.addSlice(null, convertInt32ToFloat(tmp));
 						}
@@ -994,7 +997,8 @@ public class HDF5Reader implements PlugIn {
 						stack.addSlice(null, convertInt32ToShort(tmp));
 					}
 				} else if (wholeDataset instanceof long[]) {
-					long[] tmp = (long[]) extractSubarray(wholeDataset, startIdx, numElements);
+
+					long[] tmp = Arrays.copyOfRange((long[]) wholeDataset, startIdx, endIdx);
 					if (datatypeIfUnsupported.getDatatypeClass() == Datatype.CLASS_FLOAT) {
 						stack.addSlice(null, convertInt64ToFloat(tmp));
 					}
@@ -1002,14 +1006,13 @@ public class HDF5Reader implements PlugIn {
 						stack.addSlice(null, convertInt64ToShort(tmp));
 					}
 				} else if (wholeDataset instanceof float[]) {
-					float[] tmp = (float[]) extractSubarray(wholeDataset, startIdx, numElements);
+					float[] tmp = Arrays.copyOfRange((float[]) wholeDataset, startIdx, endIdx);
 					stack.addSlice(null, tmp);
 				} else if (wholeDataset instanceof double[]) {
-					float[] tmp = convertDoubleToFloat((double[]) extractSubarray(wholeDataset, startIdx, numElements));
+					float[] tmp = convertDoubleToFloat(Arrays.copyOfRange((double[]) wholeDataset, startIdx, endIdx));
 					stack.addSlice(null, tmp);
 				} else {
-					// try to put pixels on stack
-					stack.addSlice(null, extractSubarray(wholeDataset, startIdx, numElements));
+					logger.warning("Not supported array type");
 				}
 			}
 			IJ.showProgress(1.f);
@@ -1131,6 +1134,7 @@ public class HDF5Reader implements PlugIn {
 		} catch (OutOfMemoryError o) {
 			IJ.outOfMemory("Load HDF5");
 		}
+
 		// make sure the file is closed after working with it
 		// FIXME: should happen in catch-part, too!
 		try {
@@ -1145,54 +1149,56 @@ public class HDF5Reader implements PlugIn {
 		IJ.showProgress(1.0);
 	}
 
-	private static List<Dataset> getDataSetList(Group g, List<Dataset> datasets) throws Exception {
-		if (g == null){
+	/**
+	 * Recursively get list of all datasets in file
+	 * @param group		Group to search for datasets
+	 * @param datasets	List of datasets
+	 * @return	List of datasets or null if group is null
+	 */
+	private List<Dataset> getDataSetList(Group group, List<Dataset> datasets) {
+		if (group == null){
 			return datasets;
 		}
 
-		List<HObject> members = g.getMemberList();
-		for (HObject obj: members) {
-			if (obj instanceof Dataset) {
-				((Dataset) obj).init();
-				datasets.add((Dataset) obj);
-			} else if (obj instanceof Group) {
-				datasets = (getDataSetList((Group) obj, datasets));
+		for (HObject o: group.getMemberList()) {
+			if (o instanceof Dataset) {
+				((Dataset) o).init();
+				datasets.add((Dataset) o);
+			} else if (o instanceof Group) {
+				datasets = (getDataSetList((Group) o, datasets));
 			}
 		}
 		return datasets;
 	}
 
 
-	private static List<Attribute> getAttrList(HObject ds) throws Exception {
-		if (ds == null){
+	/**
+	 * Get attributes from object
+	 * @param object	Object to retrieve the attributes from
+	 * @return	Map of attributes or null if an error occurred while retrieving the attributes or the passed object is null
+	 */
+	private Map<String,Attribute> getAttributes(HObject object) {
+		if (object == null){
 			return null;
 		}
 
-		List<Attribute> attributes = new ArrayList<Attribute>();
-		List<?> members = ds.getMetadata();
-		int n = members.size();
-		Metadata obj = null;
-		for (int i = 0; i < n; i++) {
-			obj = (Metadata) members.get(i);
-			if (obj instanceof Attribute) {
-				attributes.add((Attribute) obj);
+		Map<String, Attribute> attributes = new HashMap<>();
+		try{
+			for(Object m: object.getMetadata()){
+				if(m instanceof Attribute){
+					attributes.put(((Attribute) m).getName(), (Attribute) m);
+				}
 			}
 		}
+		catch(Exception e){
+			logger.warning("Unable to retrieve metadata from object");
+			return null;
+		}
 
 		return attributes;
 	}
 
 
-	private static Attribute getAttribute(Dataset ds, String attrName) throws Exception {
-		for(Attribute a: getAttrList((HObject) ds)){
-			if (a.getName().equals(attrName)) {
-				return a;
-			}
-		}
-		return null;
-	}
-
-
 	private float[] convertDoubleToFloat(double[] dataIn) {
 		float[] dataOut = new float[dataIn.length];
 		for (int index = 0; index < dataIn.length; index++) {
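The new `getAttributes` helper trades the old list-plus-linear-scan pair (`getAttrList`/`getAttribute`) for a name-keyed map, so call sites like the `element_size_um` read earlier in this diff become a single `get`. A self-contained sketch of the idea, with plain `String` values standing in for the HDF `Attribute` objects:

```java
import java.util.HashMap;
import java.util.Map;

public class AttributeLookupDemo {
    public static void main(String[] args) {
        // Stand-in for the Map returned by getAttributes(object).
        Map<String, String> attributes = new HashMap<>();
        attributes.put("element_size_um", "1.0, 0.5, 0.5");

        // The old getAttribute scanned a List until the name matched;
        // the map makes the lookup a single keyed get.
        String elemSize = attributes.get("element_size_um");
        System.out.println(elemSize != null ? elemSize : "attribute not present");
    }
}
```

One caution visible in the signature: `getAttributes` returns `null` when metadata cannot be read, so a chained call such as `getAttributes(var).get(...)` assumes that read succeeds.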
@@ -1201,36 +1207,37 @@ public class HDF5Reader implements PlugIn {
 		return dataOut;
 	}
 
-	private float[] convertInt32ToFloat(int[] dataIn) {
-		float[] dataOut = new float[dataIn.length];
-		for (int index = 0; index < dataIn.length; index++) {
-			dataOut[index] = dataIn[index];
+
+	private float[] convertInt32ToFloat(int[] array) {
+		float[] narray = new float[array.length];
+		for (int index = 0; index < array.length; index++) {
+			narray[index] = array[index];
 		}
-		return dataOut;
+		return narray;
 	}
 
-	private short[] convertInt32ToShort(int[] dataIn) {
-		short[] dataOut = new short[dataIn.length];
-		for (int index = 0; index < dataIn.length; index++) {
-			dataOut[index] = (short) dataIn[index];
+	private short[] convertInt32ToShort(int[] array) {
+		short[] narray = new short[array.length];
+		for (int index = 0; index < array.length; index++) {
+			narray[index] = (short) array[index];
 		}
-		return dataOut;
+		return narray;
 	}
 
-	private float[] convertInt64ToFloat(long[] dataIn) {
-		float[] dataOut = new float[dataIn.length];
-		for (int index = 0; index < dataIn.length; index++) {
-			dataOut[index] = dataIn[index];
+	private float[] convertInt64ToFloat(long[] array) {
+		float[] narray = new float[array.length];
+		for (int index = 0; index < array.length; index++) {
+			narray[index] = array[index];
 		}
-		return dataOut;
+		return narray;
 	}
 
-	private short[] convertInt64ToShort(long[] dataIn) {
-		short[] dataOut = new short[dataIn.length];
-		for (int index = 0; index < dataIn.length; index++) {
-			dataOut[index] = (short) dataIn[index];
+	private short[] convertInt64ToShort(long[] array) {
+		short[] narray = new short[array.length];
+		for (int index = 0; index < array.length; index++) {
+			narray[index] = (short) array[index];
 		}
-		return dataOut;
+		return narray;
 	}
 
 	private Object convertToUnsigned(Object dataIn, int unsignedConvSelec) {
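A side note on these conversion helpers: the `(short)` casts in `convertInt32ToShort` and `convertInt64ToShort` silently truncate values outside the short range (-32768..32767), and the implicit `int`-to-`float` widening in `convertInt32ToFloat` can lose precision for magnitudes above 2^24. A quick, self-contained demonstration:

```java
public class NarrowingDemo {
    public static void main(String[] args) {
        int big = 70000;
        short truncated = (short) big;     // keeps only the low 16 bits
        System.out.println(truncated);     // 4464, not 70000

        int precise = (1 << 24) + 1;       // 16777217
        float widened = precise;           // implicit int -> float widening
        System.out.println((int) widened); // 16777216: one unit of precision lost
    }
}
```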
@@ -1258,43 +1265,6 @@ public class HDF5Reader implements PlugIn {
 		return dataOut;
 	}
 
-	private Object extractSubarray(Object data, long startIdx, long numElements) {
-		Object subarray = null;
-
-		if (data instanceof byte[]) {
-			subarray = new byte[(int) numElements];
-			for (long idx = startIdx; idx < startIdx + numElements; idx++) {
-				((byte[]) subarray)[(int) (idx - startIdx)] = ((byte[]) data)[(int) (idx)];
-			}
-		} else if (data instanceof short[]) {
-			subarray = new short[(int) numElements];
-			for (long idx = startIdx; idx < startIdx + numElements; idx++) {
-				((short[]) subarray)[(int) (idx - startIdx)] = ((short[]) data)[(int) (idx)];
-			}
-		} else if (data instanceof int[]) {
-			subarray = new int[(int) numElements];
-			for (long idx = startIdx; idx < startIdx + numElements; idx++) {
-				((int[]) subarray)[(int) (idx - startIdx)] = ((int[]) data)[(int) (idx)];
-			}
-		} else if (data instanceof long[]) {
-			subarray = new long[(int) numElements];
-			for (long idx = startIdx; idx < startIdx + numElements; idx++) {
-				((long[]) subarray)[(int) (idx - startIdx)] = ((long[]) data)[(int) (idx)];
-			}
-		} else if (data instanceof float[]) {
-			subarray = new float[(int) numElements];
-			for (long idx = startIdx; idx < startIdx + numElements; idx++) {
-				((float[]) subarray)[(int) (idx - startIdx)] = ((float[]) data)[(int) (idx)];
-			}
-		} else if (data instanceof double[]) {
-			subarray = new double[(int) numElements];
-			for (long idx = startIdx; idx < startIdx + numElements; idx++) {
-				((double[]) subarray)[(int) (idx - startIdx)] = ((double[]) data)[(int) (idx)];
-			}
-		}
-		return subarray;
-	}
-
 	/** Adds AWT scroll bars to the given container. */
 	public static void addScrollBars(Container pane) {
 		GridBagLayout layout = (GridBagLayout) pane.getLayout();
@@ -32,14 +32,18 @@ public class TimeFrame implements Comparable<TimeFrame> {
 	}
 
 	public String toString() {
-		String s = "FrameIdx: " + Integer.toString(frameIndex) + "; ";
-		s = s + "nChannels: " + Integer.toString(channels.size()) + "; ";
-		s = s + "channels: ";
-		for (int i = 0; i < channels.size(); i++){
-			s = s + Integer.toString(channels.get(i)) + ";";
+		StringBuffer b = new StringBuffer();
+		b.append("FrameIdx: ");
+		b.append(frameIndex);
+		b.append("; nChannels: ");
+		b.append(channels.size());
+		b.append("; Channels: ");
+		for(Integer c: channels){
+			b.append(c);
+			b.append(";");
 		}
 
-		return s;
+		return b.toString();
 	}
 
 	public int getNChannels() {
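The rewritten `toString` avoids repeated string concatenation inside the loop by accumulating into a buffer. `StringBuffer` is synchronized; for a method-local buffer like this one, the unsynchronized `StringBuilder` (Java 5+) is the usual choice and a drop-in replacement. A sketch of the same method using it, assuming the `frameIndex` field and `List<Integer> channels` from `TimeFrame`:

```java
// Equivalent toString() using StringBuilder; frameIndex and channels are
// assumed to be the int field and List<Integer> declared in TimeFrame.
public String toString() {
    StringBuilder b = new StringBuilder();
    b.append("FrameIdx: ").append(frameIndex);
    b.append("; nChannels: ").append(channels.size());
    b.append("; Channels: ");
    for (Integer c : channels) {
        b.append(c).append(";");
    }
    return b.toString();
}
```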