slimmed code ...

This commit is contained in:
ebner 2014-10-01 07:29:01 +02:00
parent 11e891e8d4
commit 133796730a
3 changed files with 195 additions and 652 deletions

View File

@ -1,25 +1,5 @@
package ch.psi.imagej.hdf5;
/*
* =========================================================================
*
* Copyright 2011 Matthias Schlachter
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not
* use this file except in compliance with the License. You may obtain a copy of
* the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations under
* the License.
*
* =========================================================================
*/
import ij.IJ;
import ij.ImagePlus;
import ij.CompositeImage;
@ -29,6 +9,7 @@ import ij.io.OpenDialog;
import ij.plugin.PlugIn;
import java.io.File;
import java.lang.reflect.Array;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
@ -49,7 +30,6 @@ public class HDF5Reader implements PlugIn {
r.run("");
}
public void run(String arg) {
String directory = "";
@ -124,13 +104,12 @@ public class HDF5Reader implements PlugIn {
wholeDataset = checkUnsigned(datatype, wholeDataset);
ImageStack stack = new ImageStack((int) dimensions[3], (int) dimensions[2]);
int stackSize = (int) (dimensions[2] * dimensions[3]);
int stackSize = (int) (dimensions[2] * dimensions[3] * 3);
int singleVolumeSize = (int) (dimensions[1] * stackSize);
for (int volIDX = 0; volIDX < dimensions[0]; ++volIDX) {
for (int lev = 0; lev < dimensions[1]; ++lev) {
int startIdx = (volIDX * singleVolumeSize * 3) + (lev * stackSize * 3);
int endIdx = startIdx + stackSize * 3 - 1;
copyPixels4D_RGB(datatypeIfUnsupported, (int) dimensions[2], (int) dimensions[3], stack, wholeDataset, (int) stackSize, startIdx, endIdx);
int startIdx = (volIDX * singleVolumeSize * 3) + (lev * stackSize);
addSliceRGB(stack, wholeDataset, (int) dimensions[2], (int) dimensions[3], startIdx);
}
}
@ -156,12 +135,10 @@ public class HDF5Reader implements PlugIn {
wholeDataset = checkUnsigned(datatype, wholeDataset);
ImageStack stack = new ImageStack((int) dimensions[2], (int) dimensions[1]);
long stackSize = dimensions[1] * dimensions[2] * 3;
int stackSize = (int) (dimensions[1] * dimensions[2] * 3);
for (int lev = 0; lev < dimensions[0]; ++lev) {
int startIdx = (int) (lev * stackSize);
int endIdx = (int) (startIdx + stackSize - 1);
int size = (int) (dimensions[2] * dimensions[1]);
copyPixels3D_RGB((int) dimensions[1], (int) dimensions[2], stack, wholeDataset, size, startIdx, endIdx);
int startIdx = lev * stackSize;
addSliceRGB( stack, wholeDataset, (int) dimensions[1], (int) dimensions[2], startIdx);
}
ImagePlus imp = new ImagePlus(directory + name + " " + datasetName, stack);
@ -216,7 +193,7 @@ public class HDF5Reader implements PlugIn {
wholeDataset = checkUnsigned(datatype, wholeDataset);
ImageStack stack = new ImageStack((int) dimensions[1], (int) dimensions[0]);
copyPixels2D_RGB((int) dimensions[0], (int) dimensions[1], stack, wholeDataset, (int) (dimensions[1] * dimensions[0]));
addSliceRGB(stack, wholeDataset, (int) dimensions[0], (int) dimensions[1]);
ImagePlus imp = new ImagePlus(directory + name + " " + datasetName, stack);
imp.setDimensions(3, 1, 1);
@ -509,396 +486,53 @@ public class HDF5Reader implements PlugIn {
/**
* @param datatypeIfUnsupported
* @param extent
* @param nRows (extent[2])
* @param nColumns (extent[3])
* Add RGB slice to stack
* @param stack
* @param wholeDataset
* @param size
* @param startIdx
* @param endIdx
*/
/**
 * Split one interleaved-RGB sub-range of a 4D dataset into three channel
 * planes (R, G, B) and append them to the given stack.
 *
 * Integer/long data is converted via HDF5Utilities according to
 * {@code datatypeIfUnsupported}; unsupported element types are logged and skipped.
 *
 * @param datatypeIfUnsupported target datatype used to convert int[]/long[] data
 * @param nRows    number of rows in one slice (extent[2])
 * @param nColumns number of columns in one slice (extent[3])
 * @param stack    stack to append the three channel planes to
 * @param wholeDataset the complete dataset (primitive array)
 * @param size     number of pixels per slice (nRows * nColumns)
 * @param startIdx first index (inclusive) of the interleaved RGB sub-range
 * @param endIdx   last index (INCLUSIVE) of the interleaved RGB sub-range
 */
private void copyPixels4D_RGB(Datatype datatypeIfUnsupported, int nRows, int nColumns, ImageStack stack, Object wholeDataset, int size, int startIdx, int endIdx) {
	// Callers pass endIdx as an inclusive last index, but Arrays.copyOfRange
	// takes an exclusive upper bound — add 1 so the final blue sample is kept.
	int to = endIdx + 1;
	if (wholeDataset instanceof byte[]) {
		splitAndAddRGB(Arrays.copyOfRange((byte[]) wholeDataset, startIdx, to), size, nRows, nColumns, stack);
	} else if (wholeDataset instanceof short[]) {
		splitAndAddRGB(Arrays.copyOfRange((short[]) wholeDataset, startIdx, to), size, nRows, nColumns, stack);
	} else if (wholeDataset instanceof int[]) {
		// int is not natively supported by ImageJ stacks — convert as requested.
		if (datatypeIfUnsupported.getDatatypeClass() == Datatype.CLASS_FLOAT) {
			splitAndAddRGB(HDF5Utilities.convertToFloat(Arrays.copyOfRange((int[]) wholeDataset, startIdx, to)), size, nRows, nColumns, stack);
		}
		if (datatypeIfUnsupported.getDatatypeClass() == Datatype.CLASS_INTEGER) {
			splitAndAddRGB(HDF5Utilities.convertToShort(Arrays.copyOfRange((int[]) wholeDataset, startIdx, to)), size, nRows, nColumns, stack);
		}
	} else if (wholeDataset instanceof long[]) {
		// long is not natively supported either — same conversion strategy.
		if (datatypeIfUnsupported.getDatatypeClass() == Datatype.CLASS_FLOAT) {
			splitAndAddRGB(HDF5Utilities.convertToFloat(Arrays.copyOfRange((long[]) wholeDataset, startIdx, to)), size, nRows, nColumns, stack);
		}
		if (datatypeIfUnsupported.getDatatypeClass() == Datatype.CLASS_INTEGER) {
			splitAndAddRGB(HDF5Utilities.convertToShort(Arrays.copyOfRange((long[]) wholeDataset, startIdx, to)), size, nRows, nColumns, stack);
		}
	} else if (wholeDataset instanceof float[]) {
		splitAndAddRGB(Arrays.copyOfRange((float[]) wholeDataset, startIdx, to), size, nRows, nColumns, stack);
	} else if (wholeDataset instanceof double[]) {
		splitAndAddRGB(HDF5Utilities.convertToFloat(Arrays.copyOfRange((double[]) wholeDataset, startIdx, to)), size, nRows, nColumns, stack);
	} else {
		logger.warning("Datatype not supported");
	}
}

/**
 * De-interleave an RGB pixel buffer into three channel planes and append
 * them (R, then G, then B) to the stack.
 *
 * @param interleaved primitive array holding RGBRGB... samples for one slice
 * @param size     number of pixels per channel plane
 * @param nRows    number of rows in the slice
 * @param nColumns number of columns in the slice
 * @param stack    stack to append the planes to
 */
private void splitAndAddRGB(Object interleaved, int size, int nRows, int nColumns, ImageStack stack) {
	Class<?> type = interleaved.getClass().getComponentType();
	Object rChannel = Array.newInstance(type, size);
	Object gChannel = Array.newInstance(type, size);
	Object bChannel = Array.newInstance(type, size);
	for (int row = 0; row < nRows; ++row) {
		for (int col = 0; col < nColumns; ++col) {
			// Row stride is the COLUMN count (the previous code used nRows,
			// which scrambled non-square slices).
			int offset = (row * nColumns) + col;
			int offsetRGB = offset * 3;
			Array.set(rChannel, offset, Array.get(interleaved, offsetRGB));
			Array.set(gChannel, offset, Array.get(interleaved, offsetRGB + 1));
			Array.set(bChannel, offset, Array.get(interleaved, offsetRGB + 2));
		}
	}
	stack.addSlice(null, rChannel);
	stack.addSlice(null, gChannel);
	stack.addSlice(null, bChannel);
}
/**
* @param nRows (extent[0])
* @param nColumns (extent[1])
* @param stack
* @param wholeDataset
* @param size
*/
/**
 * De-interleave a 2D RGB image into three channel planes and append them
 * (R, then G, then B) to the given stack. Element types without a matching
 * branch are silently ignored.
 *
 * @param nRows    number of rows (extent[0])
 * @param nColumns number of columns (extent[1])
 * @param stack    stack to append the channel planes to
 * @param wholeDataset primitive array holding RGBRGB... samples
 * @param size     number of pixels per channel plane
 */
private void copyPixels2D_RGB(int nRows, int nColumns, ImageStack stack, Object wholeDataset, int size) {
	// Row-major sweep: pixel i lives at interleaved index 3*i.
	final int pixelCount = nRows * nColumns;
	if (wholeDataset instanceof byte[]) {
		byte[] interleaved = (byte[]) wholeDataset;
		byte[] red = new byte[size];
		byte[] green = new byte[size];
		byte[] blue = new byte[size];
		for (int i = 0; i < pixelCount; i++) {
			red[i] = interleaved[3 * i];
			green[i] = interleaved[3 * i + 1];
			blue[i] = interleaved[3 * i + 2];
		}
		stack.addSlice(null, red);
		stack.addSlice(null, green);
		stack.addSlice(null, blue);
	} else if (wholeDataset instanceof short[]) {
		short[] interleaved = (short[]) wholeDataset;
		short[] red = new short[size];
		short[] green = new short[size];
		short[] blue = new short[size];
		for (int i = 0; i < pixelCount; i++) {
			red[i] = interleaved[3 * i];
			green[i] = interleaved[3 * i + 1];
			blue[i] = interleaved[3 * i + 2];
		}
		stack.addSlice(null, red);
		stack.addSlice(null, green);
		stack.addSlice(null, blue);
	} else if (wholeDataset instanceof int[]) {
		int[] interleaved = (int[]) wholeDataset;
		int[] red = new int[size];
		int[] green = new int[size];
		int[] blue = new int[size];
		for (int i = 0; i < pixelCount; i++) {
			red[i] = interleaved[3 * i];
			green[i] = interleaved[3 * i + 1];
			blue[i] = interleaved[3 * i + 2];
		}
		stack.addSlice(null, red);
		stack.addSlice(null, green);
		stack.addSlice(null, blue);
	} else if (wholeDataset instanceof long[]) {
		long[] interleaved = (long[]) wholeDataset;
		long[] red = new long[size];
		long[] green = new long[size];
		long[] blue = new long[size];
		for (int i = 0; i < pixelCount; i++) {
			red[i] = interleaved[3 * i];
			green[i] = interleaved[3 * i + 1];
			blue[i] = interleaved[3 * i + 2];
		}
		stack.addSlice(null, red);
		stack.addSlice(null, green);
		stack.addSlice(null, blue);
	} else if (wholeDataset instanceof float[]) {
		float[] interleaved = (float[]) wholeDataset;
		float[] red = new float[size];
		float[] green = new float[size];
		float[] blue = new float[size];
		for (int i = 0; i < pixelCount; i++) {
			red[i] = interleaved[3 * i];
			green[i] = interleaved[3 * i + 1];
			blue[i] = interleaved[3 * i + 2];
		}
		stack.addSlice(null, red);
		stack.addSlice(null, green);
		stack.addSlice(null, blue);
	} else if (wholeDataset instanceof double[]) {
		double[] interleaved = (double[]) wholeDataset;
		double[] red = new double[size];
		double[] green = new double[size];
		double[] blue = new double[size];
		for (int i = 0; i < pixelCount; i++) {
			red[i] = interleaved[3 * i];
			green[i] = interleaved[3 * i + 1];
			blue[i] = interleaved[3 * i + 2];
		}
		stack.addSlice(null, red);
		stack.addSlice(null, green);
		stack.addSlice(null, blue);
	}
}
/**
* @param nRows
* @param nColumns
* @param stack
* @param wholeDataset
* @param startIdx
* @param endIdx
* @param size
*/
private void copyPixels3D_RGB(int nRows, int nColumns, ImageStack stack, Object wholeDataset, int size, int startIdx, int endIdx) {
if (wholeDataset instanceof byte[]) {
byte[] tmp = Arrays.copyOfRange((byte[]) wholeDataset, startIdx, endIdx);
byte[] rChannel = new byte[size];
byte[] gChannel = new byte[size];
byte[] bChannel = new byte[size];
for (int row = 0; row < nRows; ++row) {
for (int col = 0; col < nColumns; ++col) {
int offsetRGB = (row * nColumns * 3) + (col * 3);
int offset = (row * nColumns) + col;
rChannel[offset] = tmp[offsetRGB + 0];
gChannel[offset] = tmp[offsetRGB + 1];
bChannel[offset] = tmp[offsetRGB + 2];
}
}
stack.addSlice(null, rChannel);
stack.addSlice(null, gChannel);
stack.addSlice(null, bChannel);
} else if (wholeDataset instanceof short[]) {
short[] tmp = Arrays.copyOfRange((short[]) wholeDataset, startIdx, endIdx);
short[] rChannel = new short[size];
short[] gChannel = new short[size];
short[] bChannel = new short[size];
for (int row = 0; row < nRows; ++row) {
for (int col = 0; col < nColumns; ++col) {
int offsetRGB = (row * nColumns * 3) + (col * 3);
int offset = (row * nColumns) + col;
rChannel[offset] = tmp[offsetRGB + 0];
gChannel[offset] = tmp[offsetRGB + 1];
bChannel[offset] = tmp[offsetRGB + 2];
}
}
stack.addSlice(null, rChannel);
stack.addSlice(null, gChannel);
stack.addSlice(null, bChannel);
} else if (wholeDataset instanceof int[]) {
int[] tmp = Arrays.copyOfRange((int[]) wholeDataset, startIdx, endIdx);
int[] rChannel = new int[size];
int[] gChannel = new int[size];
int[] bChannel = new int[size];
for (int row = 0; row < nRows; ++row) {
for (int col = 0; col < nColumns; ++col) {
int offsetRGB = (row * nColumns * 3) + (col * 3);
int offset = (row * nColumns) + col;
rChannel[offset] = tmp[offsetRGB + 0];
gChannel[offset] = tmp[offsetRGB + 1];
bChannel[offset] = tmp[offsetRGB + 2];
}
}
stack.addSlice(null, rChannel);
stack.addSlice(null, gChannel);
stack.addSlice(null, bChannel);
} else if (wholeDataset instanceof long[]) {
long[] tmp = Arrays.copyOfRange((long[]) wholeDataset, startIdx, endIdx);
long[] rChannel = new long[size];
long[] gChannel = new long[size];
long[] bChannel = new long[size];
for (int row = 0; row < nRows; ++row) {
for (int col = 0; col < nColumns; ++col) {
int offsetRGB = (row * nColumns * 3) + (col * 3);
int offset = (row * nColumns) + col;
rChannel[offset] = tmp[offsetRGB + 0];
gChannel[offset] = tmp[offsetRGB + 1];
bChannel[offset] = tmp[offsetRGB + 2];
}
}
stack.addSlice(null, rChannel);
stack.addSlice(null, gChannel);
stack.addSlice(null, bChannel);
} else if (wholeDataset instanceof float[]) {
float[] tmp = Arrays.copyOfRange((float[]) wholeDataset, startIdx, endIdx);
float[] rChannel = new float[size];
float[] gChannel = new float[size];
float[] bChannel = new float[size];
for (int row = 0; row < nRows; ++row) {
for (int col = 0; col < nColumns; ++col) {
int offsetRGB = (row * nColumns * 3) + (col * 3);
int offset = (row * nColumns) + col;
rChannel[offset] = tmp[offsetRGB + 0];
gChannel[offset] = tmp[offsetRGB + 1];
bChannel[offset] = tmp[offsetRGB + 2];
}
}
stack.addSlice(null, rChannel);
stack.addSlice(null, gChannel);
stack.addSlice(null, bChannel);
} else if (wholeDataset instanceof double[]) {
double[] tmp = Arrays.copyOfRange((double[]) wholeDataset, startIdx, endIdx);
double[] rChannel = new double[size];
double[] gChannel = new double[size];
double[] bChannel = new double[size];
for (int row = 0; row < nRows; ++row) {
for (int col = 0; col < nColumns; ++col) {
int offsetRGB = (row * nColumns * 3) + (col * 3);
int offset = (row * nColumns) + col;
rChannel[offset] = tmp[offsetRGB + 0];
gChannel[offset] = tmp[offsetRGB + 1];
bChannel[offset] = tmp[offsetRGB + 2];
}
}
stack.addSlice(null, rChannel);
stack.addSlice(null, gChannel);
stack.addSlice(null, bChannel);
/**
 * Copy one interleaved RGB slice out of the dataset and append its three
 * channel planes to the stack. Non-array inputs are ignored.
 *
 * @param stack    stack to append the channel planes to
 * @param wholeDataset full dataset (primitive array of RGBRGB... samples)
 * @param nRows    number of rows in the slice
 * @param nColumns number of columns in the slice
 * @param startIdx index of the first sample of the slice within wholeDataset
 */
private void addSliceRGB(ImageStack stack, Object wholeDataset, int nRows, int nColumns, int startIdx) {
	if (wholeDataset.getClass().isArray()) {
		// The data is interleaved RGB: 3 samples per pixel. The previous code
		// copied only nRows*nColumns elements, so the delegate (which reads up
		// to index 3*size-1) ran past the end of the window.
		int size = nRows * nColumns * 3;
		Object copy = Array.newInstance(wholeDataset.getClass().getComponentType(), size);
		System.arraycopy(wholeDataset, startIdx, copy, 0, size);
		addSliceRGB(stack, copy, nRows, nColumns);
	}
}
/**
* Add RGB slice to stack
* @param stack
* @param wholeDataset
* @param nRows
* @param nColumns
*/
/**
 * De-interleave an RGB pixel buffer into three channel planes and append
 * them (R, then G, then B) to the stack. Works for any primitive component
 * type via reflection.
 *
 * @param stack    stack to append the channel planes to
 * @param wholeDataset primitive array holding RGBRGB... samples for one slice
 * @param nRows    number of rows in the slice
 * @param nColumns number of columns in the slice
 */
private void addSliceRGB(ImageStack stack, Object wholeDataset, int nRows, int nColumns) {
	final int pixelCount = nRows * nColumns;
	final Class<?> componentType = wholeDataset.getClass().getComponentType();
	Object red = Array.newInstance(componentType, pixelCount);
	Object green = Array.newInstance(componentType, pixelCount);
	Object blue = Array.newInstance(componentType, pixelCount);
	// Row-major sweep: pixel i lives at interleaved index 3*i.
	for (int i = 0; i < pixelCount; i++) {
		Array.set(red, i, Array.get(wholeDataset, 3 * i));
		Array.set(green, i, Array.get(wholeDataset, 3 * i + 1));
		Array.set(blue, i, Array.get(wholeDataset, 3 * i + 2));
	}
	stack.addSlice(null, red);
	stack.addSlice(null, green);
	stack.addSlice(null, blue);
}
/**
* Add AWT scroll bars to the given container.
*/

View File

@ -104,6 +104,16 @@ public class HDF5Utilities {
return dataSetName.substring(posOfLastSlash + 1);
}
/**
* Create group
* @param file
* @param groupName
* @return
*/
/**
 * Create a group (recursively) relative to the file's root group.
 *
 * @param file      file to create the group in
 * @param groupName slash-separated group path
 * @return the created (or already existing) group, or null on failure
 */
public static Group createGroup(FileFormat file, String groupName) {
	DefaultMutableTreeNode rootNode = (DefaultMutableTreeNode) file.getRootNode();
	Group rootGroup = (Group) rootNode.getUserObject();
	return createGroup(file, rootGroup, groupName);
}
/**
* Creates group recursively relative to the given base group
*
@ -112,8 +122,8 @@ public class HDF5Utilities {
* @param file File handle
* @return
*/
public static Group createGroup(String groupRelativName, Group group, FileFormat file) {
if (groupRelativName == null || file == null)
public static Group createGroup( FileFormat file, Group group, String groupName) {
if (groupName == null || file == null)
return null;
if (group == null){
@ -121,24 +131,24 @@ public class HDF5Utilities {
}
// Trim leading and trailing slashes
while (groupRelativName.charAt(0) == '/') {
groupRelativName = groupRelativName.substring(1);
while (groupName.charAt(0) == '/') {
groupName = groupName.substring(1);
}
while (groupRelativName.charAt(groupRelativName.length() - 1) == '/') {
groupRelativName = groupRelativName.substring(0, groupRelativName.length() - 2);
while (groupName.charAt(groupName.length() - 1) == '/') {
groupName = groupName.substring(0, groupName.length() - 2);
}
int posOfSlash = groupRelativName.indexOf('/');
int posOfSlash = groupName.indexOf('/');
if (posOfSlash == -1) {
try {
Group newGroup;
String newGroupName;
if (group.isRoot()){
newGroupName = "/" + groupRelativName;
newGroupName = "/" + groupName;
}
else{
newGroupName = group.getFullName() + "/" + groupRelativName;
newGroupName = group.getFullName() + "/" + groupName;
}
newGroup = (Group) file.get(newGroupName);
if (newGroup == null){
@ -149,8 +159,8 @@ public class HDF5Utilities {
return null;
}
} else {
String subgroupRelativName = groupRelativName.substring(posOfSlash);
String currentGroup = groupRelativName.substring(0, posOfSlash);
String subgroupRelativName = groupName.substring(posOfSlash);
String currentGroup = groupName.substring(0, posOfSlash);
logger.info("Create: " + currentGroup);
logger.info("Call back for: " + subgroupRelativName);
try {
@ -170,7 +180,7 @@ public class HDF5Utilities {
newGroup = file.createGroup(newGroupName, group);
}
return createGroup(subgroupRelativName, newGroup, file);
return createGroup(file, newGroup, subgroupRelativName );
} catch (Exception e) {
return null;
}

View File

@ -1,24 +1,5 @@
package ch.psi.imagej.hdf5;
/*
* =========================================================================
*
* Copyright 2011 Matthias Schlachter
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not
* use this file except in compliance with the License. You may obtain a copy of
* the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations under
* the License.
*
* =========================================================================
*/
import ij.*;
import ij.io.*;
@ -26,6 +7,7 @@ import ij.plugin.filter.PlugInFilter;
import ij.process.*;
import ij.gui.*;
import java.util.logging.Level;
import java.util.logging.Logger;
import ncsa.hdf.object.*; // the common object package
@ -33,7 +15,7 @@ import ncsa.hdf.object.h5.*; // the HDF5 implementation
import ncsa.hdf.hdf5lib.exceptions.HDF5Exception;
public class HDF5Writer implements PlugInFilter {
private static final Logger logger = Logger.getLogger(HDF5Writer.class.getName());
public int setup(String arg, ImagePlus imp) {
@ -41,7 +23,6 @@ public class HDF5Writer implements PlugInFilter {
return DOES_8G + DOES_16 + DOES_32 + DOES_RGB + NO_CHANGES;
}
public void run(ImageProcessor ip) {
// Check whether windows are open
@ -54,11 +35,11 @@ public class HDF5Writer implements PlugInFilter {
SaveDialog sd = new SaveDialog("Save HDF5 ...", "", ".h5");
String directory = sd.getDirectory();
String name = sd.getFileName();
if (name == null || name.equals("")){
return;
if (name == null || name.equals("")) {
return;
}
String filename = directory + name;
// Retrieve an instance of the implementing class for the HDF5 format
FileFormat fileFormat = FileFormat.getFileFormat(FileFormat.FILE_TYPE_HDF5);
@ -68,7 +49,6 @@ public class HDF5Writer implements PlugInFilter {
return;
}
ImagePlus imp = WindowManager.getCurrentImage();
int nFrames = imp.getNFrames();
int nChannels = imp.getNChannels();
@ -78,7 +58,7 @@ public class HDF5Writer implements PlugInFilter {
int nCols = imp.getWidth();
int imgColorDepth = imp.getBitDepth();
int imgColorType = imp.getType();
Datatype type = null;
if (imgColorType == ImagePlus.GRAY8) {
logger.info(" bit depth: " + imgColorDepth + ", type: GRAY8");
@ -93,124 +73,91 @@ public class HDF5Writer implements PlugInFilter {
logger.info(" bit depth: " + imgColorDepth + ", type: COLOR_RGB");
type = new H5Datatype(Datatype.CLASS_CHAR, Datatype.NATIVE, Datatype.NATIVE, Datatype.SIGN_NONE);
}
GenericDialog gd = null;
gd = new GenericDialog("Variable Name Selection");
// check for hyperstack
if (imp.getOpenAsHyperStack() || imp.isHyperStack()) {
logger.info("This is a hyperstack");
// Hyperstack
GenericDialog gd = new GenericDialog("Dataset Name");
gd.addStringField(imp.getTitle(), "/t$F/channel$C");
gd.showDialog();
if (gd.wasCanceled()) {
IJ.error("Plugin canceled!");
return;
}
String formatString = gd.getNextString();
// Open the file
try {
H5File outFile = (H5File) fileFormat.createFile(filename, FileFormat.FILE_CREATE_OPEN);
if (!outFile.canWrite()) {
H5File file = (H5File) fileFormat.createFile(filename, FileFormat.FILE_CREATE_OPEN);
if (!file.canWrite()) {
IJ.error("File `" + filename + "`is readonly!");
return;
}
outFile.open();
file.open();
// Split frames and channels
// parse format string
// regexp
long[] channelDims = null;
if (nSlices > 1) {
channelDims = new long[3];
channelDims[0] = nSlices;
channelDims[1] = nRows;
channelDims[2] = nCols;
} else {
channelDims = new long[2];
channelDims[0] = nRows;
channelDims[1] = nCols;
}
// iterate over frames and channels
ImageStack stack = imp.getStack();
for (int f = 0; f < nFrames; f++) {
IJ.showProgress(f, nFrames);
for (int c = 0; c < nChannels; c++) {
String fullName = formatString;
fullName = fullName.replaceAll("$F", f+"");
fullName = fullName.replaceAll("$C", c+"");
long[] dimensions = null;
if (nSlices > 1) {
dimensions = new long[3];
dimensions[0] = nSlices;
dimensions[1] = nRows;
dimensions[2] = nCols;
} else {
dimensions = new long[2];
dimensions[0] = nRows;
dimensions[1] = nCols;
}
String dataSetName = HDF5Utilities.getDataSetDescriptor(fullName);
String groupName = HDF5Utilities.getGroupDescriptor(fullName);
logger.info("group name: " + groupName + " dataset name: " + dataSetName);
// ensure group exists
Group group = HDF5Utilities.createGroup(groupName, null, outFile);
// create data set
Dataset dataset = null;
// select hyperslabs
long[] maxdims = channelDims;
// iterate over frames and channels
ImageStack stack = imp.getStack();
for (int f = 0; f < nFrames; f++) {
for (int c = 0; c < nChannels; c++) {
String fullName = formatString;
fullName = fullName.replaceAll("$F", f + "");
fullName = fullName.replaceAll("$C", c + "");
String dataSetName = HDF5Utilities.getDataSetDescriptor(fullName);
String groupName = HDF5Utilities.getGroupDescriptor(fullName);
// Ensure group exists
Group group = HDF5Utilities.createGroup(file, groupName);
// Create dataset
Dataset dataset = null;
try {
dataset = (Dataset) file.get(groupName + "/" + dataSetName);
} catch (Exception e) {
dataset = null;
}
if (dataset == null) {
long[] maxdims = dimensions;
long[] chunks = null;
int gzip = 0; // no compression
try {
dataset = (Dataset) outFile.get(groupName + "/" + dataSetName);
} catch (Exception e) {
dataset = null;
}
if (dataset == null) {
try {
dataset = outFile.createScalarDS(dataSetName, group, type, channelDims, maxdims, chunks, gzip, null);
} catch (Exception err) {
IJ.error(err.getMessage());
return;
}
}
dataset.init();
long[] selected = dataset.getSelectedDims(); // the
// selected
// size of
// the
// dataet
// write levels
dataset = file.createScalarDS(dataSetName, group, type, dimensions, maxdims, chunks, gzip, null);
}
dataset.init();
long[] selected = dataset.getSelectedDims(); // the
if (nSlices == 1) {
System.arraycopy(dimensions, 0, selected, 0, selected.length);
int stackIndex = imp.getStackIndex(c + 1, 1, f + 1);
Object slice = stack.getPixels(stackIndex);
dataset.write(slice);
} else {
selected[0] = 1;
System.arraycopy(dimensions, 1, selected, 1, selected.length - 1);
long[] start = dataset.getStartDims();
for (int lvl = 0; lvl < nSlices; ++lvl) {
start[0] = lvl;
int stackIndex = imp.getStackIndex(c + 1, lvl + 1, f + 1);
logger.info("selected.length: " + Integer.toString(selected.length));
logger.info("channelDims.length: " + Integer.toString(channelDims.length));
if (nSlices == 1) {
System.arraycopy(channelDims, 0, selected, 0, selected.length);
int stackIndex = imp.getStackIndex(c + 1, 1, f + 1);
logger.info("Stackindex: " + Integer.toString(stackIndex));
// get raw data
Object slice = stack.getPixels(stackIndex);
assert (slice != null);
// write data
try {
dataset.write(slice);
} catch (Exception e) {
IJ.showStatus("Error writing data to file.");
}
} else {
selected[0] = 1;
System.arraycopy(channelDims, 1, selected, 1, selected.length - 1);
long[] start = dataset.getStartDims();
for (int lvl = 0; lvl < nSlices; ++lvl) {
// select hyperslab
start[0] = lvl;
int stackIndex = imp.getStackIndex(c + 1, lvl + 1, f + 1);
// get raw data
Object slice = stack.getPixels(stackIndex);
// write data
try {
dataset.write(slice);
} catch (Exception e) {
IJ.showStatus("Error writing data to file.");
}
}
dataset.write(slice);
}
}
}
outFile.close();
}
file.close();
} catch (HDF5Exception err) {
IJ.error(err.getMessage());
return;
@ -219,27 +166,25 @@ public class HDF5Writer implements PlugInFilter {
return;
}
} else {
logger.info("This is NO hyperstack");
// No Hyperstack
GenericDialog gd = new GenericDialog("Dataset Name");
gd.addStringField(imp.getTitle(), "");
gd.showDialog();
if (gd.wasCanceled()) {
IJ.error("Plugin canceled!");
return;
}
String varName = gd.getNextString();
if (varName == "") {
IJ.error("No data set name given. Plugin canceled!");
return;
}
try {
H5File outFile = null;
H5File file = null;
try {
outFile = (H5File) fileFormat.createFile(filename, FileFormat.FILE_CREATE_OPEN);
if (!outFile.canWrite()) {
file = (H5File) fileFormat.createFile(filename, FileFormat.FILE_CREATE_OPEN);
if (!file.canWrite()) {
IJ.error("File `" + filename + "`is readonly!");
return;
}
@ -247,150 +192,104 @@ public class HDF5Writer implements PlugInFilter {
IJ.error(err.getMessage());
return;
}
file.open();
outFile.open();
// first create all dimensions and variables
// Image color depth and color type
logger.info("writing data to variable: " + varName);
String dataSetName = HDF5Utilities.getDataSetDescriptor(varName);
String datasetName = HDF5Utilities.getDataSetDescriptor(varName);
String groupName = HDF5Utilities.getGroupDescriptor(varName);
logger.info("group name: " + groupName + " dataset name: " + dataSetName);
// ensure group exists
Group group = HDF5Utilities.createGroup(groupName, null, outFile);
// Ensure group exists
Group group = HDF5Utilities.createGroup(file, groupName);
// get image type (bit depth)
long[] dims;
long[] dimensions;
if (imgColorType == ImagePlus.COLOR_RGB || imgColorType == ImagePlus.COLOR_256) {
if (stackSize == 1) {
// color image
dims = new long[3];
dims[0] = nRows;
dims[1] = nCols;
dims[2] = 3;
dimensions = new long[3];
dimensions[0] = nRows;
dimensions[1] = nCols;
dimensions[2] = 3;
} else {
// color images have 4 dimensions, grey value images
// have 3.
logger.info("adding 4 dimensions");
dims = new long[4];
dims[0] = stackSize;
dims[1] = nRows;
dims[2] = nCols;
dims[3] = 3;
dimensions = new long[4];
dimensions[0] = stackSize;
dimensions[1] = nRows;
dimensions[2] = nCols;
dimensions[3] = 3;
}
} else {
if (stackSize == 1) {
// color image
dims = new long[2];
dims[0] = nRows;
dims[1] = nCols;
dimensions = new long[2];
dimensions[0] = nRows;
dimensions[1] = nCols;
} else {
logger.info("adding 3 dimensions");
dims = new long[3];
dims[0] = stackSize;
dims[1] = nRows;
dims[2] = nCols;
dimensions = new long[3];
dimensions[0] = stackSize;
dimensions[1] = nRows;
dimensions[2] = nCols;
}
}
// select hyperslabs
long[] maxdims = dims;
// long[] chunks = findOptimalChunksize( nDims,
// dims);
long[] chunks = null;
int gzip = 0; // no compression
// create dataset
// Create dataset
Dataset dataset = null;
try {
dataset = (Dataset) outFile.get(groupName + "/" + dataSetName);
dataset = (Dataset) file.get(groupName + "/" + datasetName);
} catch (Exception e) {
dataset = null;
}
if (dataset == null) {
dataset = outFile.createScalarDS(dataSetName, group, type, dims, maxdims, chunks, gzip, null);
long[] maxdims = dimensions;
long[] chunks = null;
int gzip = 0; // no compression
dataset = file.createScalarDS(datasetName, group, type, dimensions, maxdims, chunks, gzip, null);
}
dataset.init();
long[] selected = dataset.getSelectedDims(); // the
// selected
// size of
// the
// dataet
long[] selected = dataset.getSelectedDims();
ImageStack stack = imp.getStack();
if (stackSize == 1) {
System.arraycopy(dims, 0, selected, 0, selected.length);
// get raw data
System.arraycopy(dimensions, 0, selected, 0, selected.length);
Object slice = stack.getPixels(stackSize);
if (imgColorType == ImagePlus.COLOR_RGB)
slice = computeRgbSlice(stack.getPixels(stackSize));
// write data
if (imgColorType == ImagePlus.COLOR_RGB) {
slice = computeRgbSlice((int[]) stack.getPixels(stackSize));
}
dataset.write(slice);
} else {
selected[0] = 1;
System.arraycopy(dims, 1, selected, 1, selected.length - 1);
long[] start = dataset.getStartDims(); // the off set of
// the selection
System.arraycopy(dimensions, 1, selected, 1, selected.length - 1);
long[] start = dataset.getStartDims(); // the off set of the
// selection
for (int lvl = 0; lvl < stackSize; ++lvl) {
IJ.showProgress(lvl, stackSize);
// select hyperslab
start[0] = lvl;
// get raw data
Object slice = stack.getPixels(lvl + 1);
if (imgColorType == ImagePlus.COLOR_RGB)
slice = computeRgbSlice(stack.getPixels(lvl + 1));
// write data
if (imgColorType == ImagePlus.COLOR_RGB) {
slice = computeRgbSlice((int[]) stack.getPixels(lvl + 1));
}
dataset.write(slice);
}
}
// get pixel sizes
ij.measure.Calibration cal = imp.getCalibration();
logger.info(" Element-Size in um (level,row,col): " + cal.pixelDepth + ", " + cal.pixelHeight + ", " + cal.pixelWidth);
float[] element_sizes = new float[3];
element_sizes[0] = (float) cal.pixelDepth;
element_sizes[1] = (float) cal.pixelHeight;
element_sizes[2] = (float) cal.pixelWidth;
Datatype attrType = new H5Datatype(Datatype.CLASS_FLOAT, Datatype.NATIVE, Datatype.NATIVE, -1);
long[] attrDims = { 3 };
Attribute element_size_um = null;
try {
element_size_um = HDF5Utilities.getAttributes(dataset).get("element_size_um");
} catch (Exception e) {
element_size_um = null;
}
if (element_size_um == null) {
element_size_um = new Attribute("element_size_um", attrType, attrDims);
}
element_size_um.setValue(element_sizes);
// write element_size_um
dataset.writeMetadata(element_size_um);
outFile.close();
} catch (HDF5Exception err) {
System.err.println("Caught HDF5Exception");
err.printStackTrace();
} catch (java.io.IOException err) {
System.err.println("IO Error while writing '" + filename + "': " + err);
} catch (Exception err) {
System.err.println("Range Error while writing '" + filename + "': " + err);
file.close();
} catch (HDF5Exception e) {
logger.log(Level.WARNING, "Caught HDF5Exception", e);
} catch (java.io.IOException e) {
logger.log(Level.WARNING, "IO Error while writing '" + filename + "'", e);
} catch (Exception e) {
logger.log(Level.WARNING, "Range Error while writing '" + filename + "'", e);
}
}
}
private Object computeRgbSlice(Object pixels) {
byte rgbslice[];
int size = ((int[]) pixels).length;
rgbslice = new byte[size * 3];
/**
* Compute the rgb slice
* @param pixels Original pixels
* @return Slice with separated RGB values
*/
private byte[] computeRgbSlice(int[] pixels) {
int size = pixels.length;
byte[] rgbslice = new byte[size * 3];
for (int i = 0; i < size; i++) {
int red = (((int[]) pixels)[i] & 0xff0000) >> 16;
int green = (((int[]) pixels)[i] & 0x00ff00) >> 8;