8 Commits

SHA1        Message                        Date
2372bf786f Update readme 2017-08-28 17:21:17 +02:00
cf0101a25d Increment version to 0.11.0 2017-08-28 17:15:37 +02:00
3c9fd2b97e Add quick and dirty hack 2017-08-28 17:13:10 +02:00
           Quick and dirty hack to allow ImageJ (Python) scripts to open HDF5 files via a routine and do operations with them. This is far from perfect and needs major cleanup! It is implemented like this due to lack of time ...
eca2294000 Attempt to use plugin options 2017-08-28 16:19:28 +02:00
efc5891e2e update gitignore 2017-08-17 09:38:31 +02:00
ec38041ddf Fix artifact repository URL 2016-11-30 15:53:22 +01:00
8e5ca84a9e removed unnecessary logging 2015-04-21 16:06:41 +02:00
d967b27d7c removed log messages 2015-04-21 15:15:06 +02:00
9 changed files with 150 additions and 39 deletions

.gitignore

@@ -1,3 +1,4 @@
+.idea
 /target
 .gradle
 build

Readme.md

@@ -1,4 +1,4 @@
 # Overview
 ImageJ plugin for reading and writing HDF5 files.
@@ -20,6 +20,16 @@ To save to an HDF5 file use:
 File > SaveAs > HDF5
 ```
+
+## Scripting
+To use this plugin from ImageJ's (Python) scripting interface, these lines can be used to open a dataset:
+
+```python
+from ch.psi.imagej.hdf5 import HDF5Reader
+reader = HDF5Reader()
+stack = reader.open("", False, "/Users/ebner/Desktop/A8_d_400N030_.h5", "/exchange/data_dark", True)
+```
 
 # Installation
 To be able to install this plugin, ImageJ needs to be run with a Java 7 or greater JVM.
@@ -79,6 +89,11 @@ cd <FIJI_HOME>
 Starting with Java 8, just the LD_LIBRARY_PATH variable needs to be set. For MacOSX it is `export DYLD_LIBRARY_PATH=lib/mac64/:$DYLD_LIBRARY_PATH`.
 
 # Development
+To be able to run the tests and the plugin from within the IDE, the following arguments need to be passed:
+
+![RunSettings](RunSettings.png)
+
 To create an all-in-one zip file for installation in an ImageJ installation use:
 `mvn clean compile assembly:assembly`
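
The positional arguments in the scripting example above map onto the signature of the new `open(arg, interactive, filename, nameOfDataset, virtualstack)` method introduced in HDF5Reader.java below. A minimal annotated Jython sketch, assuming a hypothetical file path (the dataset name `/exchange/data` is taken from the diff's own `main` method):

```python
from ch.psi.imagej.hdf5 import HDF5Reader

reader = HDF5Reader()
stack = reader.open(
    "",                 # arg: raw ImageJ argument string (unused when interactive=False)
    False,              # interactive: False suppresses the file/dataset selection dialogs
    "/tmp/example.h5",  # filename: path to the HDF5 file (placeholder)
    "/exchange/data",   # nameOfDataset: full name of the dataset inside the file
    True)               # virtualstack: load slices lazily; note this keeps the file open
print(stack.getSize())  # open() returns the ImageStack that was displayed
```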

RunSettings.png (new binary file, 358 KiB; not shown)

build.gradle

@@ -2,7 +2,7 @@ apply plugin: 'java'
 apply plugin: 'maven'
 
 group = 'ch.psi'
-version = '0.8.0'
+version = '0.11.0'
 
 description = """"""
@@ -13,7 +13,7 @@ targetCompatibility = 1.7
 repositories {
 	mavenCentral()
-	maven { url "http://slsyoke4.psi.ch:8081/artifactory/libs-releases" }
+	maven { url "http://artifacts.psi.ch/artifactory/libs-releases" }
 }
 
 dependencies {
 	compile group: 'hdf5', name: 'hdf', version:'2.10.0'

pom.xml

@@ -3,7 +3,7 @@
 	<modelVersion>4.0.0</modelVersion>
 	<groupId>ch.psi</groupId>
 	<artifactId>imagej.hdf5</artifactId>
-	<version>0.9.0</version>
+	<version>0.11.0</version>
 
 	<dependencies>
 		<dependency>

src/main/java/ch/psi/imagej/hdf5/HDF5Reader.java

@@ -10,7 +10,7 @@ import ij.plugin.PlugIn;
 import java.io.File;
 import java.lang.reflect.Array;
-import java.util.List;
+import java.util.*;
 import java.util.logging.Level;
 import java.util.logging.Logger;
@@ -28,45 +28,83 @@ public class HDF5Reader implements PlugIn {
 	 */
 	public static void main(String[] args){
 		HDF5Reader r = new HDF5Reader();
-		r.run("");
+		// r.run("");
+		r.open("", false, "/Users/ebner/Desktop/A8_d_400N030_.h5", "/exchange/data", true);
 	}
 
+	public void run(String arg) {
+		open(arg, true, null, null, true);
+	}
+
 	/**
 	 * Main function plugin
+	 * arg is a space separated list of arguments that can be passed to the run method.
+	 * arg looks something like this: "para1=value1 para2=value2 ....."
+	 *
+	 * Supported arguments for arg:
+	 *   open=<path>
+	 *   dataset=/your/path/to/dataset
+	 *
 	 */
-	public void run(String arg) {
-		OpenDialog od = new OpenDialog("Open HDF5 ...", arg);
-
-		File tfile = new File(od.getDirectory() + od.getFileName());
-		if (!tfile.exists() || !tfile.canRead()) {
-			IJ.showMessage("Cannot open file: "+tfile.getAbsolutePath());
-			return;
-		}
-		String filename = tfile.getAbsolutePath();
-		IJ.showStatus("Loading HDF5 File: " + filename);
-		IJ.showProgress(0.0);
+	public ImageStack open(String arg, boolean interactive, String filename, String nameOfDataset, boolean virtualstack) {
+		// Map arguments = HDF5Reader.parseArguments(arg);
+
+		File tfile = null;
+		if(interactive) {
+			OpenDialog od = new OpenDialog("Open HDF5 ...", arg);
+			tfile = new File(od.getDirectory() + od.getFileName());
+			if (!tfile.exists() || !tfile.canRead()) {
+				IJ.showMessage("Cannot open file: "+tfile.getAbsolutePath());
+				return null;
+			}
+			// Overwrite filename with selected filename
+			filename = tfile.getAbsolutePath();
+
+			IJ.showStatus("Loading HDF5 File: " + filename);
+			IJ.showProgress(0.0);
+		}
 
 		// Read HDF5 file
 		H5File file = null;
 		boolean close = true;
+		List<ImageStack> stacks = new ArrayList<>();
+		ImageStack stack = null;
 		try {
 			file = new H5File(filename, H5File.READ);
 			file.setMaxMembers(Integer.MAX_VALUE);
 			file.open();
 
 			List<Dataset> datasets = HDF5Utilities.getDatasets(file);
-			DatasetSelection selectedDatasets = selectDatasets(datasets);
+
+			DatasetSelection selectedDatasets = null;
+			if(interactive){
+				logger.info("Using manual selection");
+				// Manual selection of the dataset and other parameters via a dialog
+				selectedDatasets = selectDatasets(datasets);
+			}
+			else{
+				logger.info("Using automatic selection");
+				selectedDatasets = new DatasetSelection();
+				for(Dataset dataset: datasets){
+					if(dataset.getFullName().equals(nameOfDataset)){
+						selectedDatasets.getDatasets().add(dataset);
+						break; // we only support one selection for the time being
+					}
+				}
+				selectedDatasets.setVirtualStack(virtualstack);
+			}
 
 			// TODO to be removed - Workaround virtual stack - keep HDF5 file open at the end
 			close=!selectedDatasets.isVirtualStack();
 
 			// TODO Remove
 			// Hack as a proof of principle
 			if(selectedDatasets.isGroup()){
-				ImageStack stack = null;
 				for (Dataset var : selectedDatasets.getDatasets()) {
 					if(stack == null){
@@ -81,10 +119,11 @@ public class HDF5Reader implements PlugIn {
 				ImagePlus imp = new ImagePlus(filename, stack);
 				imp.resetDisplayRange();
 				imp.show();
-				return;
+				stacks.add(stack);
+				return stack; // TODO should return stacks instead of stack
 			}
 
 			for (Dataset var : selectedDatasets.getDatasets()) {
 				// Read dataset attributes and properties
@@ -111,7 +150,8 @@ public class HDF5Reader implements PlugIn {
 					Object wholeDataset = var.read();
 
-					ImageStack stack = new ImageStack((int) dimensions[3], (int) dimensions[2]);
+					stack = new ImageStack((int) dimensions[3], (int) dimensions[2]);
+					stacks.add(stack);
 					int stackSize = (int) (dimensions[2] * dimensions[3] * 3);
 					int singleVolumeSize = (int) (dimensions[1] * stackSize);
 					for (int volIDX = 0; volIDX < dimensions[0]; ++volIDX) {
@@ -141,7 +181,8 @@ public class HDF5Reader implements PlugIn {
 					Object wholeDataset = var.read();
 
-					ImageStack stack = new ImageStack((int) dimensions[2], (int) dimensions[1]);
+					stack = new ImageStack((int) dimensions[2], (int) dimensions[1]);
+					stacks.add(stack);
 					int stackSize = (int) (dimensions[1] * dimensions[2] * 3);
 					for (int lev = 0; lev < dimensions[0]; ++lev) {
 						int startIdx = lev * stackSize;
@@ -168,7 +209,8 @@ public class HDF5Reader implements PlugIn {
 					Object wholeDataset = var.read();
 
-					ImageStack stack = new ImageStack((int) dimensions[3], (int) dimensions[2]);
+					stack = new ImageStack((int) dimensions[3], (int) dimensions[2]);
+					stacks.add(stack);
 					int size = (int) (dimensions[2] * dimensions[3]);
 					long singleVolumeSize = dimensions[1] * size;
 					for (int volIDX = 0; volIDX < dimensions[0]; ++volIDX) {
@@ -196,7 +238,8 @@ public class HDF5Reader implements PlugIn {
 					Object wholeDataset = var.read();
 
-					ImageStack stack = new ImageStack((int) dimensions[1], (int) dimensions[0]);
+					stack = new ImageStack((int) dimensions[1], (int) dimensions[0]);
+					stacks.add(stack);
 					addSliceRGB(stack, wholeDataset, (int) dimensions[0], (int) dimensions[1]);
 
 					ImagePlus imp = new ImagePlus(filename + " " + datasetName, stack);
@@ -209,8 +252,6 @@ public class HDF5Reader implements PlugIn {
 				} else if (numberOfDimensions == 3) {
 					logger.info("3D Image");
 
-					ImageStack stack;
-
 					if(selectedDatasets.isVirtualStack()){
 						logger.info("Use virtual stack");
 						stack = new VirtualStackHDF5(file, var);
@@ -278,6 +319,7 @@ public class HDF5Reader implements PlugIn {
 						}
 					}
 
+					stacks.add(stack);
 					ImagePlus imp = new ImagePlusHDF5(filename + " " + datasetName, stack);
 					imp.resetDisplayRange();
 					imp.show();
@@ -287,7 +329,8 @@ public class HDF5Reader implements PlugIn {
 					Object wholeDataset = var.read();
 
-					ImageStack stack = new ImageStack((int) dimensions[1], (int) dimensions[0]);
+					stack = new ImageStack((int) dimensions[1], (int) dimensions[0]);
+					stacks.add(stack);
 					addSlice(stack, wholeDataset);
 
 					ImagePlus imp = new ImagePlus(filename + " " + datasetName, stack);
@@ -319,6 +362,8 @@ public class HDF5Reader implements PlugIn {
 		}
 
 		IJ.showProgress(1.0);
+
+		return stack; // TODO should return stacks instead of stack
 	}
 
 	/**
@@ -353,7 +398,6 @@ public class HDF5Reader implements PlugIn {
 		return selectedDatasets;
 	}
 
 	/**
 	 * Add slice to image stack
 	 * @param stack Stack to add slice
@@ -433,4 +477,22 @@ public class HDF5Reader implements PlugIn {
 		stack.addSlice(null, g);
 		stack.addSlice(null, b);
 	}
+
+	public static Map<String,String> parseArguments(String arg){
+		// ImageJ arguments look something like this: "para1=value1 para2=value2 ....."
+		Map<String,String> map = new HashMap<>();
+		arg = arg.trim();
+		for(String argument: arg.split("\\s+")){
+			String[] entry = argument.split("=");
+			if(entry.length==2) {
+				map.put(entry[0], entry[1]);
+			}
+			else{
+				// ignore
+				logger.warning("Cannot parse argument " + argument + " - Ignore");
+			}
+		}
+		return map;
+	}
 }
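
The new static `parseArguments` helper splits a space separated argument string into "key=value" pairs; tokens without an `=` are logged with a warning and skipped. Note that the call to it at the top of `open(...)` is still commented out, so `run()` does not yet honour the documented `open=` and `dataset=` arguments. A small Jython sketch of the helper's behaviour, with hypothetical values:

```python
from ch.psi.imagej.hdf5 import HDF5Reader

# Returns a java.util.Map<String,String>; malformed tokens are skipped.
args = HDF5Reader.parseArguments("open=/tmp/example.h5 dataset=/exchange/data junk")
print(args.get("open"))          # -> /tmp/example.h5
print(args.get("dataset"))       # -> /exchange/data
print(args.containsKey("junk"))  # -> False (no '=', so the token was ignored)
```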

src/main/java/ch/psi/imagej/hdf5/ImagePlusHDF5.java

@@ -29,32 +29,32 @@ public class ImagePlusHDF5 extends ImagePlus {
 			@Override
 			public void windowOpened(WindowEvent e) {
-				logger.info("");
+				// logger.info("");
 			}
 
 			@Override
 			public void windowIconified(WindowEvent e) {
-				logger.info("");
+				// logger.info("");
 			}
 
 			@Override
 			public void windowDeiconified(WindowEvent e) {
-				logger.info("");
+				// logger.info("");
 			}
 
 			@Override
 			public void windowDeactivated(WindowEvent e) {
-				logger.info("");
+				// logger.info("");
 			}
 
 			@Override
 			public void windowClosing(WindowEvent e) {
-				logger.info("Closing");
+				// logger.info("Closing");
 			}
 
 			@Override
 			public void windowClosed(WindowEvent e) {
-				logger.info("Closed");
+				// logger.info("Closed");
 				if(stack!=null){
 					stack.close();
@@ -63,7 +63,7 @@ public class ImagePlusHDF5 extends ImagePlus {
 			@Override
 			public void windowActivated(WindowEvent e) {
-				logger.info("");
+				// logger.info("");
 			}
 		});
 	}

src/test/java/ch/psi/imagej/hdf5/HDF5ReaderTest.java (new file)

@@ -0,0 +1,22 @@
+package ch.psi.imagej.hdf5;
+
+import org.junit.Test;
+
+import java.util.Map;
+
+import static org.junit.Assert.*;
+
+/**
+ *
+ */
+public class HDF5ReaderTest {
+
+	@Test
+	public void parseArguments() throws Exception {
+		Map map = HDF5Reader.parseArguments("para1=value1 para2=value2 PARA=VAL");
+		assertTrue(map.get("para1").equals("value1"));
+		assertTrue(map.get("para2").equals("value2"));
+		assertTrue(map.get("PARA").equals("VAL"));
+	}
+
+}

src/test/java/ch/psi/imagej/hdf5/HDF5UtilitiesTest.java

@@ -2,6 +2,7 @@ package ch.psi.imagej.hdf5;
 
 import static org.junit.Assert.*;
+import ij.IJ;
 import org.junit.Test;
 
 public class HDF5UtilitiesTest {
@@ -22,4 +23,14 @@ public class HDF5UtilitiesTest {
 		assertEquals(gdescriptor, "three");
 	}
 
+	@Test
+	public void testOpen() {
+		IJ.run("HDF5...");
+		String descriptor = "/test/one/two/three";
+		String gdescriptor = HDF5Utilities.getDatasetName(descriptor);
+		System.out.println(gdescriptor);
+		assertEquals(gdescriptor, "three");
+	}
+
 }