10 Commits

SHA1 Message Date
75e490fa18 Implemented option to hide image when opening 2018-01-24 10:35:52 +01:00
70fa714aac Fixed build instructions 2018-01-24 10:33:54 +01:00
2372bf786f Update readme 2017-08-28 17:21:17 +02:00
cf0101a25d Increment version to 0.11.0 2017-08-28 17:15:37 +02:00
3c9fd2b97e Add quick and dirty hack 2017-08-28 17:13:10 +02:00
    Quick and dirty hack to allow ImageJ (Python) scripts to open HDF5 files via a routine and do operations with them.
    This is far from perfect and needs major cleanup! It is implemented like this due to lack of time ...
eca2294000 Attempt to use plugin options 2017-08-28 16:19:28 +02:00
efc5891e2e update gitignore 2017-08-17 09:38:31 +02:00
ec38041ddf Fix artifact repository URL 2016-11-30 15:53:22 +01:00
8e5ca84a9e removed unnecessary logging 2015-04-21 16:06:41 +02:00
d967b27d7c removed log messages 2015-04-21 15:15:06 +02:00
10 changed files with 222 additions and 46 deletions

.gitignore

@@ -1,3 +1,4 @@
+.idea
 /target
 .gradle
 build

Readme.md

@@ -1,4 +1,4 @@
 # Overview
 ImageJ plugin for reading and writing HDF5 files.
@@ -20,6 +20,16 @@ To save to an HDF5 file use:
 File > SaveAs > HDF5
 ```
+
+## Scripting
+To use this plugin from ImageJ's (Python) scripting interface, the following lines can be used to open a dataset:
+
+```python
+from ch.psi.imagej.hdf5 import HDF5Reader
+reader = HDF5Reader()
+stack = reader.open("", False, "/Users/ebner/Desktop/A8_d_400N030_.h5", "/exchange/data_dark", True)
+```
+
 # Installation
 To be able to install this plugin, ImageJ needs to be run with a Java 7 or greater JVM.
@@ -79,11 +89,18 @@ cd <FIJI_HOME>
 Starting with Java 8, only the LD_LIBRARY_PATH variable needs to be set. For Mac OS X it is `export DYLD_LIBRARY_PATH=lib/mac64/:$DYLD_LIBRARY_PATH`.
 
 # Development
+To be able to run the tests and the plugin from within the IDE, the following arguments need to be passed:
+
+![RunSettings](RunSettings.png)
+
 To create an all-in-one zip file for installation in an ImageJ installation use:
 
 `mvn clean compile assembly:assembly`
 
 The zip file contains an all-in-one jar as well as the required native libraries for Windows, Linux and Mac OS X.
+
+Note: to be able to build the package you need access to the PSI artifact server, so this only works within the PSI network and with a suitable Maven configuration. An example Maven settings.xml that you can copy to `~/.m2/settings.xml` is located [here](settings.xml).
+
 # Acknowledgements
 This project was inspired by the ImageJ HDF Plugin of Matthias Schlachter, Chair of Pattern Recognition and Image Processing, University of Freiburg, Germany (https://code.google.com/p/imagej-hdf).
 It is a complete rewrite of the code with a focus on efficiency and maintainability.
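The Jython snippet above goes through the new `open(...)` overloads added to `HDF5Reader` in this change (full diff below). For reference, a minimal sketch of the equivalent call from Java; the class name is made up, the file and dataset paths are the sample values from the Readme, and it assumes the plugin jar plus the HDF5 native libraries are on the classpath:

```java
import ch.psi.imagej.hdf5.HDF5Reader;
import ij.ImageStack;

public class OpenHDF5Example {
    public static void main(String[] args) {
        HDF5Reader reader = new HDF5Reader();
        // open(arg, interactive, filename, nameOfDataset, virtualstack, showImage):
        // interactive=false skips the file dialog, virtualstack=true reads slices lazily,
        // showImage=false suppresses the ImagePlus window (useful when running headless).
        ImageStack stack = reader.open("", false,
                "/Users/ebner/Desktop/A8_d_400N030_.h5", "/exchange/data_dark", true, false);
        System.out.println("Loaded " + stack.getSize() + " slices");
    }
}
```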

RunSettings.png (new binary file, 358 KiB; not shown)

build.gradle

@@ -2,7 +2,7 @@ apply plugin: 'java'
 apply plugin: 'maven'
 
 group = 'ch.psi'
-version = '0.8.0'
+version = '0.11.0'
 
 description = """"""
@@ -13,7 +13,7 @@ targetCompatibility = 1.7
 repositories {
     mavenCentral()
-    maven { url "http://slsyoke4.psi.ch:8081/artifactory/libs-releases" }
+    maven { url "http://artifacts.psi.ch/artifactory/libs-releases" }
 }
 
 dependencies {
     compile group: 'hdf5', name: 'hdf', version:'2.10.0'

pom.xml

@@ -3,7 +3,7 @@
     <modelVersion>4.0.0</modelVersion>
     <groupId>ch.psi</groupId>
     <artifactId>imagej.hdf5</artifactId>
-    <version>0.9.0</version>
+    <version>0.12.0</version>
     <dependencies>
         <dependency>

settings.xml (new file)

@@ -0,0 +1,44 @@
<?xml version="1.0" encoding="UTF-8"?>
<settings xsi:schemaLocation="http://maven.apache.org/SETTINGS/1.0.0 http://maven.apache.org/xsd/settings-1.0.0.xsd"
          xmlns="http://maven.apache.org/SETTINGS/1.0.0"
          xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance">
  <profiles>
    <profile>
      <repositories>
        <repository>
          <snapshots>
            <enabled>false</enabled>
          </snapshots>
          <id>central</id>
          <name>libs-releases</name>
          <url>http://artifacts.psi.ch/artifactory/libs-releases</url>
        </repository>
        <repository>
          <snapshots />
          <id>snapshots</id>
          <name>libs-snapshots</name>
          <url>http://artifacts.psi.ch/artifactory/libs-snapshots</url>
        </repository>
      </repositories>
      <pluginRepositories>
        <pluginRepository>
          <snapshots>
            <enabled>false</enabled>
          </snapshots>
          <id>central</id>
          <name>libs-releases</name>
          <url>http://artifacts.psi.ch/artifactory/libs-releases</url>
        </pluginRepository>
        <pluginRepository>
          <snapshots />
          <id>snapshots</id>
          <name>libs-releases</name>
          <url>http://artifacts.psi.ch/artifactory/libs-releases</url>
        </pluginRepository>
      </pluginRepositories>
      <id>artifactory</id>
    </profile>
  </profiles>
  <activeProfiles>
    <activeProfile>artifactory</activeProfile>
  </activeProfiles>
</settings>

HDF5Reader.java

@@ -10,7 +10,7 @@ import ij.plugin.PlugIn;
 import java.io.File;
 import java.lang.reflect.Array;
-import java.util.List;
+import java.util.*;
 import java.util.logging.Level;
 import java.util.logging.Logger;
@@ -28,45 +28,88 @@ public class HDF5Reader implements PlugIn {
      */
     public static void main(String[] args){
         HDF5Reader r = new HDF5Reader();
-        r.run("");
+        // r.run("");
+        r.open("",false, "/Users/ebner/Desktop/A8_d_400N030_.h5", "/exchange/data", true);
     }
 
+    public void run(String arg) {
+        open(arg, true, null, null, true);
+    }
+
     /**
      * Main function plugin
+     * arg is a space separated list of arguments that can be passed to the run method.
+     * arg looks something like this: "para1=value1 para2=value2 ....."
+     *
+     * Supported arguments for arg:
+     *   open=&lt;path&gt;
+     *   dataset=/your/path/to/dataset
+     *
      */
-    public void run(String arg) {
-        OpenDialog od = new OpenDialog("Open HDF5 ...", arg);
-        File tfile = new File(od.getDirectory() + od.getFileName());
-        if (!tfile.exists() || !tfile.canRead()) {
-            IJ.showMessage("Cannot open file: "+tfile.getAbsolutePath());
-            return;
-        }
-        String filename = tfile.getAbsolutePath();
-        IJ.showStatus("Loading HDF5 File: " + filename);
-        IJ.showProgress(0.0);
+    public ImageStack open(String arg, boolean interactive, String filename, String nameOfDataset, boolean virtualstack) {
+        return open(arg, interactive, filename, nameOfDataset, virtualstack, true);
+    }
+
+    public ImageStack open(String arg, boolean interactive, String filename, String nameOfDataset, boolean virtualstack, boolean showImage) {
+
+        // Map arguments = HDF5Reader.parseArguments(arg);
+
+        File tfile = null;
+        if(interactive) {
+            OpenDialog od = new OpenDialog("Open HDF5 ...", arg);
+            tfile = new File(od.getDirectory() + od.getFileName());
+            if (!tfile.exists() || !tfile.canRead()) {
+                IJ.showMessage("Cannot open file: "+tfile.getAbsolutePath());
+                return null;
+            }
+            // Overwrite filename with selected filename
+            filename = tfile.getAbsolutePath();
+            IJ.showStatus("Loading HDF5 File: " + filename);
+            IJ.showProgress(0.0);
+        }
 
         // Read HDF5 file
         H5File file = null;
         boolean close = true;
+        List<ImageStack> stacks = new ArrayList<>();
+        ImageStack stack = null;
         try {
             file = new H5File(filename, H5File.READ);
             file.setMaxMembers(Integer.MAX_VALUE);
             file.open();
 
             List<Dataset> datasets = HDF5Utilities.getDatasets(file);
-            DatasetSelection selectedDatasets = selectDatasets(datasets);
+
+            DatasetSelection selectedDatasets = null;
+            if(interactive){
+                logger.info("Using manual selection");
+                // Manual selection of the dataset and other parameters via a dialog
+                selectedDatasets = selectDatasets(datasets);
+            }
+            else{
+                logger.info("Using automatic selection");
+                selectedDatasets = new DatasetSelection();
+                for(Dataset dataset: datasets){
+                    if(dataset.getFullName().equals(nameOfDataset)){
+                        selectedDatasets.getDatasets().add(dataset);
+                        break; // we only support one selection for the time being
+                    }
+                }
+                selectedDatasets.setVirtualStack(virtualstack);
+            }
 
             // TODO to be removed - Workaround virtual stack - keep HDF5 file open at the end
             close=!selectedDatasets.isVirtualStack();
 
             // TODO Remove
             // Hack as a proof of principle
             if(selectedDatasets.isGroup()){
-                ImageStack stack = null;
                 for (Dataset var : selectedDatasets.getDatasets()) {
                     if(stack == null){
@@ -80,11 +123,14 @@ public class HDF5Reader implements PlugIn {
                 ImagePlus imp = new ImagePlus(filename, stack);
                 imp.resetDisplayRange();
-                imp.show();
-                return;
+                if(showImage) {
+                    imp.show();
+                }
+                stacks.add(stack);
+                return stack; // TODO should return stacks instead of stack
             }
 
             for (Dataset var : selectedDatasets.getDatasets()) {
                 // Read dataset attributes and properties
@@ -111,7 +157,8 @@ public class HDF5Reader implements PlugIn {
                     Object wholeDataset = var.read();
 
-                    ImageStack stack = new ImageStack((int) dimensions[3], (int) dimensions[2]);
+                    stack = new ImageStack((int) dimensions[3], (int) dimensions[2]);
+                    stacks.add(stack);
                     int stackSize = (int) (dimensions[2] * dimensions[3] * 3);
                     int singleVolumeSize = (int) (dimensions[1] * stackSize);
                     for (int volIDX = 0; volIDX < dimensions[0]; ++volIDX) {
@@ -126,7 +173,9 @@ public class HDF5Reader implements PlugIn {
                     imp = new CompositeImage(imp, CompositeImage.COMPOSITE);
                     imp.setOpenAsHyperStack(true);
                     imp.resetDisplayRange();
-                    imp.show();
+                    if(showImage) {
+                        imp.show();
+                    }
 
                 } else if (numberOfDimensions == 4 && dimensions[3] == 3) {
                     logger.info("3D RGB Image");
@@ -141,7 +190,8 @@ public class HDF5Reader implements PlugIn {
                     Object wholeDataset = var.read();
 
-                    ImageStack stack = new ImageStack((int) dimensions[2], (int) dimensions[1]);
+                    stack = new ImageStack((int) dimensions[2], (int) dimensions[1]);
+                    stacks.add(stack);
                     int stackSize = (int) (dimensions[1] * dimensions[2] * 3);
                     for (int lev = 0; lev < dimensions[0]; ++lev) {
                         int startIdx = lev * stackSize;
@@ -153,7 +203,9 @@ public class HDF5Reader implements PlugIn {
                     imp = new CompositeImage(imp, CompositeImage.COMPOSITE);
                     imp.setOpenAsHyperStack(true);
                     imp.resetDisplayRange();
-                    imp.show();
+                    if(showImage) {
+                        imp.show();
+                    }
 
                 } else if (numberOfDimensions == 4) {
                     logger.info("4D Image (HyperVolume)");
@@ -168,7 +220,8 @@ public class HDF5Reader implements PlugIn {
                     Object wholeDataset = var.read();
 
-                    ImageStack stack = new ImageStack((int) dimensions[3], (int) dimensions[2]);
+                    stack = new ImageStack((int) dimensions[3], (int) dimensions[2]);
+                    stacks.add(stack);
                     int size = (int) (dimensions[2] * dimensions[3]);
                     long singleVolumeSize = dimensions[1] * size;
                     for (int volIDX = 0; volIDX < dimensions[0]; ++volIDX) {
@@ -182,7 +235,9 @@ public class HDF5Reader implements PlugIn {
                     imp.setDimensions(1, (int) dimensions[1], (int) dimensions[0]);
                     imp.setOpenAsHyperStack(true);
                     imp.resetDisplayRange();
-                    imp.show();
+                    if(showImage) {
+                        imp.show();
+                    }
 
                 } else if (numberOfDimensions == 3 && dimensions[2] == 3) {
                     logger.info("2D RGB Image");
@@ -196,7 +251,8 @@ public class HDF5Reader implements PlugIn {
                     Object wholeDataset = var.read();
 
-                    ImageStack stack = new ImageStack((int) dimensions[1], (int) dimensions[0]);
+                    stack = new ImageStack((int) dimensions[1], (int) dimensions[0]);
+                    stacks.add(stack);
                     addSliceRGB(stack, wholeDataset, (int) dimensions[0], (int) dimensions[1]);
 
                     ImagePlus imp = new ImagePlus(filename + " " + datasetName, stack);
@@ -204,13 +260,13 @@ public class HDF5Reader implements PlugIn {
                     imp = new CompositeImage(imp, CompositeImage.COMPOSITE);
                     imp.setOpenAsHyperStack(true);
                     imp.resetDisplayRange();
-                    imp.show();
+                    if(showImage) {
+                        imp.show();
+                    }
 
                 } else if (numberOfDimensions == 3) {
                     logger.info("3D Image");
 
-                    ImageStack stack;
                     if(selectedDatasets.isVirtualStack()){
                         logger.info("Use virtual stack");
                         stack = new VirtualStackHDF5(file, var);
@@ -278,21 +334,27 @@ public class HDF5Reader implements PlugIn {
                         }
                     }
 
+                    stacks.add(stack);
                     ImagePlus imp = new ImagePlusHDF5(filename + " " + datasetName, stack);
                     imp.resetDisplayRange();
-                    imp.show();
+                    if(showImage) {
+                        imp.show();
+                    }
 
                 } else if (numberOfDimensions == 2) {
                     logger.info("2D Image");
 
                     Object wholeDataset = var.read();
 
-                    ImageStack stack = new ImageStack((int) dimensions[1], (int) dimensions[0]);
+                    stack = new ImageStack((int) dimensions[1], (int) dimensions[0]);
+                    stacks.add(stack);
                     addSlice(stack, wholeDataset);
 
                     ImagePlus imp = new ImagePlus(filename + " " + datasetName, stack);
                     imp.resetDisplayRange();
-                    imp.show();
+                    if(showImage) {
+                        imp.show();
+                    }
 
                 } else {
                     IJ.showStatus("Variable Dimension " + numberOfDimensions + " not supported");
@@ -319,6 +381,8 @@ public class HDF5Reader implements PlugIn {
         }
 
         IJ.showProgress(1.0);
+        return stack; // TODO should return stacks instead of stack
     }
 
     /**
@@ -353,7 +417,6 @@ public class HDF5Reader implements PlugIn {
         return selectedDatasets;
     }
-
     /**
      * Add slice to image stack
      * @param stack Stack to add slice
@@ -433,4 +496,22 @@ public class HDF5Reader implements PlugIn {
         stack.addSlice(null, g);
         stack.addSlice(null, b);
     }
+
+    public static Map<String,String> parseArguments(String arg){
+
+        // ImageJ arguments look something like this: "para1=value1 para2=value2 ....."
+        Map<String,String> map = new HashMap<>();
+        arg = arg.trim();
+        for(String argument: arg.split("\\s+")){
+            String[] entry = argument.split("=");
+            if(entry.length==2) {
+                map.put(entry[0], entry[1]);
+            }
+            else{
+                // ignore
+                logger.warning("Cannot parse argument " + argument + " - Ignore");
+            }
+        }
+        return map;
+    }
 }
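The new static `parseArguments` helper is not yet wired into `run()` (the call above is still commented out), but its behaviour is fully determined by the code shown and the new unit test. A minimal sketch with made-up paths:

```java
import java.util.Map;

import ch.psi.imagej.hdf5.HDF5Reader;

public class ParseArgumentsDemo {
    public static void main(String[] args) {
        // Space-separated key=value pairs, as described in the new javadoc
        Map<String, String> map = HDF5Reader.parseArguments("open=/tmp/test.h5 dataset=/exchange/data");
        System.out.println(map.get("open"));    // prints /tmp/test.h5
        System.out.println(map.get("dataset")); // prints /exchange/data
        // A token without '=' (e.g. "verbose") is logged as a warning and skipped
    }
}
```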

ImagePlusHDF5.java

@@ -29,32 +29,32 @@ public class ImagePlusHDF5 extends ImagePlus {
             @Override
             public void windowOpened(WindowEvent e) {
-                logger.info("");
+                // logger.info("");
             }
 
             @Override
             public void windowIconified(WindowEvent e) {
-                logger.info("");
+                // logger.info("");
             }
 
             @Override
             public void windowDeiconified(WindowEvent e) {
-                logger.info("");
+                // logger.info("");
             }
 
             @Override
             public void windowDeactivated(WindowEvent e) {
-                logger.info("");
+                // logger.info("");
             }
 
             @Override
             public void windowClosing(WindowEvent e) {
-                logger.info("Closing");
+                // logger.info("Closing");
             }
 
             @Override
             public void windowClosed(WindowEvent e) {
-                logger.info("Closed");
+                // logger.info("Closed");
                 if(stack!=null){
                     stack.close();
@@ -63,7 +63,7 @@ public class ImagePlusHDF5 extends ImagePlus {
             @Override
             public void windowActivated(WindowEvent e) {
-                logger.info("");
+                // logger.info("");
             }
         });
     }

HDF5ReaderTest.java (new file)

@@ -0,0 +1,22 @@
package ch.psi.imagej.hdf5;

import org.junit.Test;

import java.util.Map;

import static org.junit.Assert.*;

/**
 *
 */
public class HDF5ReaderTest {

    @Test
    public void parseArguments() throws Exception {
        Map map = HDF5Reader.parseArguments("para1=value1 para2=value2 PARA=VAL");
        assertTrue(map.get("para1").equals("value1"));
        assertTrue(map.get("para2").equals("value2"));
        assertTrue(map.get("PARA").equals("VAL"));
    }
}

HDF5UtilitiesTest.java

@@ -2,6 +2,7 @@ package ch.psi.imagej.hdf5;
 import static org.junit.Assert.*;
 
+import ij.IJ;
 import org.junit.Test;
 
 public class HDF5UtilitiesTest {
@@ -22,4 +23,14 @@ public class HDF5UtilitiesTest {
         assertEquals(gdescriptor, "three");
     }
 
+    @Test
+    public void testOpen() {
+        IJ.run("HDF5...");
+        String descriptor = "/test/one/two/three";
+        String gdescriptor = HDF5Utilities.getDatasetName(descriptor);
+        System.out.println(gdescriptor);
+        assertEquals(gdescriptor, "three");
+    }
 }
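The new `testOpen` above drives the plugin through ImageJ's macro interface. Once `run()` actually consumes the `parseArguments` output (the call is still commented out in the `HDF5Reader` diff), a non-interactive invocation would presumably look like the following sketch; the `"HDF5..."` command label is taken from the test, while the argument string and paths are assumptions:

```java
import ij.IJ;

public class RunPluginSketch {
    public static void main(String[] args) {
        // Hypothetical: arguments in the "para1=value1 para2=value2" format
        // documented in HDF5Reader; not functional until parseArguments is wired in.
        IJ.run("HDF5...", "open=/tmp/test.h5 dataset=/exchange/data");
    }
}
```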