10 Commits

SHA1 Message Date
75e490fa18 Implemented option to hide image when opening 2018-01-24 10:35:52 +01:00
70fa714aac Fixed build instructions 2018-01-24 10:33:54 +01:00
2372bf786f Update readme 2017-08-28 17:21:17 +02:00
cf0101a25d Increment version to 0.11.0 2017-08-28 17:15:37 +02:00
3c9fd2b97e Add quick and dirty hack 2017-08-28 17:13:10 +02:00
Quick and dirty hack to allow ImageJ (Python) scripts to open HDF5 files via a routine and do operations with them.
This is far from perfect and needs major cleanup! It is implemented like this due to lack of time ...
eca2294000 Attempt to use plugin options 2017-08-28 16:19:28 +02:00
efc5891e2e update gitignore 2017-08-17 09:38:31 +02:00
ec38041ddf Fix artifact repository URL 2016-11-30 15:53:22 +01:00
8e5ca84a9e removed unnecessary logging 2015-04-21 16:06:41 +02:00
d967b27d7c removed log messages 2015-04-21 15:15:06 +02:00
10 changed files with 222 additions and 46 deletions

.gitignore

@@ -1,3 +1,4 @@
.idea
/target
.gradle
build

README.md

@@ -1,4 +1,4 @@
# Overview
ImageJ plugin for reading and writing HDF5 files.
@@ -20,6 +20,16 @@ To save to an HDF5 file use:
```
File > SaveAs > HDF5
```
## Scripting
To use this plugin from ImageJ's (Python) scripting interface, the following lines can be used to open a dataset:
```python
from ch.psi.imagej.hdf5 import HDF5Reader
reader = HDF5Reader()
stack = reader.open("", False, "/Users/ebner/Desktop/A8_d_400N030_.h5", "/exchange/data_dark", True)
```
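
This changeset also adds an open() overload with a trailing showImage flag (see the HDF5Reader diff below), so a script can load a dataset without opening an image window. A minimal sketch, reusing the values from the example above:

```python
from ch.psi.imagej.hdf5 import HDF5Reader

reader = HDF5Reader()
# Same arguments as above plus showImage=False: the stack is returned,
# but imp.show() is skipped, so no image window opens.
stack = reader.open("", False, "/Users/ebner/Desktop/A8_d_400N030_.h5", "/exchange/data_dark", True, False)
```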
# Installation
To install this plugin, ImageJ needs to be run with a Java 7 or greater JVM.
@@ -79,11 +89,18 @@ cd <FIJI_HOME>
Starting with Java 8, only the LD_LIBRARY_PATH variable needs to be set. On Mac OS X this is `export DYLD_LIBRARY_PATH=lib/mac64/:$DYLD_LIBRARY_PATH`.
# Development
To run the tests and the plugin from within the IDE, the following arguments need to be passed:
![RunSettings](RunSettings.png)
To create an all-in-one zip file for installation into an ImageJ installation, use:
`mvn clean compile assembly:assembly`
The zip file contains an all-in-one jar as well as the required native libraries for Windows, Linux and Mac OS X.
Note: building the package requires access to the PSI artifact server, so this only works within the PSI network and with a suitable Maven configuration. An example Maven settings.xml that you can copy to `~/.m2/settings.xml` is located [here](settings.xml).
# Acknowledgements
This project was inspired by the ImageJ HDF plugin of Matthias Schlachter, Chair of Pattern Recognition and Image Processing, University of Freiburg, Germany (https://code.google.com/p/imagej-hdf).
It is a complete rewrite of the code with a focus on efficiency and maintainability.

RunSettings.png (new binary file, 358 KiB; not shown)

build.gradle

@@ -2,7 +2,7 @@ apply plugin: 'java'
apply plugin: 'maven'
group = 'ch.psi'
version = '0.8.0'
version = '0.11.0'
description = """"""
@@ -13,7 +13,7 @@ targetCompatibility = 1.7
repositories {
mavenCentral()
maven { url "http://slsyoke4.psi.ch:8081/artifactory/libs-releases" }
maven { url "http://artifacts.psi.ch/artifactory/libs-releases" }
}
dependencies {
compile group: 'hdf5', name: 'hdf', version:'2.10.0'

pom.xml

@@ -3,7 +3,7 @@
<modelVersion>4.0.0</modelVersion>
<groupId>ch.psi</groupId>
<artifactId>imagej.hdf5</artifactId>
<version>0.9.0</version>
<version>0.12.0</version>
<dependencies>
<dependency>

settings.xml (new file)

@@ -0,0 +1,44 @@
<?xml version="1.0" encoding="UTF-8"?>
<settings xsi:schemaLocation="http://maven.apache.org/SETTINGS/1.0.0 http://maven.apache.org/xsd/settings-1.0.0.xsd" xmlns="http://maven.apache.org/SETTINGS/1.0.0"
xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance">
<profiles>
<profile>
<repositories>
<repository>
<snapshots>
<enabled>false</enabled>
</snapshots>
<id>central</id>
<name>libs-releases</name>
<url>http://artifacts.psi.ch/artifactory/libs-releases</url>
</repository>
<repository>
<snapshots />
<id>snapshots</id>
<name>libs-snapshots</name>
<url>http://artifacts.psi.ch/artifactory/libs-snapshots</url>
</repository>
</repositories>
<pluginRepositories>
<pluginRepository>
<snapshots>
<enabled>false</enabled>
</snapshots>
<id>central</id>
<name>libs-releases</name>
<url>http://artifacts.psi.ch/artifactory/libs-releases</url>
</pluginRepository>
<pluginRepository>
<snapshots />
<id>snapshots</id>
<name>libs-releases</name>
<url>http://artifacts.psi.ch/artifactory/libs-releases</url>
</pluginRepository>
</pluginRepositories>
<id>artifactory</id>
</profile>
</profiles>
<activeProfiles>
<activeProfile>artifactory</activeProfile>
</activeProfiles>
</settings>

HDF5Reader.java

@@ -10,7 +10,7 @@ import ij.plugin.PlugIn;
import java.io.File;
import java.lang.reflect.Array;
import java.util.List;
import java.util.*;
import java.util.logging.Level;
import java.util.logging.Logger;
@@ -28,45 +28,88 @@ public class HDF5Reader implements PlugIn {
*/
public static void main(String[] args){
HDF5Reader r = new HDF5Reader();
r.run("");
// r.run("");
r.open("",false, "/Users/ebner/Desktop/A8_d_400N030_.h5", "/exchange/data", true);
}
public void run(String arg) {
open(arg, true, null, null, true);
}
/**
* Main function plugin
* arg is a space separated list of arguments that can be passed to the run method.
* arg looks something like this: "para1=value1 para2=value2 ....."
*
* Supported arguments for arg:
* open=&lt;path&gt;
* dataset=/your/path/to/dataset
*
*/
public void run(String arg) {
OpenDialog od = new OpenDialog("Open HDF5 ...", arg);
public ImageStack open(String arg, boolean interactive, String filename, String nameOfDataset, boolean virtualstack) {
return open(arg, interactive, filename, nameOfDataset, virtualstack, true);
}
File tfile = new File(od.getDirectory() + od.getFileName());
if (!tfile.exists() || !tfile.canRead()) {
IJ.showMessage("Cannot open file: "+tfile.getAbsolutePath());
return;
public ImageStack open(String arg, boolean interactive, String filename, String nameOfDataset, boolean virtualstack, boolean showImage) {
// Map arguments = HDF5Reader.parseArguments(arg);
File tfile = null;
if(interactive) {
OpenDialog od = new OpenDialog("Open HDF5 ...", arg);
tfile = new File(od.getDirectory() + od.getFileName());
if (!tfile.exists() || !tfile.canRead()) {
IJ.showMessage("Cannot open file: "+tfile.getAbsolutePath());
return null;
}
// Overwrite filename with selected filename
filename = tfile.getAbsolutePath();
IJ.showStatus("Loading HDF5 File: " + filename);
IJ.showProgress(0.0);
}
String filename = tfile.getAbsolutePath();
IJ.showStatus("Loading HDF5 File: " + filename);
IJ.showProgress(0.0);
// Read HDF5 file
H5File file = null;
boolean close = true;
List<ImageStack> stacks = new ArrayList<>();
ImageStack stack = null;
try {
file = new H5File(filename, H5File.READ);
file.setMaxMembers(Integer.MAX_VALUE);
file.open();
List<Dataset> datasets = HDF5Utilities.getDatasets(file);
DatasetSelection selectedDatasets = selectDatasets(datasets);
DatasetSelection selectedDatasets = null;
if(interactive){
logger.info("Using manual selection");
// Manual selection of the dataset and other parameters via a dialog
selectedDatasets = selectDatasets(datasets);
}
else{
logger.info("Using automatic selection");
selectedDatasets = new DatasetSelection();
for(Dataset dataset: datasets){
if(dataset.getFullName().equals(nameOfDataset)){
selectedDatasets.getDatasets().add(dataset);
break; // we only support one selection for the time being
}
}
selectedDatasets.setVirtualStack(virtualstack);
}
// TODO to be removed - Workaround virtual stack - keep HDF5 file open at the end
close=!selectedDatasets.isVirtualStack();
// TODO Remove
// Hack as a proof of principle
if(selectedDatasets.isGroup()){
ImageStack stack = null;
for (Dataset var : selectedDatasets.getDatasets()) {
if(stack == null){
@@ -80,11 +123,14 @@ public class HDF5Reader implements PlugIn {
ImagePlus imp = new ImagePlus(filename, stack);
imp.resetDisplayRange();
imp.show();
return;
if(showImage) {
imp.show();
}
stacks.add(stack);
return stack; // TODO should return stacks instead of stack
}
for (Dataset var : selectedDatasets.getDatasets()) {
// Read dataset attributes and properties
@@ -111,7 +157,8 @@
Object wholeDataset = var.read();
ImageStack stack = new ImageStack((int) dimensions[3], (int) dimensions[2]);
stack = new ImageStack((int) dimensions[3], (int) dimensions[2]);
stacks.add(stack);
int stackSize = (int) (dimensions[2] * dimensions[3] * 3);
int singleVolumeSize = (int) (dimensions[1] * stackSize);
for (int volIDX = 0; volIDX < dimensions[0]; ++volIDX) {
@@ -126,7 +173,9 @@
imp = new CompositeImage(imp, CompositeImage.COMPOSITE);
imp.setOpenAsHyperStack(true);
imp.resetDisplayRange();
imp.show();
if(showImage) {
imp.show();
}
} else if (numberOfDimensions == 4 && dimensions[3] == 3) {
logger.info("3D RGB Image");
@@ -141,7 +190,8 @@
Object wholeDataset = var.read();
ImageStack stack = new ImageStack((int) dimensions[2], (int) dimensions[1]);
stack = new ImageStack((int) dimensions[2], (int) dimensions[1]);
stacks.add(stack);
int stackSize = (int) (dimensions[1] * dimensions[2] * 3);
for (int lev = 0; lev < dimensions[0]; ++lev) {
int startIdx = lev * stackSize;
@@ -153,7 +203,9 @@
imp = new CompositeImage(imp, CompositeImage.COMPOSITE);
imp.setOpenAsHyperStack(true);
imp.resetDisplayRange();
imp.show();
if(showImage) {
imp.show();
}
} else if (numberOfDimensions == 4) {
logger.info("4D Image (HyperVolume)");
@@ -168,7 +220,8 @@
Object wholeDataset = var.read();
ImageStack stack = new ImageStack((int) dimensions[3], (int) dimensions[2]);
stack = new ImageStack((int) dimensions[3], (int) dimensions[2]);
stacks.add(stack);
int size = (int) (dimensions[2] * dimensions[3]);
long singleVolumeSize = dimensions[1] * size;
for (int volIDX = 0; volIDX < dimensions[0]; ++volIDX) {
@@ -182,7 +235,9 @@
imp.setDimensions(1, (int) dimensions[1], (int) dimensions[0]);
imp.setOpenAsHyperStack(true);
imp.resetDisplayRange();
imp.show();
if(showImage) {
imp.show();
}
} else if (numberOfDimensions == 3 && dimensions[2] == 3) {
logger.info("2D RGB Image");
@@ -196,7 +251,8 @@
Object wholeDataset = var.read();
ImageStack stack = new ImageStack((int) dimensions[1], (int) dimensions[0]);
stack = new ImageStack((int) dimensions[1], (int) dimensions[0]);
stacks.add(stack);
addSliceRGB(stack, wholeDataset, (int) dimensions[0], (int) dimensions[1]);
ImagePlus imp = new ImagePlus(filename + " " + datasetName, stack);
@@ -204,13 +260,13 @@
imp = new CompositeImage(imp, CompositeImage.COMPOSITE);
imp.setOpenAsHyperStack(true);
imp.resetDisplayRange();
imp.show();
if(showImage) {
imp.show();
}
} else if (numberOfDimensions == 3) {
logger.info("3D Image");
ImageStack stack;
if(selectedDatasets.isVirtualStack()){
logger.info("Use virtual stack");
stack = new VirtualStackHDF5(file, var);
@@ -278,21 +334,27 @@
}
}
stacks.add(stack);
ImagePlus imp = new ImagePlusHDF5(filename + " " + datasetName, stack);
imp.resetDisplayRange();
imp.show();
if(showImage) {
imp.show();
}
} else if (numberOfDimensions == 2) {
logger.info("2D Image");
Object wholeDataset = var.read();
ImageStack stack = new ImageStack((int) dimensions[1], (int) dimensions[0]);
stack = new ImageStack((int) dimensions[1], (int) dimensions[0]);
stacks.add(stack);
addSlice(stack, wholeDataset);
ImagePlus imp = new ImagePlus(filename + " " + datasetName, stack);
imp.resetDisplayRange();
imp.show();
if(showImage) {
imp.show();
}
} else {
IJ.showStatus("Variable Dimension " + numberOfDimensions + " not supported");
@@ -319,6 +381,8 @@
}
IJ.showProgress(1.0);
return stack; // TODO should return stacks instead of stack
}
/**
@@ -353,7 +417,6 @@
return selectedDatasets;
}
/**
* Add slice to image stack
* @param stack Stack to add slice
@@ -433,4 +496,22 @@
stack.addSlice(null, g);
stack.addSlice(null, b);
}
public static Map<String,String> parseArguments(String arg){
/// ImageJ arguments look something like this: "para1=value1 para2=value2 ....."
Map<String,String> map = new HashMap<>();
arg = arg.trim();
for(String argument: arg.split("\\s+")){
String[] entry = argument.split("=");
if(entry.length==2) {
map.put(entry[0], entry[1]);
}
else{
// ignore
logger.warning("Cannot parse argument " + argument + " - Ignore");
}
}
return map;
}
}
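
For reference, the argument format documented in the javadoc above ("para1=value1 para2=value2 ...") can be exercised against the new static parseArguments() helper from a Jython script. A minimal sketch with hypothetical key/value pairs:

```python
from ch.psi.imagej.hdf5 import HDF5Reader

# Hypothetical arguments: parseArguments() splits on whitespace, then on '=',
# and returns a java.util.Map; malformed entries are logged and ignored.
args = HDF5Reader.parseArguments("open=/tmp/example.h5 dataset=/exchange/data")
print(args["open"])     # -> /tmp/example.h5
print(args["dataset"])  # -> /exchange/data
```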

ImagePlusHDF5.java

@@ -29,32 +29,32 @@ public class ImagePlusHDF5 extends ImagePlus {
@Override
public void windowOpened(WindowEvent e) {
logger.info("");
// logger.info("");
}
@Override
public void windowIconified(WindowEvent e) {
logger.info("");
// logger.info("");
}
@Override
public void windowDeiconified(WindowEvent e) {
logger.info("");
// logger.info("");
}
@Override
public void windowDeactivated(WindowEvent e) {
logger.info("");
// logger.info("");
}
@Override
public void windowClosing(WindowEvent e) {
logger.info("Closing");
// logger.info("Closing");
}
@Override
public void windowClosed(WindowEvent e) {
logger.info("Closed");
// logger.info("Closed");
if(stack!=null){
stack.close();
@@ -63,7 +63,7 @@ public class ImagePlusHDF5 extends ImagePlus {
@Override
public void windowActivated(WindowEvent e) {
logger.info("");
// logger.info("");
}
});
}

HDF5ReaderTest.java (new file)

@@ -0,0 +1,22 @@
package ch.psi.imagej.hdf5;
import org.junit.Test;
import java.util.Map;
import static org.junit.Assert.*;
/**
*
*/
public class HDF5ReaderTest {
@Test
public void parseArguments() throws Exception {
Map map = HDF5Reader.parseArguments("para1=value1 para2=value2 PARA=VAL");
assertTrue(map.get("para1").equals("value1"));
assertTrue(map.get("para2").equals("value2"));
assertTrue(map.get("PARA").equals("VAL"));
}
}

HDF5UtilitiesTest.java

@@ -2,6 +2,7 @@ package ch.psi.imagej.hdf5;
import static org.junit.Assert.*;
import ij.IJ;
import org.junit.Test;
public class HDF5UtilitiesTest {
@@ -22,4 +23,14 @@ public class HDF5UtilitiesTest {
assertEquals(gdescriptor, "three");
}
@Test
public void testOpen() {
IJ.run("HDF5...");
String descriptor = "/test/one/two/three";
String gdescriptor = HDF5Utilities.getDatasetName(descriptor);
System.out.println(gdescriptor);
assertEquals(gdescriptor, "three");
}
}