Mirror of https://github.com/paulscherrerinstitute/ch.psi.imagej.hdf5.git, synced 2025-06-24 18:51:08 +02:00
Compare commits
10 Commits
Author | SHA1 | Date
---|---|---
 | 75e490fa18 |
 | 70fa714aac |
 | 2372bf786f |
 | cf0101a25d |
 | 3c9fd2b97e |
 | eca2294000 |
 | efc5891e2e |
 | ec38041ddf |
 | 8e5ca84a9e |
 | d967b27d7c |
.gitignore (vendored): 1 change

@@ -1,3 +1,4 @@
 .idea
 /target
 .gradle
+build
Readme.md: 19 changes

@@ -1,4 +1,4 @@
-# Overview
+# Overview
 
 ImageJ plugin for reading and writing HDF5 files.
 
@@ -20,6 +20,16 @@ To save to an HDF5 file use:
 File > SaveAs > HDF5
 ```
+
+## Scripting
+
+To use this plugin from ImageJ's (Python) scripting interface, these lines can be used to open a dataset:
+
+```python
+from ch.psi.imagej.hdf5 import HDF5Reader
+reader = HDF5Reader()
+stack = reader.open("", False, "/Users/ebner/Desktop/A8_d_400N030_.h5", "/exchange/data_dark", True)
+```
 
 # Installation
 To be able to install this plugin ImageJ needs to be run with a Java 7 or greater JVM.
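For reference: in the scripting example above, the positional parameters of `open` are `arg`, `interactive`, `filename`, `nameOfDataset`, and `virtualstack`, matching the new `HDF5Reader.open` signature introduced in the source changes further down.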
@@ -79,11 +89,18 @@ cd <FIJI_HOME>
 Starting with Java 8 just the LD_LIBRARY_PATH variable needs to be set. For Mac OS X it is `export DYLD_LIBRARY_PATH=lib/mac64/:$DYLD_LIBRARY_PATH`.
 
 # Development
+
+To be able to run the tests and the plugin from within the IDE, the following arguments need to be passed:
+
+![RunSettings](RunSettings.png)
+
 To create an all in one zip file for installation in an ImageJ installation use:
 `mvn clean compile assembly:assembly`
 
 The zip file contains an all in one jar as well as the required native libraries for Windows, Linux and Mac OS X.
 
+Note: to be able to build the package you need to have access to the PSI artifact server. Therefore this only works within the PSI networks and if you have a certain Maven configuration. An example Maven settings.xml that you can copy to `~/.m2/settings.xml` is located [here](settings.xml).
+
 # Acknowledgements
 This project was inspired by the ImageJ HDF Plugin of Matthias Schlachter, Chair of Pattern Recognition and Image Processing, University of Freiburg, Germany (https://code.google.com/p/imagej-hdf).
 It is a complete rewrite of the code with the focus on efficiency and maintainability.
BIN RunSettings.png (new file, 358 KiB)
build.gradle

@@ -2,7 +2,7 @@ apply plugin: 'java'
 apply plugin: 'maven'
 
 group = 'ch.psi'
-version = '0.8.0'
+version = '0.11.0'
 
 description = """"""
 
@@ -13,7 +13,7 @@ targetCompatibility = 1.7
 
 repositories {
 	mavenCentral()
-	maven { url "http://slsyoke4.psi.ch:8081/artifactory/libs-releases" }
+	maven { url "http://artifacts.psi.ch/artifactory/libs-releases" }
 }
 dependencies {
 	compile group: 'hdf5', name: 'hdf', version:'2.10.0'
pom.xml: 2 changes

@@ -3,7 +3,7 @@
 	<modelVersion>4.0.0</modelVersion>
 	<groupId>ch.psi</groupId>
 	<artifactId>imagej.hdf5</artifactId>
-	<version>0.9.0</version>
+	<version>0.12.0</version>
 
 	<dependencies>
 		<dependency>
settings.xml (new file): 44 lines

@@ -0,0 +1,44 @@
<?xml version="1.0" encoding="UTF-8"?>
<settings xsi:schemaLocation="http://maven.apache.org/SETTINGS/1.0.0 http://maven.apache.org/xsd/settings-1.0.0.xsd" xmlns="http://maven.apache.org/SETTINGS/1.0.0"
    xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance">
  <profiles>
    <profile>
      <repositories>
        <repository>
          <snapshots>
            <enabled>false</enabled>
          </snapshots>
          <id>central</id>
          <name>libs-releases</name>
          <url>http://artifacts.psi.ch/artifactory/libs-releases</url>
        </repository>
        <repository>
          <snapshots />
          <id>snapshots</id>
          <name>libs-snapshots</name>
          <url>http://artifacts.psi.ch/artifactory/libs-snapshots</url>
        </repository>
      </repositories>
      <pluginRepositories>
        <pluginRepository>
          <snapshots>
            <enabled>false</enabled>
          </snapshots>
          <id>central</id>
          <name>libs-releases</name>
          <url>http://artifacts.psi.ch/artifactory/libs-releases</url>
        </pluginRepository>
        <pluginRepository>
          <snapshots />
          <id>snapshots</id>
          <name>libs-releases</name>
          <url>http://artifacts.psi.ch/artifactory/libs-releases</url>
        </pluginRepository>
      </pluginRepositories>
      <id>artifactory</id>
    </profile>
  </profiles>
  <activeProfiles>
    <activeProfile>artifactory</activeProfile>
  </activeProfiles>
</settings>
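As noted in the Readme change above, this settings.xml is meant to be copied to `~/.m2/settings.xml`; it points Maven at the PSI Artifactory (artifacts.psi.ch) so that the `hdf5` dependency can be resolved.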
src/main/java/ch/psi/imagej/hdf5/HDF5Reader.java

@@ -10,7 +10,7 @@ import ij.plugin.PlugIn;
 
 import java.io.File;
 import java.lang.reflect.Array;
-import java.util.List;
+import java.util.*;
 import java.util.logging.Level;
 import java.util.logging.Logger;
 
@@ -28,45 +28,88 @@ public class HDF5Reader implements PlugIn {
 	 */
 	public static void main(String[] args){
 		HDF5Reader r = new HDF5Reader();
-		r.run("");
+		// r.run("");
+		r.open("",false, "/Users/ebner/Desktop/A8_d_400N030_.h5", "/exchange/data", true);
 	}
 
+	public void run(String arg) {
+		open(arg, true, null, null, true);
+	}
+
 	/**
 	 * Main function plugin
+	 * arg is a space separated list of arguments that can be passed to the run method.
+	 * arg looks something like this: "para1=value1 para2=value2 ....."
+	 *
+	 * Supported arguments for arg:
+	 *   open=<path>
+	 *   dataset=/your/path/to/dataset
+	 *
 	 */
-	public void run(String arg) {
-
-		OpenDialog od = new OpenDialog("Open HDF5 ...", arg);
-
-		File tfile = new File(od.getDirectory() + od.getFileName());
-		if (!tfile.exists() || !tfile.canRead()) {
-			IJ.showMessage("Cannot open file: "+tfile.getAbsolutePath());
-			return;
-		}
-		String filename = tfile.getAbsolutePath();
-
-		IJ.showStatus("Loading HDF5 File: " + filename);
-		IJ.showProgress(0.0);
+	public ImageStack open(String arg, boolean interactive, String filename, String nameOfDataset, boolean virtualstack) {
+		return open(arg, interactive, filename, nameOfDataset, virtualstack, true);
+	}
+
+	public ImageStack open(String arg, boolean interactive, String filename, String nameOfDataset, boolean virtualstack, boolean showImage) {
+
+		// Map arguments = HDF5Reader.parseArguments(arg);
+
+		File tfile = null;
+		if(interactive) {
+			OpenDialog od = new OpenDialog("Open HDF5 ...", arg);
+			tfile = new File(od.getDirectory() + od.getFileName());
+			if (!tfile.exists() || !tfile.canRead()) {
+				IJ.showMessage("Cannot open file: "+tfile.getAbsolutePath());
+				return null;
+			}
+
+			// Overwrite filename with selected filename
+			filename = tfile.getAbsolutePath();
+
+			IJ.showStatus("Loading HDF5 File: " + filename);
+			IJ.showProgress(0.0);
+		}
 
 		// Read HDF5 file
 		H5File file = null;
 		boolean close = true;
+
+		List<ImageStack> stacks = new ArrayList<>();
+		ImageStack stack = null;
+
 		try {
 			file = new H5File(filename, H5File.READ);
 			file.setMaxMembers(Integer.MAX_VALUE);
 			file.open();
 
 			List<Dataset> datasets = HDF5Utilities.getDatasets(file);
-			DatasetSelection selectedDatasets = selectDatasets(datasets);
+
+			DatasetSelection selectedDatasets = null;
+			if(interactive){
+				logger.info("Using manual selection");
+				// Manual selection of the dataset and other parameters via a dialog
+				selectedDatasets = selectDatasets(datasets);
+			}
+			else{
+				logger.info("Using automatic selection");
+				selectedDatasets = new DatasetSelection();
+				for(Dataset dataset: datasets){
+					if(dataset.getFullName().equals(nameOfDataset)){
+						selectedDatasets.getDatasets().add(dataset);
+						break; // we only support one selection for the time being
+					}
+				}
+				selectedDatasets.setVirtualStack(virtualstack);
+			}
+
+			// TODO to be removed - Workaround virtual stack - keep HDF5 file open at the end
+			close = !selectedDatasets.isVirtualStack();
 
 			// TODO Remove
 			// Hack as a proof of principle
 			if(selectedDatasets.isGroup()){
-				ImageStack stack = null;
 
 				for (Dataset var : selectedDatasets.getDatasets()) {
 					if(stack == null){
@@ -80,11 +123,14 @@ public class HDF5Reader implements PlugIn {
 
 				ImagePlus imp = new ImagePlus(filename, stack);
 				imp.resetDisplayRange();
-				imp.show();
-				return;
+				if(showImage) {
+					imp.show();
+				}
+
+				stacks.add(stack);
+				return stack; // TODO should return stacks instead of stack
 			}
 
 
 			for (Dataset var : selectedDatasets.getDatasets()) {
 
 				// Read dataset attributes and properties
@@ -111,7 +157,8 @@ public class HDF5Reader implements PlugIn {
 
 					Object wholeDataset = var.read();
 
-					ImageStack stack = new ImageStack((int) dimensions[3], (int) dimensions[2]);
+					stack = new ImageStack((int) dimensions[3], (int) dimensions[2]);
+					stacks.add(stack);
 					int stackSize = (int) (dimensions[2] * dimensions[3] * 3);
 					int singleVolumeSize = (int) (dimensions[1] * stackSize);
 					for (int volIDX = 0; volIDX < dimensions[0]; ++volIDX) {
@@ -126,7 +173,9 @@ public class HDF5Reader implements PlugIn {
 					imp = new CompositeImage(imp, CompositeImage.COMPOSITE);
 					imp.setOpenAsHyperStack(true);
 					imp.resetDisplayRange();
-					imp.show();
+					if(showImage) {
+						imp.show();
+					}
 
 				} else if (numberOfDimensions == 4 && dimensions[3] == 3) {
 					logger.info("3D RGB Image");
@@ -141,7 +190,8 @@ public class HDF5Reader implements PlugIn {
 
 					Object wholeDataset = var.read();
 
-					ImageStack stack = new ImageStack((int) dimensions[2], (int) dimensions[1]);
+					stack = new ImageStack((int) dimensions[2], (int) dimensions[1]);
+					stacks.add(stack);
 					int stackSize = (int) (dimensions[1] * dimensions[2] * 3);
 					for (int lev = 0; lev < dimensions[0]; ++lev) {
 						int startIdx = lev * stackSize;
@@ -153,7 +203,9 @@ public class HDF5Reader implements PlugIn {
 					imp = new CompositeImage(imp, CompositeImage.COMPOSITE);
 					imp.setOpenAsHyperStack(true);
 					imp.resetDisplayRange();
-					imp.show();
+					if(showImage) {
+						imp.show();
+					}
 
 				} else if (numberOfDimensions == 4) {
 					logger.info("4D Image (HyperVolume)");
@@ -168,7 +220,8 @@ public class HDF5Reader implements PlugIn {
 
 					Object wholeDataset = var.read();
 
-					ImageStack stack = new ImageStack((int) dimensions[3], (int) dimensions[2]);
+					stack = new ImageStack((int) dimensions[3], (int) dimensions[2]);
+					stacks.add(stack);
 					int size = (int) (dimensions[2] * dimensions[3]);
 					long singleVolumeSize = dimensions[1] * size;
 					for (int volIDX = 0; volIDX < dimensions[0]; ++volIDX) {
@@ -182,7 +235,9 @@ public class HDF5Reader implements PlugIn {
 					imp.setDimensions(1, (int) dimensions[1], (int) dimensions[0]);
 					imp.setOpenAsHyperStack(true);
 					imp.resetDisplayRange();
-					imp.show();
+					if(showImage) {
+						imp.show();
+					}
 
 				} else if (numberOfDimensions == 3 && dimensions[2] == 3) {
 					logger.info("2D RGB Image");
@@ -196,7 +251,8 @@ public class HDF5Reader implements PlugIn {
 
 					Object wholeDataset = var.read();
 
-					ImageStack stack = new ImageStack((int) dimensions[1], (int) dimensions[0]);
+					stack = new ImageStack((int) dimensions[1], (int) dimensions[0]);
+					stacks.add(stack);
 					addSliceRGB(stack, wholeDataset, (int) dimensions[0], (int) dimensions[1]);
 
 					ImagePlus imp = new ImagePlus(filename + " " + datasetName, stack);
@@ -204,13 +260,13 @@ public class HDF5Reader implements PlugIn {
 					imp = new CompositeImage(imp, CompositeImage.COMPOSITE);
 					imp.setOpenAsHyperStack(true);
 					imp.resetDisplayRange();
-					imp.show();
+					if(showImage) {
+						imp.show();
+					}
 
 				} else if (numberOfDimensions == 3) {
 					logger.info("3D Image");
 
-					ImageStack stack;
-
 					if(selectedDatasets.isVirtualStack()){
 						logger.info("Use virtual stack");
 						stack = new VirtualStackHDF5(file, var);
@@ -278,21 +334,27 @@ public class HDF5Reader implements PlugIn {
 						}
 					}
 
+					stacks.add(stack);
 					ImagePlus imp = new ImagePlusHDF5(filename + " " + datasetName, stack);
 					imp.resetDisplayRange();
-					imp.show();
+					if(showImage) {
+						imp.show();
+					}
 
 				} else if (numberOfDimensions == 2) {
 					logger.info("2D Image");
 
 					Object wholeDataset = var.read();
 
-					ImageStack stack = new ImageStack((int) dimensions[1], (int) dimensions[0]);
+					stack = new ImageStack((int) dimensions[1], (int) dimensions[0]);
+					stacks.add(stack);
 					addSlice(stack, wholeDataset);
 
 					ImagePlus imp = new ImagePlus(filename + " " + datasetName, stack);
 					imp.resetDisplayRange();
-					imp.show();
+					if(showImage) {
+						imp.show();
+					}
 
 				} else {
 					IJ.showStatus("Variable Dimension " + numberOfDimensions + " not supported");
@@ -319,6 +381,8 @@ public class HDF5Reader implements PlugIn {
 		}
 
 		IJ.showProgress(1.0);
+
+		return stack; // TODO should return stacks instead of stack
 	}
 
 	/**
@@ -353,7 +417,6 @@ public class HDF5Reader implements PlugIn {
 		return selectedDatasets;
 	}
 
-
 	/**
 	 * Add slice to image stack
 	 * @param stack Stack to add slice
@@ -433,4 +496,22 @@ public class HDF5Reader implements PlugIn {
 		stack.addSlice(null, g);
 		stack.addSlice(null, b);
 	}
+
+	public static Map<String,String> parseArguments(String arg){
+
+		// ImageJ arguments look something like this: "para1=value1 para2=value2 ....."
+		Map<String,String> map = new HashMap<>();
+		arg = arg.trim();
+		for(String argument: arg.split("\\s+")){
+			String[] entry = argument.split("=");
+			if(entry.length==2) {
+				map.put(entry[0], entry[1]);
+			}
+			else{
+				// ignore
+				logger.warning("Cannot parse argument " + argument + " - Ignore");
+			}
+		}
+		return map;
+	}
 }
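The net effect of the HDF5Reader changes is a programmatic entry point that skips the file dialog. Below is a minimal sketch of calling the new `open` overload from Java; the file path and dataset name are placeholders, and `showImage = false` (the new sixth parameter) keeps ImageJ from displaying the result:

```java
import ch.psi.imagej.hdf5.HDF5Reader;
import ij.ImageStack;

public class HeadlessOpenSketch {
	public static void main(String[] args) {
		HDF5Reader reader = new HDF5Reader();
		// Non-interactive: no OpenDialog; the dataset is selected by its full HDF5 name
		// and loaded as a virtual stack. showImage = false suppresses imp.show().
		ImageStack stack = reader.open("", false, "/path/to/data.h5", "/exchange/data", true, false);
		if (stack != null) {
			System.out.println("Loaded " + stack.getSize() + " slices");
		}
	}
}
```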
src/main/java/ch/psi/imagej/hdf5/ImagePlusHDF5.java

@@ -29,32 +29,32 @@ public class ImagePlusHDF5 extends ImagePlus {
 
 			@Override
 			public void windowOpened(WindowEvent e) {
-				logger.info("");
+				// logger.info("");
 			}
 
 			@Override
 			public void windowIconified(WindowEvent e) {
-				logger.info("");
+				// logger.info("");
 			}
 
 			@Override
 			public void windowDeiconified(WindowEvent e) {
-				logger.info("");
+				// logger.info("");
 			}
 
 			@Override
 			public void windowDeactivated(WindowEvent e) {
-				logger.info("");
+				// logger.info("");
 			}
 
 			@Override
 			public void windowClosing(WindowEvent e) {
-				logger.info("Closing");
+				// logger.info("Closing");
 			}
 
 			@Override
 			public void windowClosed(WindowEvent e) {
-				logger.info("Closed");
+				// logger.info("Closed");
 
 				if(stack!=null){
 					stack.close();
@@ -63,7 +63,7 @@ public class ImagePlusHDF5 extends ImagePlus {
 
 			@Override
 			public void windowActivated(WindowEvent e) {
-				logger.info("");
+				// logger.info("");
 			}
 		});
 	}
src/test/java/ch/psi/imagej/hdf5/HDF5ReaderTest.java (new file): 22 lines

@@ -0,0 +1,22 @@
package ch.psi.imagej.hdf5;

import org.junit.Test;

import java.util.Map;

import static org.junit.Assert.*;

/**
 *
 */
public class HDF5ReaderTest {
	@Test
	public void parseArguments() throws Exception {

		Map map = HDF5Reader.parseArguments("para1=value1 para2=value2 PARA=VAL");
		assertTrue(map.get("para1").equals("value1"));
		assertTrue(map.get("para2").equals("value2"));
		assertTrue(map.get("PARA").equals("VAL"));
	}

}
src/test/java/ch/psi/imagej/hdf5/HDF5UtilitiesTest.java

@@ -2,6 +2,7 @@ package ch.psi.imagej.hdf5;
 
 import static org.junit.Assert.*;
 
+import ij.IJ;
 import org.junit.Test;
 
 public class HDF5UtilitiesTest {
@@ -22,4 +23,14 @@ public class HDF5UtilitiesTest {
 		assertEquals(gdescriptor, "three");
 	}
 
+	@Test
+	public void testOpen() {
+		IJ.run("HDF5...");
+		String descriptor = "/test/one/two/three";
+		String gdescriptor = HDF5Utilities.getDatasetName(descriptor);
+		System.out.println(gdescriptor);
+		assertEquals(gdescriptor, "three");
+	}
+
 }
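The javadoc added to HDF5Reader documents an `arg` format of `para1=value1 para2=value2` with `open=<path>` and `dataset=...` as the supported keys, and `testOpen` above drives the plugin via `IJ.run("HDF5...")`. Combining the two, a parameterized invocation might look like the sketch below; note this is speculative, since the `parseArguments` call in `open()` is still commented out in this changeset:

```java
import ij.IJ;

public class RunPluginSketch {
	public static void main(String[] args) {
		// Hypothetical: pass arguments in the documented "key=value" format.
		// The argument wiring is not yet active in this changeset.
		IJ.run("HDF5...", "open=/path/to/data.h5 dataset=/exchange/data");
	}
}
```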