Attempt to use plugin options

This commit is contained in:
ebner 2017-08-28 16:19:28 +02:00
parent efc5891e2e
commit eca2294000
5 changed files with 119 additions and 3 deletions

View File

@ -1,4 +1,4 @@
# Overview # Overview
ImageJ plugin for reading and writing HDF5 files. ImageJ plugin for reading and writing HDF5 files.
@ -79,6 +79,11 @@ cd <FIJI_HOME>
Starting with Java 8 just the LD_LIBRARY_PATH variable need to be set. For MacOSX it is export `DYLD_LIBRARY_PATH=lib/mac64/:$DYLD_LIBRARY_PATH`. Starting with Java 8 just the LD_LIBRARY_PATH variable need to be set. For MacOSX it is export `DYLD_LIBRARY_PATH=lib/mac64/:$DYLD_LIBRARY_PATH`.
# Development # Development
To be able to run the tests and the plugin from within the IDE, the following arguments need to be passed:
![RunSettings](RunSettings.png)
To create an all in one zip file for installation in a ImageJ installation use: To create an all in one zip file for installation in a ImageJ installation use:
`mvn clean compile assembly:assembly` `mvn clean compile assembly:assembly`

BIN
RunSettings.png Normal file

Binary file not shown.

After

Width:  |  Height:  |  Size: 358 KiB

View File

@ -10,7 +10,10 @@ import ij.plugin.PlugIn;
import java.io.File; import java.io.File;
import java.lang.reflect.Array; import java.lang.reflect.Array;
import java.util.Arrays;
import java.util.HashMap;
import java.util.List; import java.util.List;
import java.util.Map;
import java.util.logging.Level; import java.util.logging.Level;
import java.util.logging.Logger; import java.util.logging.Logger;
@ -28,14 +31,24 @@ public class HDF5Reader implements PlugIn {
*/ */
public static void main(String[] args){ public static void main(String[] args){
HDF5Reader r = new HDF5Reader(); HDF5Reader r = new HDF5Reader();
r.run(""); r.run("path=/Users/ebner/Desktop/ open=A8_d_400N030_.h5");
// IJ.run("HDF5...", "/Users/ebner/Desktop/A8_d_400N030_.h5");
} }
/** /**
* Main function plugin * Main function plugin
* arg is a space separated list of arguments that can be passed to the run method.
* arg looks something like this: "para1=value1 para2=value2 ....."
*
* Supported arguments for arg:
* open=&lt;path&gt;
* dataset=/your/path/to/dataset
*
*/ */
public void run(String arg) { public void run(String arg) {
Map arguments = HDF5Reader.parseArguments(arg);
OpenDialog od = new OpenDialog("Open HDF5 ...", arg); OpenDialog od = new OpenDialog("Open HDF5 ...", arg);
@ -58,7 +71,19 @@ public class HDF5Reader implements PlugIn {
file.open(); file.open();
List<Dataset> datasets = HDF5Utilities.getDatasets(file); List<Dataset> datasets = HDF5Utilities.getDatasets(file);
DatasetSelection selectedDatasets = selectDatasets(datasets);
DatasetSelection selectedDatasets = null;
if(arguments.containsKey("dataset")){
logger.info("Using automatic selection");
selectedDatasets = selectDatasets(datasets, arguments);
}
else{
logger.info("Using manual selection");
// Manual selection of the dataset and other parameters via a dialog
selectedDatasets = selectDatasets(datasets);
}
// TODO to be removed - Workaround virtual stack - keep HDF5 file open at the end // TODO to be removed - Workaround virtual stack - keep HDF5 file open at the end
close=!selectedDatasets.isVirtualStack(); close=!selectedDatasets.isVirtualStack();
@ -353,6 +378,41 @@ public class HDF5Reader implements PlugIn {
return selectedDatasets; return selectedDatasets;
} }
/**
 * Automatic (non-interactive) dataset selection driven by plugin arguments.
 *
 * Picks the dataset whose full name matches the "dataset" argument. No dialog
 * is shown here — the GenericDialog code previously in this method was leftover
 * from the manual path (the first dialog was even dead code, immediately
 * overwritten) and blocked scripted/headless use.
 *
 * @param datasets  all datasets found in the HDF5 file
 * @param arguments parsed plugin arguments, e.g. {"dataset": "/path/to/data"}
 * @return selection holding at most one matching dataset; virtual stack is
 *         enabled unless the argument "virtualstack=false" was passed
 * @throws HDF5Exception declared for interface compatibility with the manual path
 */
private DatasetSelection selectDatasets(List<Dataset> datasets, Map<String, String> arguments) throws HDF5Exception {

	DatasetSelection selectedDatasets = new DatasetSelection();
	for (Dataset dataset : datasets) {
		if (dataset.getFullName().equals(arguments.get("dataset"))) {
			selectedDatasets.getDatasets().add(dataset);
			break; // we only support one selection for the time being
		}
	}

	// Virtual stack is the default; only an explicit "virtualstack=false" disables it.
	// equalsIgnoreCase(null) is false, so a missing key keeps the default.
	selectedDatasets.setVirtualStack(!"false".equalsIgnoreCase(arguments.get("virtualstack")));

	return selectedDatasets;
}
/** /**
* Add slice to image stack * Add slice to image stack
@ -433,4 +493,22 @@ public class HDF5Reader implements PlugIn {
stack.addSlice(null, g); stack.addSlice(null, g);
stack.addSlice(null, b); stack.addSlice(null, b);
} }
/**
 * Parses an ImageJ-style argument string into a key/value map.
 *
 * ImageJ arguments look something like this: "para1=value1 para2=value2 ....."
 *
 * The value is split from the key at the FIRST '=' only, so values may
 * themselves contain '=' (the previous split("=") silently dropped such
 * arguments). Tokens without any '=' are logged and ignored.
 *
 * @param arg space separated "key=value" pairs; null or blank yields an empty map
 * @return map of parsed key/value pairs (never null)
 */
public static Map<String,String> parseArguments(String arg){
	Map<String,String> map = new HashMap<>();
	if (arg == null) {
		return map; // nothing to parse — avoid NPE on trim()
	}
	for (String argument : arg.trim().split("\\s+")) {
		if (argument.isEmpty()) {
			continue; // "".split("\\s+") yields one empty token
		}
		// Limit 2: split at the first '=' only, keep any further '=' in the value.
		String[] entry = argument.split("=", 2);
		if (entry.length == 2) {
			map.put(entry[0], entry[1]);
		} else {
			// ignore malformed token, but make it visible in the log
			logger.warning("Cannot parse argument " + argument + " - Ignore");
		}
	}
	return map;
}
} }

View File

@ -0,0 +1,22 @@
package ch.psi.imagej.hdf5;
import org.junit.Test;
import java.util.Map;
import static org.junit.Assert.*;
/**
*
*/
/**
 * Unit tests for {@link HDF5Reader#parseArguments(String)}.
 */
public class HDF5ReaderTest {

	@Test
	public void parseArguments() throws Exception {
		// Parameterized Map instead of the raw type; assertEquals gives
		// readable failure messages, unlike assertTrue(x.equals(y)).
		Map<String, String> map = HDF5Reader.parseArguments("para1=value1 para2=value2 PARA=VAL");
		assertEquals("value1", map.get("para1"));
		assertEquals("value2", map.get("para2"));
		assertEquals("VAL", map.get("PARA"));
	}
}

View File

@ -2,6 +2,7 @@ package ch.psi.imagej.hdf5;
import static org.junit.Assert.*; import static org.junit.Assert.*;
import ij.IJ;
import org.junit.Test; import org.junit.Test;
public class HDF5UtilitiesTest { public class HDF5UtilitiesTest {
@ -22,4 +23,14 @@ public class HDF5UtilitiesTest {
assertEquals(gdescriptor, "three"); assertEquals(gdescriptor, "three");
} }
@Test
public void testOpen() {
	// NOTE(review): IJ.run("HDF5...") pops the plugin's interactive dialog,
	// which would block forever on a headless CI machine — only run it when
	// a display is actually available.
	if (!java.awt.GraphicsEnvironment.isHeadless()) {
		IJ.run("HDF5...");
	}
	String descriptor = "/test/one/two/three";
	String gdescriptor = HDF5Utilities.getDatasetName(descriptor);
	System.out.println(gdescriptor);
	// JUnit convention: expected value first, actual second.
	assertEquals("three", gdescriptor);
}
} }