Refactored classes and build configuration so that the plugin jar can now be
built with "clean compile assembly:single"
ebner 2014-02-25 21:24:09 +01:00
parent c0dd33436e
commit ef4135b9ad
8 changed files with 49 additions and 23 deletions


@@ -24,9 +24,5 @@
 			<attribute name="maven.pomderived" value="true"/>
 		</attributes>
 	</classpathentry>
-	<classpathentry kind="lib" path="hdf-java/jhdf.jar"/>
-	<classpathentry kind="lib" path="hdf-java/jhdf5.jar"/>
-	<classpathentry kind="lib" path="hdf-java/jhdf5obj.jar"/>
-	<classpathentry kind="lib" path="hdf-java/jhdfobj.jar"/>
 	<classpathentry kind="output" path="target/classes"/>
 </classpath>

hdf-java/Readme.md (new file, 1 added line)

@@ -0,0 +1 @@
+jar files were uploaded to http://yoke.psi.ch/artifactory
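
Since the pom.xml below now consumes the hdf jars as regular Maven dependencies (hdf5:jhdf*, version 1.0.0), a <repositories> entry along the following lines would let Maven resolve them from that Artifactory instance. This is only a sketch: the repository id and the "libs-release" path are assumptions, not part of this commit.

<!-- Sketch only: repository id and "libs-release" path are assumed, not taken from the commit. -->
<repositories>
  <repository>
    <id>psi-artifactory</id>
    <url>http://yoke.psi.ch/artifactory/libs-release</url>
  </repository>
</repositories>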

pom.xml (29 added lines)

@@ -12,6 +12,35 @@
 		<version>1.46</version>
 		<scope>provided</scope>
 	</dependency>
+	<dependency>
+		<groupId>hdf5</groupId>
+		<artifactId>jhdf</artifactId>
+		<version>1.0.0</version>
+		<!-- <scope>system</scope>
+		<systemPath>${project.basedir}/hdf-java/jhdf.jar</systemPath> -->
+	</dependency>
+	<dependency>
+		<groupId>hdf5</groupId>
+		<artifactId>jhdfobj</artifactId>
+		<version>1.0.0</version>
+		<!-- <scope>system</scope>
+		<systemPath>${project.basedir}/hdf-java/jhdfobj.jar</systemPath> -->
+	</dependency>
+	<dependency>
+		<groupId>hdf5</groupId>
+		<artifactId>jhdf5</artifactId>
+		<version>1.0.0</version>
+		<!-- <scope>system</scope>
+		<systemPath>${project.basedir}/hdf-java/jhdf5.jar</systemPath> -->
+	</dependency>
+	<dependency>
+		<groupId>hdf5</groupId>
+		<artifactId>jhdf5obj</artifactId>
+		<version>1.0.0</version>
+		<!-- <scope>system</scope>
+		<systemPath>${project.basedir}/hdf-java/jhdf5obj.jar</systemPath> -->
+	</dependency>
 	</dependencies>
 	<build>
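
The <build> section that makes "clean compile assembly:single" produce a self-contained plugin jar is cut off in this hunk. A typical maven-assembly-plugin setup for that goal looks roughly like the following sketch; it illustrates the standard jar-with-dependencies configuration, not the configuration actually committed here.

<!-- Sketch of a standard jar-with-dependencies setup; not the committed configuration. -->
<build>
  <plugins>
    <plugin>
      <groupId>org.apache.maven.plugins</groupId>
      <artifactId>maven-assembly-plugin</artifactId>
      <configuration>
        <descriptorRefs>
          <descriptorRef>jar-with-dependencies</descriptorRef>
        </descriptorRefs>
      </configuration>
    </plugin>
  </plugins>
</build>

With a configuration like this, "mvn clean compile assembly:single" compiles the sources and packs them together with the hdf5 dependencies into target/<artifactId>-<version>-jar-with-dependencies.jar.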


@@ -6,7 +6,7 @@ import ij.plugin.PlugIn;
 import java.util.regex.*;
 import java.lang.String;
-public class HDF5_Config implements PlugIn {
+public class HDF5Config implements PlugIn {
 	public void run(String arg) {
 		// set default values
@@ -74,7 +74,7 @@ public class HDF5_Config implements PlugIn {
 		// dollarRegexpForGrouping);
 		try {
-			String[] formatTokens = HDF5_GroupedVarnames.parseFormatString(groupVarsByNameFormat, dollarRegexpForGrouping);
+			String[] formatTokens = HDF5GroupedVarnames.parseFormatString(groupVarsByNameFormat, dollarRegexpForGrouping);
 			for (int i = 0; i < formatTokens.length; i++) {
 				System.out.println("tok " + Integer.toString(i) + " : " + formatTokens[i]);
 			}


@@ -6,7 +6,7 @@ import java.util.Arrays;
 import java.util.Iterator;
 import java.util.List;
-public class HDF5_GroupedVarnames {
+public class HDF5GroupedVarnames {
 	private final List<String> matchedVarNames = new ArrayList<String>();
 	private final List<String> unMatchedVarNames = new ArrayList<String>();


@@ -41,7 +41,7 @@ import ncsa.hdf.object.h5.*; // the HDF5 implementation
 import ncsa.hdf.hdf5lib.exceptions.HDF5Exception;
 import ncsa.hdf.hdflib.HDFException;
-public class HDF5_Reader_ implements PlugIn {
+public class HDF5Reader implements PlugIn {
 	public void run(String arg) {
 		// make sure default values for config are written
 		// HDF5_Config.setDefaultsIfNoValueExists();
@@ -81,7 +81,7 @@ public class HDF5_Reader_ implements PlugIn {
 		H5File inFile = null;
 		// define grouping class
-		HDF5_GroupedVarnames groupedVarnames = new HDF5_GroupedVarnames();
+		HDF5GroupedVarnames groupedVarnames = new HDF5GroupedVarnames();
 		boolean loadGroupedVarNames = true;
 		try {
@@ -91,13 +91,13 @@ public class HDF5_Reader_ implements PlugIn {
 			/*-------------------------------------------------------------------
 			 * read HDF5_Config prefs
 			 *-------------------------------------------------------------------*/
-			boolean groupVarsByName = Boolean.getBoolean(HDF5_Config.getDefaultValue("HDF5.groupVarsByName"));
+			boolean groupVarsByName = Boolean.getBoolean(HDF5Config.getDefaultValue("HDF5.groupVarsByName"));
 			groupVarsByName = Prefs.get("HDF5.groupVarsByName", groupVarsByName);
-			boolean showUnmatchedDataSetNames = Boolean.getBoolean(HDF5_Config.getDefaultValue("HDF5.showUnmatchedDataSetNames"));
+			boolean showUnmatchedDataSetNames = Boolean.getBoolean(HDF5Config.getDefaultValue("HDF5.showUnmatchedDataSetNames"));
 			showUnmatchedDataSetNames = Prefs.get("HDF5.showUnmatchedDataSetNames", showUnmatchedDataSetNames);
-			String groupVarsByNameFormatGroup = HDF5_Config.getDefaultValue("HDF5.groupVarsByNameFormatGroup");
+			String groupVarsByNameFormatGroup = HDF5Config.getDefaultValue("HDF5.groupVarsByNameFormatGroup");
 			groupVarsByNameFormatGroup = Prefs.get("HDF5.groupVarsByNameFormatGroup", groupVarsByNameFormatGroup);
 			// TODO: try to read attribute containing format String
@@ -150,16 +150,16 @@ public class HDF5_Reader_ implements PlugIn {
 				System.out.println("File has format string for grouping: " + groupVarsByNameFormat);
 			} else {
 				System.out.println("File has no format string for grouping" + ", using default");
-				groupVarsByNameFormat = HDF5_Config.getDefaultValue("HDF5.groupVarsByNameFormat");
+				groupVarsByNameFormat = HDF5Config.getDefaultValue("HDF5.groupVarsByNameFormat");
 				groupVarsByNameFormat = Prefs.get("HDF5.groupVarsByNameFormat", groupVarsByNameFormat);
 			}
 		} catch (Exception e) {
 			System.out.println("Error occured read format string " + "for grouping, using default");
-			groupVarsByNameFormat = HDF5_Config.getDefaultValue("HDF5.groupVarsByNameFormat");
+			groupVarsByNameFormat = HDF5Config.getDefaultValue("HDF5.groupVarsByNameFormat");
 			groupVarsByNameFormat = Prefs.get("HDF5.groupVarsByNameFormat", groupVarsByNameFormat);
 		}
-		String dollarRegexpForGrouping = HDF5_Config.getDefaultValue("HDF5.dollarRegexpForGrouping");
+		String dollarRegexpForGrouping = HDF5Config.getDefaultValue("HDF5.dollarRegexpForGrouping");
 		dollarRegexpForGrouping = Prefs.get("HDF5.dollarRegexpForGrouping", dollarRegexpForGrouping);
 		/*-------------------------------------------------------------------


@@ -35,7 +35,7 @@ import ncsa.hdf.object.*; // the common object package
 import ncsa.hdf.object.h5.*; // the HDF5 implementation
 import ncsa.hdf.hdf5lib.exceptions.HDF5Exception;
-public class HDF5_Writer_ implements PlugInFilter {
+public class HDF5Writer implements PlugInFilter {
 	private Boolean _batchMode = false;
 	private String _batchFileName = null;
@@ -156,7 +156,7 @@ public class HDF5_Writer_ implements PlugInFilter {
 		if (splitChannels) {
 			// parse format string
-			String[] formatTokens = HDF5_GroupedVarnames.parseFormatString(formatString, "[0-9]+"); // dummy
+			String[] formatTokens = HDF5GroupedVarnames.parseFormatString(formatString, "[0-9]+"); // dummy
 			// regexp
 			long[] channelDims = null;
 			if (nLevs > 1) {


@@ -1,5 +1,5 @@
-File>Import, "HDF5...", ch.psi.imageJ.hdf5.HDF5_Reader_
-File>Save As, "HDF5", ch.psi.imageJ.hdf5.HDF5_Writer_
-Plugins>HDF5, "Load HDF5 File", ch.psi.imageJ.hdf5.HDF5_Reader_
-Plugins>HDF5, "Save HDF5 File", ch.psi.imageJ.hdf5.HDF5_Writer_
-Plugins>HDF5, "HDF5 Preferences", ch.psi.imageJ.hdf5.HDF5_Config
+File>Import, "HDF5...", ch.psi.imagej.hdf5.HDF5Reader
+File>Save As, "HDF5", ch.psi.imagej.hdf5.HDF5Writer
+Plugins>HDF5, "Load HDF5 File", ch.psi.imagej.hdf5.HDF5Reader
+Plugins>HDF5, "Save HDF5 File", ch.psi.imagej.hdf5.HDF5Writer
+Plugins>HDF5, "HDF5 Preferences", ch.psi.imagej.hdf5.HDF5Config