ATEST-81:

- correct build setup
- add javadoc
Author: Zellweger Christof Ralf
Date:   2015-06-25 12:36:43 +02:00
parent b0af28b3e0
commit 899d50492c
5 changed files with 92 additions and 62 deletions

View File

@@ -31,7 +31,6 @@ applicationDefaultJvmArgs = [
 //}
 dependencies {
-    compile (project(':ch.psi.daq.cassandra'))
     compile (project(':ch.psi.daq.hazelcast'))
     compile 'org.springframework.boot:spring-boot-starter-web:1.2.4.RELEASE'
     compile 'com.google.code.gson:gson:2+'

View File

@@ -38,12 +38,8 @@ public class DaqRestController {
   @Autowired
   private QueryProcessor queryProcessor;
-  /**
-   *
-   * @param query
-   * @param res
-   * @throws IOException
-   */
   @RequestMapping(value = "/pulserange")
   public void pulseRange(@RequestBody PulseRangeQuery query, HttpServletResponse res) throws IOException {
@@ -53,12 +49,6 @@ public class DaqRestController {
   }
-  /**
-   *
-   * @param query
-   * @param res
-   * @throws IOException
-   */
   @RequestMapping(value = "/timerange")
   public void timeRange(@RequestBody TimeRangeQuery query, HttpServletResponse res) throws IOException {
@@ -67,12 +57,7 @@ public class DaqRestController {
     executeQuery(query, res);
   }
-  /**
-   *
-   * @param query
-   * @param res
-   * @throws IOException
-   */
   private void executeQuery(AbstractQuery query, HttpServletResponse res) throws IOException {
     // all the magic happens here
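For orientation, here is a minimal client-side sketch of how the two endpoints above might be exercised once the service is running. It is an illustration only: the base URL, the JSON values, and the queryRange shape are assumptions; only the endpoint path and the property names (taken from the @JsonProperty annotations in the query classes below) come from this commit.

import org.springframework.http.HttpEntity;
import org.springframework.http.HttpHeaders;
import org.springframework.http.MediaType;
import org.springframework.web.client.RestTemplate;

public class RestQueryClientSketch {
  public static void main(String[] args) {
    // Hypothetical base URL; the real host and port depend on the deployment.
    String baseUrl = "http://localhost:8080";

    // Property names follow the @JsonProperty annotations of PulseRangeQuery;
    // the concrete values (and the queryRange shape) are made up for illustration.
    String body = "{"
        + "\"channels\": [\"channel1\"],"
        + "\"fields\": [\"pulseId\", \"value\"],"
        + "\"binningStrategy\": \"count\","
        + "\"binDuration\": 100,"
        + "\"aggregateChannels\": false,"
        + "\"aggregations\": [],"
        + "\"queryRange\": {\"startPulseId\": 0, \"endPulseId\": 1000}"
        + "}";

    HttpHeaders headers = new HttpHeaders();
    headers.setContentType(MediaType.APPLICATION_JSON);

    // POST the JSON query to the /pulserange endpoint and print whatever comes back.
    RestTemplate rest = new RestTemplate();
    String response = rest.postForObject(baseUrl + "/pulserange", new HttpEntity<>(body, headers), String.class);
    System.out.println(response);
  }
}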

View File

@@ -9,6 +9,12 @@ import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 import org.springframework.util.Assert;
+import com.fasterxml.jackson.annotation.JsonCreator;
+import com.fasterxml.jackson.annotation.JsonProperty;
+import com.fasterxml.jackson.annotation.JsonSubTypes;
+import com.fasterxml.jackson.annotation.JsonSubTypes.Type;
+import com.fasterxml.jackson.annotation.JsonTypeInfo;
 import ch.psi.daq.cassandra.reader.Ordering;
 import ch.psi.daq.hazelcast.query.Aggregation;
 import ch.psi.daq.hazelcast.query.AggregationType;
@@ -17,12 +23,6 @@ import ch.psi.daq.hazelcast.query.bin.BinningStrategy;
 import ch.psi.daq.hazelcast.query.bin.BinningStrategyFactory;
 import ch.psi.daq.hazelcast.query.range.QueryRange;
-import com.fasterxml.jackson.annotation.JsonCreator;
-import com.fasterxml.jackson.annotation.JsonProperty;
-import com.fasterxml.jackson.annotation.JsonSubTypes;
-import com.fasterxml.jackson.annotation.JsonSubTypes.Type;
-import com.fasterxml.jackson.annotation.JsonTypeInfo;
 /**
  *
  * @author zellweger_c
@@ -38,9 +38,9 @@ import com.fasterxml.jackson.annotation.JsonTypeInfo;
     @Type(value = TimeRangeQuery.class, name = "timerange"),
 })
 public abstract class AbstractQuery implements Query {
   private static Logger logger = LoggerFactory.getLogger(AbstractQuery.class);
   private List<String> channels;
   private LinkedHashSet<String> fields;
@@ -60,12 +60,23 @@ public abstract class AbstractQuery implements Query {
   private QueryRange queryRange;
   /**
+   * Constructor.
    *
    * @param ordering whether to add a 'orderBy' clause into the database query
    * @param channels all the channelIds (channel names) we want to query
    * @param fields the fields (who map to fields in the DB) we are interested in returning to the
    *        client, needs to be in insertion order (hence the {@link LinkedHashSet} type)
-   * @param queryRange TODO
+   * @param binningStrategyEnum enum that maps the user's String to a concrete
+   *        {@link BinningStrategy} implementation
+   * @param binLengthOrCount depending on the chosen binning strategy, this field defines either the
+   *        count (how many pulse ids are to be put inside 1 bin) or the time frame for 1 bin
+   * @param aggregateChannels whether aggregation will include all channels, default is on a
+   *        per-channel basis
+   * @param aggregationType defines whether aggregation takes place in an index- or value-based
+   *        manner
+   * @param aggregations list of aggregations / statistics to calculate, e.g. min, max and average
+   * @param queryRange object containing the ranges for either pulse-based queries or time-based
+   *        queries
    */
   @JsonCreator
   public AbstractQuery(
@@ -75,7 +86,7 @@ public abstract class AbstractQuery implements Query {
       @JsonProperty(value = "channels") List<String> channels,
       @JsonProperty(value = "fields") LinkedHashSet<String> fields,
       @JsonProperty(value = "binningStrategy") BinningStrategyEnum binningStrategyEnum,
-      @JsonProperty(value = "binDuration") long lengthOrCount,
+      @JsonProperty(value = "binDuration") long binLengthOrCount,
       @JsonProperty(value = "aggregateChannels") boolean aggregateChannels,
       @JsonProperty(value = "aggregationType") AggregationType aggregationType,
       @JsonProperty(value = "aggregations") List<Aggregation> aggregations,
@@ -93,23 +104,24 @@ public abstract class AbstractQuery implements Query {
     this.channels = channels;
     this.fields = fields;
-    this.binningStrategyEnum = binningStrategyEnum; // can be null: default then will be BinCountBinningStrategy
+    this.binningStrategyEnum = binningStrategyEnum; // can be null: default then will be
+                                                    // BinCountBinningStrategy
     if (binningStrategyEnum != null) {
       switch (binningStrategyEnum) {
         case count:
-          this.binningStrategy = BinningStrategyFactory.getBinningStrategy(getQueryRange(), (int) lengthOrCount);
+          this.binningStrategy = BinningStrategyFactory.getBinningStrategy(getQueryRange(), (int) binLengthOrCount);
           break;
         case length:
-          this.binningStrategy = BinningStrategyFactory.getBinningStrategy(getQueryRange(), lengthOrCount);
+          this.binningStrategy = BinningStrategyFactory.getBinningStrategy(getQueryRange(), binLengthOrCount);
           break;
         default:
           logger.warn("No binning strategy has been set. Selecting BinningStrategyBinCount.");
-          this.binningStrategy = BinningStrategyFactory.getBinningStrategy(getQueryRange(), (int) lengthOrCount);
+          this.binningStrategy = BinningStrategyFactory.getBinningStrategy(getQueryRange(), (int) binLengthOrCount);
       }
     } else {
-      this.binningStrategy = BinningStrategyFactory.getBinningStrategy(getQueryRange(), (int) lengthOrCount);
+      this.binningStrategy = BinningStrategyFactory.getBinningStrategy(getQueryRange(), (int) binLengthOrCount);
     }
   }
@@ -138,6 +150,7 @@ public abstract class AbstractQuery implements Query {
   }
+  @Override
   public boolean isAggregateChannels() {
     return aggregateChannels;
   }
@@ -145,7 +158,7 @@ public abstract class AbstractQuery implements Query {
   public BinningStrategyEnum getBinningStrategyEnum() {
     return binningStrategyEnum;
   }
   @Override
   public BinningStrategy getBinningStrategy() {
     return binningStrategy;
@@ -158,7 +171,7 @@ public abstract class AbstractQuery implements Query {
   public List<Aggregation> getAggregations() {
     return aggregations;
   }
   /**
    * {@inheritDoc}
    */
@@ -166,7 +179,7 @@ public abstract class AbstractQuery implements Query {
   public QueryRange getQueryRange() {
     return queryRange;
   }
   public void setQueryRange(QueryRange queryRange) {
     this.queryRange = queryRange;
   }
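The Jackson annotations moved to the top of AbstractQuery (@JsonTypeInfo and @JsonSubTypes with the names "pulserange" and "timerange") are what let Jackson pick the correct subclass when a query arrives as JSON. The self-contained toy sketch below shows that mechanism in isolation; the classes and the "type" discriminator property are invented for the example, since the actual @JsonTypeInfo settings on AbstractQuery lie outside the visible hunk.

import com.fasterxml.jackson.annotation.JsonSubTypes;
import com.fasterxml.jackson.annotation.JsonSubTypes.Type;
import com.fasterxml.jackson.annotation.JsonTypeInfo;
import com.fasterxml.jackson.databind.ObjectMapper;

public class JsonSubTypesDemo {

  // Same annotation pattern as on AbstractQuery: a name-based discriminator property
  // ("type" here, purely as an example) selects the concrete subclass.
  @JsonTypeInfo(use = JsonTypeInfo.Id.NAME, include = JsonTypeInfo.As.PROPERTY, property = "type")
  @JsonSubTypes({
      @Type(value = PulseRange.class, name = "pulserange"),
      @Type(value = TimeRange.class, name = "timerange")
  })
  public static abstract class Range {
    public long start;
    public long end;
  }

  public static class PulseRange extends Range {}

  public static class TimeRange extends Range {}

  public static void main(String[] args) throws Exception {
    ObjectMapper mapper = new ObjectMapper();
    // Jackson reads the discriminator and instantiates the subclass registered under "timerange".
    Range r = mapper.readValue("{\"type\":\"timerange\",\"start\":1,\"end\":2}", Range.class);
    System.out.println(r.getClass().getSimpleName()); // prints "TimeRange"
  }
}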

View File

@@ -3,21 +3,42 @@ package ch.psi.daq.rest.queries;
 import java.util.LinkedHashSet;
 import java.util.List;
-import org.apache.commons.lang.builder.ToStringBuilder;
+import org.apache.commons.lang3.builder.ToStringBuilder;
+import com.fasterxml.jackson.annotation.JsonCreator;
+import com.fasterxml.jackson.annotation.JsonProperty;
 import ch.psi.daq.cassandra.reader.Ordering;
 import ch.psi.daq.hazelcast.query.Aggregation;
 import ch.psi.daq.hazelcast.query.AggregationType;
+import ch.psi.daq.hazelcast.query.bin.BinningStrategy;
 import ch.psi.daq.hazelcast.query.range.QueryRange;
-import com.fasterxml.jackson.annotation.JsonCreator;
-import com.fasterxml.jackson.annotation.JsonProperty;
 /**
  *
  */
 public class PulseRangeQuery extends AbstractQuery {
+  /**
+   * Constructor.
+   *
+   * @param ordering whether to add a 'orderBy' clause into the database query
+   * @param channels all the channelIds (channel names) we want to query
+   * @param fields the fields (who map to fields in the DB) we are interested in returning to the
+   *        client, needs to be in insertion order (hence the {@link LinkedHashSet} type)
+   * @param binningStrategyEnum enum that maps the user's String to a concrete
+   *        {@link BinningStrategy} implementation
+   * @param binLengthOrCount depending on the chosen binning strategy, this field defines either the
+   *        count (how many pulse ids are to be put inside 1 bin) or the time frame for 1 bin
+   * @param aggregateChannels whether aggregation will include all channels, default is on a
+   *        per-channel basis
+   * @param aggregationType defines whether aggregation takes place in an index- or value-based
+   *        manner
+   * @param aggregations list of aggregations / statistics to calculate, e.g. min, max and average
+   * @param queryRange object containing the ranges for either pulse-based queries or time-based
+   *        queries
+   */
   @JsonCreator
   public PulseRangeQuery(
       // note that those annotations are needed for the polymorphic
@@ -26,13 +47,13 @@ public class PulseRangeQuery extends AbstractQuery {
       @JsonProperty(value = "channels") List<String> channels,
       @JsonProperty(value = "fields") LinkedHashSet<String> fields,
       @JsonProperty(value = "binningStrategy") BinningStrategyEnum binningStrategyEnum,
-      @JsonProperty(value = "binDuration") long binDurationOrBinCount,
+      @JsonProperty(value = "binDuration") long binLengthOrCount,
       @JsonProperty(value = "aggregateChannels") boolean aggregateChannels,
       @JsonProperty(value = "aggregationType") AggregationType aggregationType,
       @JsonProperty(value = "aggregations") List<Aggregation> aggregations,
       @JsonProperty(value = "queryRange") QueryRange queryRange) {
-    super(ordering, channels, fields, binningStrategyEnum, binDurationOrBinCount, aggregateChannels, aggregationType,
+    super(ordering, channels, fields, binningStrategyEnum, binLengthOrCount, aggregateChannels, aggregationType,
         aggregations, queryRange);
   }

View File

@@ -6,18 +6,19 @@ import java.util.Date;
 import java.util.LinkedHashSet;
 import java.util.List;
-import org.apache.commons.lang.builder.ToStringBuilder;
+import org.apache.commons.lang3.builder.ToStringBuilder;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
+import com.fasterxml.jackson.annotation.JsonCreator;
+import com.fasterxml.jackson.annotation.JsonProperty;
 import ch.psi.daq.cassandra.reader.Ordering;
 import ch.psi.daq.hazelcast.query.Aggregation;
 import ch.psi.daq.hazelcast.query.AggregationType;
+import ch.psi.daq.hazelcast.query.bin.BinningStrategy;
 import ch.psi.daq.hazelcast.query.range.QueryRange;
-import com.fasterxml.jackson.annotation.JsonCreator;
-import com.fasterxml.jackson.annotation.JsonProperty;
 public class TimeRangeQuery extends AbstractQuery {
@@ -26,19 +27,28 @@ public class TimeRangeQuery extends AbstractQuery {
   private static Logger logger = LoggerFactory.getLogger(TimeRangeQuery.class);
   /**
+   * Constructor.
    *
    * @param ordering whether to add a 'orderBy' clause into the database query
-   * @param channelIds all the sourceIds (channel names) we want to query
+   * @param channels all the channelIds (channel names) we want to query
    * @param fields the fields (who map to fields in the DB) we are interested in returning to the
    *        client, needs to be in insertion order (hence the {@link LinkedHashSet} type)
-   * @param binningStrategyEnum
-   * @param binDurationOrBinCount
-   * @param aggregateChannels
-   * @param aggregationType
-   * @param aggregations
-   * @param queryRange
-   * @param startDateTime
-   * @param endDateTime
+   * @param binningStrategyEnum enum that maps the user's String to a concrete
+   *        {@link BinningStrategy} implementation
+   * @param binLengthOrCount depending on the chosen binning strategy, this field defines either the
+   *        count (how many pulse ids are to be put inside 1 bin) or the time frame for 1 bin
+   * @param aggregateChannels whether aggregation will include all channels, default is on a
+   *        per-channel basis
+   * @param aggregationType defines whether aggregation takes place in an index- or value-based
+   *        manner
+   * @param aggregations list of aggregations / statistics to calculate, e.g. min, max and average
+   * @param queryRange object containing the ranges for either pulse-based queries or time-based
+   *        queries
+   * @param startDateTime if set, the date string (format is:
+   *        {@link TimeRangeQuery#DATE_FORMAT_STRING}) will be parsed and converted into
+   *        milliseconds
+   * @param endDateTime if set, the date string (format is: {@link TimeRangeQuery#DATE_FORMAT_STRING})
+   *        will be parsed and converted into milliseconds
    */
   @JsonCreator
   public TimeRangeQuery(
@@ -48,7 +58,7 @@ public class TimeRangeQuery extends AbstractQuery {
       @JsonProperty(value = "channels") List<String> channels,
       @JsonProperty(value = "fields") LinkedHashSet<String> fields,
       @JsonProperty(value = "binningStrategy") BinningStrategyEnum binningStrategyEnum,
-      @JsonProperty(value = "binDuration") long binDurationOrBinCount,
+      @JsonProperty(value = "binDuration") long binLengthOrCount,
       @JsonProperty(value = "aggregateChannels") boolean aggregateChannels,
      @JsonProperty(value = "aggregationType") AggregationType aggregationType,
       @JsonProperty(value = "aggregations") List<Aggregation> aggregations,
@@ -56,15 +66,17 @@ public class TimeRangeQuery extends AbstractQuery {
       @JsonProperty(value = "startDateTime") String startDateTime,
       @JsonProperty(value = "endDateTime") String endDateTime) {
-    super(ordering, channels, fields, binningStrategyEnum, binDurationOrBinCount, aggregateChannels, aggregationType, aggregations, queryRange);
+    super(ordering, channels, fields, binningStrategyEnum, binLengthOrCount, aggregateChannels, aggregationType,
+        aggregations, queryRange);
     if (startDateTime != null && endDateTime != null) {
       logger.info("startDateTime and endDateTime specified. This takes precedence over the start / end fields.");
       try {
         Date startDate = DATE_FORMAT.parse(startDateTime);
         Date endDate = DATE_FORMAT.parse(endDateTime);
-        getQueryRange().setTimeRange(startDate.getTime(), queryRange.getStartNanos(), endDate.getTime(), queryRange.getEndNanos());
+        getQueryRange().setTimeRange(startDate.getTime(), queryRange.getStartNanos(), endDate.getTime(),
+            queryRange.getEndNanos());
       } catch (ParseException e) {
         logger.error("Parsing the start- and/or endDate was unsuccessful. "
             + "The format must be '" + DATE_FORMAT_STRING + "'", e);