Merge pull request #6 in ST/ch.psi.daq.rest from atest81 to master

By Zellweger Christof Ralf. Via Zellweger Christof Ralf.

Commit 'fad99c2ac3739da5944d5bbc4689ff4e548899c3':

ATEST-81: fixing tests using updated Cassandra startup routine
ATEST-81: correct build setup; add javadoc
ATEST-81: rename and move some components; set up component scanning to work properly; create a HazelcastClient to connect to the Hazelcast cluster and configure it properly
ATEST-81: first draft for distributed querying
@@ -31,7 +31,6 @@ applicationDefaultJvmArgs = [
 //}
 
 dependencies {
-    compile (project(':ch.psi.daq.cassandra'))
     compile (project(':ch.psi.daq.hazelcast'))
     compile 'org.springframework.boot:spring-boot-starter-web:1.2.4.RELEASE'
     compile 'com.google.code.gson:gson:2+'
@@ -1,54 +0,0 @@
-package ch.psi.daq.rest;
-
-import org.springframework.context.annotation.Bean;
-import org.springframework.context.annotation.Configuration;
-
-import ch.psi.daq.common.statistic.StorelessStatistics;
-import ch.psi.daq.domain.cassandra.ChannelEvent;
-import ch.psi.daq.hazelcast.query.processor.QueryProcessor;
-import ch.psi.daq.hazelcast.query.processor.cassandra.CassandraQueryProcessorLocal;
-import ch.psi.daq.rest.model.PropertyFilterMixin;
-
-import com.fasterxml.jackson.annotation.JsonInclude.Include;
-import com.fasterxml.jackson.core.JsonFactory;
-import com.fasterxml.jackson.databind.ObjectMapper;
-
-@Configuration
-public class DaqRestConfiguration {
-
-  @Bean
-  public QueryProcessor queryProcessor() {
-    // return new DummyQueryProcessor();
-    return new CassandraQueryProcessorLocal();
-  }
-
-  @Bean
-  public JsonFactory jsonFactory() {
-    return new JsonFactory();
-  }
-
-  @Bean
-  public ResponseStreamWriter responseStreamWriter() {
-    return new ResponseStreamWriter();
-  }
-
-  @Bean
-  public ObjectMapper objectMapper() {
-    ObjectMapper mapper = new ObjectMapper();
-    // only include non-null values
-    mapper.setSerializationInclusion(Include.NON_NULL);
-    // Mixin which is used dynamically to filter out which properties get serialised and which
-    // won't. This way, the user can specify which columns are to be received.
-    mapper.addMixIn(ChannelEvent.class, PropertyFilterMixin.class);
-    mapper.addMixIn(StorelessStatistics.class, PropertyFilterMixin.class);
-    return mapper;
-  }
-
-  // a nested configuration
-  // this guarantees that the ordering of the properties file is as expected
-  // see:
-  // https://jira.spring.io/browse/SPR-10409?focusedCommentId=101393&page=com.atlassian.jira.plugin.system.issuetabpanels:comment-tabpanel#comment-101393
-  // @Configuration
-  // @Import(CassandraConfig.class)
-  // static class InnerConfiguration { }
-}
@@ -30,22 +30,23 @@ import org.springframework.context.annotation.ComponentScan;
 // @Import(CassandraConfig.class) // either define the context to be imported, or see ComponentScan
 // comment below
 @ComponentScan(basePackages = {
-    "ch.psi.daq.rest",
     "ch.psi.daq.cassandra.config", // define the package name with the CassandraConfig
                                    // configuration, or @Import it (see above)
     "ch.psi.daq.cassandra.reader",
-    "ch.psi.daq.cassandra.writer"
+    "ch.psi.daq.cassandra.writer",
+    "ch.psi.daq.hazelcast",
+    "ch.psi.daq.rest",
 })
-public class DaqRestApplication extends SpringBootServletInitializer {
+public class RestApplication extends SpringBootServletInitializer {
 
 
   public static void main(final String[] args) {
-    SpringApplication.run(DaqRestApplication.class, args);
+    SpringApplication.run(RestApplication.class, args);
   }
 
   @Override
   protected final SpringApplicationBuilder configure(final SpringApplicationBuilder application) {
-    return application.sources(DaqRestApplication.class);
+    return application.sources(RestApplication.class);
   }
 
 }
src/main/java/ch/psi/daq/rest/config/RestConfiguration.java (new file, 102 lines)
@@ -0,0 +1,102 @@
+package ch.psi.daq.rest.config;
+
+import java.util.Arrays;
+import java.util.List;
+
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+import org.springframework.beans.factory.annotation.Autowired;
+import org.springframework.context.annotation.Bean;
+import org.springframework.context.annotation.Configuration;
+import org.springframework.context.annotation.PropertySource;
+import org.springframework.core.env.Environment;
+import org.springframework.util.StringUtils;
+
+import com.fasterxml.jackson.annotation.JsonInclude.Include;
+import com.fasterxml.jackson.core.JsonFactory;
+import com.fasterxml.jackson.databind.ObjectMapper;
+import com.hazelcast.client.HazelcastClient;
+import com.hazelcast.client.config.ClientConfig;
+import com.hazelcast.core.HazelcastInstance;
+import com.hazelcast.core.ManagedContext;
+import com.hazelcast.spring.context.SpringManagedContext;
+
+import ch.psi.daq.common.statistic.StorelessStatistics;
+import ch.psi.daq.domain.cassandra.ChannelEvent;
+import ch.psi.daq.hazelcast.config.HazelcastConfig;
+import ch.psi.daq.hazelcast.query.processor.QueryProcessor;
+import ch.psi.daq.hazelcast.query.processor.cassandra.CassandraQueryProcessorDistributed;
+import ch.psi.daq.rest.ResponseStreamWriter;
+import ch.psi.daq.rest.model.PropertyFilterMixin;
+
+@Configuration
+@PropertySource(value = { "classpath:rest.properties" })
+@PropertySource(value = { "file:${user.home}/.config/daq/rest.properties" }, ignoreResourceNotFound = true)
+public class RestConfiguration {
+
+  private static final Logger logger = LoggerFactory.getLogger(RestConfiguration.class);
+
+  @Autowired
+  private Environment env;
+
+  @Bean
+  public QueryProcessor queryProcessor() {
+    return new CassandraQueryProcessorDistributed();
+  }
+
+  @Bean
+  public JsonFactory jsonFactory() {
+    return new JsonFactory();
+  }
+
+  @Bean
+  public ResponseStreamWriter responseStreamWriter() {
+    return new ResponseStreamWriter();
+  }
+
+  @Bean(name = HazelcastConfig.BEAN_NAME_HAZELCAST_CLIENT)
+  public HazelcastInstance hazelcastClientInstance() {
+    return HazelcastClient.newHazelcastClient(hazelcastClientConfig());
+  }
+
+  private ClientConfig hazelcastClientConfig() {
+    ClientConfig config = new ClientConfig();
+    config.setManagedContext(managedContext());
+
+    config.getNetworkConfig().setAddresses(hazelcastMembers());
+
+    return config;
+  }
+
+  private ManagedContext managedContext() {
+    return new SpringManagedContext();
+  }
+
+  @Bean
+  public List<String> hazelcastMembers() {
+    List<String> clusterMembers = Arrays.asList(StringUtils.commaDelimitedListToStringArray(env.getProperty("hazelcast.initialcandidates")));
+    logger.info("The following hosts have been defined to form a Hazelcast cluster: {}", clusterMembers);
+    return clusterMembers;
+  }
+
+  @Bean
+  public ObjectMapper objectMapper() {
+    ObjectMapper mapper = new ObjectMapper();
+    // only include non-null values
+    mapper.setSerializationInclusion(Include.NON_NULL);
+    // Mixin which is used dynamically to filter out which properties get serialised and which
+    // won't. This way, the user can specify which columns are to be received.
+    mapper.addMixIn(ChannelEvent.class, PropertyFilterMixin.class);
+    mapper.addMixIn(StorelessStatistics.class, PropertyFilterMixin.class);
+    return mapper;
+  }
+
+  // a nested configuration
+  // this guarantees that the ordering of the properties file is as expected
+  // see:
+  // https://jira.spring.io/browse/SPR-10409?focusedCommentId=101393&page=com.atlassian.jira.plugin.system.issuetabpanels:comment-tabpanel#comment-101393
+  // @Configuration
+  // @Import(CassandraConfig.class)
+  // static class InnerConfiguration { }
+}
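Note on the objectMapper() bean above: it relies on Jackson mix-ins (addMixIn) plus Include.NON_NULL to control which properties of ChannelEvent and StorelessStatistics get serialised. The project's PropertyFilterMixin is not part of this diff, so the following self-contained sketch only illustrates the general mix-in mechanism with hypothetical stand-in classes (Sample, SampleMixin); it is not the actual DAQ implementation.

import com.fasterxml.jackson.annotation.JsonIgnore;
import com.fasterxml.jackson.annotation.JsonInclude.Include;
import com.fasterxml.jackson.databind.ObjectMapper;

public class MixinDemo {

  // plain domain class, standing in for ChannelEvent / StorelessStatistics
  static class Sample {
    public String channel = "CH1";
    public Double value = 42.0;
    public String internalDetail = "not for clients";
  }

  // mix-in that re-annotates Sample without touching its source;
  // the project's PropertyFilterMixin plays a comparable role
  abstract static class SampleMixin {
    @JsonIgnore
    public String internalDetail;
  }

  public static void main(String[] args) throws Exception {
    ObjectMapper mapper = new ObjectMapper();
    mapper.setSerializationInclusion(Include.NON_NULL); // drop null values
    mapper.addMixIn(Sample.class, SampleMixin.class);   // apply the mix-in
    // prints something like {"channel":"CH1","value":42.0}
    System.out.println(mapper.writeValueAsString(new Sample()));
  }
}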
@@ -1,4 +1,4 @@
-package ch.psi.daq.rest;
+package ch.psi.daq.rest.controller;
 
 import java.io.IOException;
 import java.util.Map;
@@ -19,6 +19,7 @@ import ch.psi.daq.cassandra.writer.CassandraWriter;
 import ch.psi.daq.domain.cassandra.ChannelEvent;
 import ch.psi.daq.domain.cassandra.DataEvent;
 import ch.psi.daq.hazelcast.query.processor.QueryProcessor;
+import ch.psi.daq.rest.ResponseStreamWriter;
 import ch.psi.daq.rest.queries.AbstractQuery;
 import ch.psi.daq.rest.queries.PulseRangeQuery;
 import ch.psi.daq.rest.queries.TimeRangeQuery;
@@ -37,12 +38,8 @@ public class DaqRestController {
   @Autowired
   private QueryProcessor queryProcessor;
 
-  /**
-   *
-   * @param query
-   * @param res
-   * @throws IOException
-   */
   @RequestMapping(value = "/pulserange")
   public void pulseRange(@RequestBody PulseRangeQuery query, HttpServletResponse res) throws IOException {
 
@@ -52,12 +49,6 @@ public class DaqRestController {
   }
 
 
-  /**
-   *
-   * @param query
-   * @param res
-   * @throws IOException
-   */
   @RequestMapping(value = "/timerange")
   public void timeRange(@RequestBody TimeRangeQuery query, HttpServletResponse res) throws IOException {
 
@@ -66,12 +57,7 @@ public class DaqRestController {
     executeQuery(query, res);
   }
 
-  /**
-   *
-   * @param query
-   * @param res
-   * @throws IOException
-   */
  private void executeQuery(AbstractQuery query, HttpServletResponse res) throws IOException {
 
    // all the magic happens here
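For orientation, the controller above binds the request body to a PulseRangeQuery via the @JsonProperty names visible elsewhere in this change set (channels, fields, binningStrategy, binDuration, aggregateChannels, aggregationType, aggregations, queryRange). The sketch below posts a minimal JSON body from plain Java; the host, the port (server.port=8080 from rest.properties), the example channel name and the internal shape of queryRange are assumptions for illustration only, not something this diff defines.

import java.io.OutputStream;
import java.net.HttpURLConnection;
import java.net.URL;
import java.nio.charset.StandardCharsets;

public class PulseRangeRequestExample {

  public static void main(String[] args) throws Exception {
    // top-level property names follow the @JsonProperty annotations in this change set;
    // the channel name and the queryRange contents are placeholders
    String json = "{"
        + "\"channels\":[\"CHANNEL_A\"],"
        + "\"aggregateChannels\":false,"
        + "\"queryRange\":{\"startPulseId\":100,\"endPulseId\":200}"
        + "}";

    // server.port=8080 comes from rest.properties; the host is an assumption
    URL url = new URL("http://localhost:8080/pulserange");
    HttpURLConnection con = (HttpURLConnection) url.openConnection();
    con.setRequestMethod("POST");
    con.setRequestProperty("Content-Type", "application/json");
    con.setDoOutput(true);
    try (OutputStream os = con.getOutputStream()) {
      os.write(json.getBytes(StandardCharsets.UTF_8));
    }
    System.out.println("HTTP status: " + con.getResponseCode());
  }
}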
@@ -9,6 +9,12 @@ import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 import org.springframework.util.Assert;
 
+import com.fasterxml.jackson.annotation.JsonCreator;
+import com.fasterxml.jackson.annotation.JsonProperty;
+import com.fasterxml.jackson.annotation.JsonSubTypes;
+import com.fasterxml.jackson.annotation.JsonSubTypes.Type;
+import com.fasterxml.jackson.annotation.JsonTypeInfo;
+
 import ch.psi.daq.cassandra.reader.Ordering;
 import ch.psi.daq.hazelcast.query.Aggregation;
 import ch.psi.daq.hazelcast.query.AggregationType;
@@ -17,12 +23,6 @@ import ch.psi.daq.hazelcast.query.bin.BinningStrategy;
 import ch.psi.daq.hazelcast.query.bin.BinningStrategyFactory;
 import ch.psi.daq.hazelcast.query.range.QueryRange;
 
-import com.fasterxml.jackson.annotation.JsonCreator;
-import com.fasterxml.jackson.annotation.JsonProperty;
-import com.fasterxml.jackson.annotation.JsonSubTypes;
-import com.fasterxml.jackson.annotation.JsonSubTypes.Type;
-import com.fasterxml.jackson.annotation.JsonTypeInfo;
-
 /**
  *
  * @author zellweger_c
@@ -60,12 +60,23 @@ public abstract class AbstractQuery implements Query {
   private QueryRange queryRange;
 
   /**
+   * Constructor.
    *
    * @param ordering whether to add a 'orderBy' clause into the database query
    * @param channels all the channelIds (channel names) we want to query
    * @param fields the fields (who map to fields in the DB) we are interested in returning to the
    *        client, needs to be in insertion order (hence the {@link LinkedHashSet} type)
-   * @param queryRange TODO
+   * @param binningStrategyEnum enum that maps the user's String to a concrete
+   *        {@link BinningStrategy} implementation
+   * @param binLengthOrCount depending on the chosen binning strategy, this field defines either the
+   *        count (how many pulse ids are to be put inside 1 bin) or the time frame for 1 bin
+   * @param aggregateChannels whether aggregation will include all channels, default is on a
+   *        per-channel basis
+   * @param aggregationType defines whether aggregation takes place in an index- or value-based
+   *        manner
+   * @param aggregations list of aggregations / statistics to calculate, e.g. min, max and average
+   * @param queryRange object containing the ranges for either pulse-based queries or time-based
+   *        queries
    */
   @JsonCreator
   public AbstractQuery(
@@ -75,7 +86,7 @@ public abstract class AbstractQuery implements Query {
       @JsonProperty(value = "channels") List<String> channels,
       @JsonProperty(value = "fields") LinkedHashSet<String> fields,
       @JsonProperty(value = "binningStrategy") BinningStrategyEnum binningStrategyEnum,
-      @JsonProperty(value = "binDuration") long lengthOrCount,
+      @JsonProperty(value = "binDuration") long binLengthOrCount,
       @JsonProperty(value = "aggregateChannels") boolean aggregateChannels,
       @JsonProperty(value = "aggregationType") AggregationType aggregationType,
       @JsonProperty(value = "aggregations") List<Aggregation> aggregations,
@@ -93,23 +104,24 @@ public abstract class AbstractQuery implements Query {
     this.channels = channels;
     this.fields = fields;
 
-    this.binningStrategyEnum = binningStrategyEnum; // can be null: default then will be BinCountBinningStrategy
+    this.binningStrategyEnum = binningStrategyEnum; // can be null: default then will be
+                                                    // BinCountBinningStrategy
 
 
     if (binningStrategyEnum != null) {
       switch (binningStrategyEnum) {
         case count:
-          this.binningStrategy = BinningStrategyFactory.getBinningStrategy(getQueryRange(), (int) lengthOrCount);
+          this.binningStrategy = BinningStrategyFactory.getBinningStrategy(getQueryRange(), (int) binLengthOrCount);
           break;
         case length:
-          this.binningStrategy = BinningStrategyFactory.getBinningStrategy(getQueryRange(), lengthOrCount);
+          this.binningStrategy = BinningStrategyFactory.getBinningStrategy(getQueryRange(), binLengthOrCount);
           break;
         default:
           logger.warn("No binning strategy has been set. Selecting BinningStrategyBinCount.");
-          this.binningStrategy = BinningStrategyFactory.getBinningStrategy(getQueryRange(), (int) lengthOrCount);
+          this.binningStrategy = BinningStrategyFactory.getBinningStrategy(getQueryRange(), (int) binLengthOrCount);
       }
     } else {
-      this.binningStrategy = BinningStrategyFactory.getBinningStrategy(getQueryRange(), (int) lengthOrCount);
+      this.binningStrategy = BinningStrategyFactory.getBinningStrategy(getQueryRange(), (int) binLengthOrCount);
     }
   }
 
@@ -138,6 +150,7 @@ public abstract class AbstractQuery implements Query {
   }
 
 
+  @Override
   public boolean isAggregateChannels() {
     return aggregateChannels;
   }
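The Jackson imports added to AbstractQuery (JsonTypeInfo, JsonSubTypes) indicate that the query hierarchy is deserialised polymorphically, i.e. the REST layer decides between PulseRangeQuery and TimeRangeQuery from the JSON itself. The concrete annotation values on AbstractQuery are not shown in this diff; the toy sketch below, with hypothetical classes and a hypothetical "queryType" discriminator, only demonstrates how that Jackson mechanism works in general.

import com.fasterxml.jackson.annotation.JsonSubTypes;
import com.fasterxml.jackson.annotation.JsonSubTypes.Type;
import com.fasterxml.jackson.annotation.JsonTypeInfo;
import com.fasterxml.jackson.databind.ObjectMapper;

public class PolymorphicQueryDemo {

  // hypothetical discriminator; the real AbstractQuery may use different settings
  @JsonTypeInfo(use = JsonTypeInfo.Id.NAME, include = JsonTypeInfo.As.PROPERTY, property = "queryType")
  @JsonSubTypes({
      @Type(value = PulseQuery.class, name = "pulse"),
      @Type(value = TimeQuery.class, name = "time")})
  abstract static class BaseQuery {
  }

  static class PulseQuery extends BaseQuery {
    public long start;
    public long end;
  }

  static class TimeQuery extends BaseQuery {
    public String startDateTime;
    public String endDateTime;
  }

  public static void main(String[] args) throws Exception {
    ObjectMapper mapper = new ObjectMapper();
    BaseQuery query = mapper.readValue(
        "{\"queryType\":\"pulse\",\"start\":100,\"end\":200}", BaseQuery.class);
    // prints PulseQuery: Jackson picked the subtype from the discriminator property
    System.out.println(query.getClass().getSimpleName());
  }
}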
@@ -3,21 +3,42 @@ package ch.psi.daq.rest.queries;
 import java.util.LinkedHashSet;
 import java.util.List;
 
-import org.apache.commons.lang.builder.ToStringBuilder;
+import org.apache.commons.lang3.builder.ToStringBuilder;
 
+import com.fasterxml.jackson.annotation.JsonCreator;
+import com.fasterxml.jackson.annotation.JsonProperty;
+
 import ch.psi.daq.cassandra.reader.Ordering;
 import ch.psi.daq.hazelcast.query.Aggregation;
 import ch.psi.daq.hazelcast.query.AggregationType;
+import ch.psi.daq.hazelcast.query.bin.BinningStrategy;
 import ch.psi.daq.hazelcast.query.range.QueryRange;
-
-import com.fasterxml.jackson.annotation.JsonCreator;
-import com.fasterxml.jackson.annotation.JsonProperty;
 
 /**
  *
  */
 public class PulseRangeQuery extends AbstractQuery {
 
 
+  /**
+   * Constructor.
+   *
+   * @param ordering whether to add a 'orderBy' clause into the database query
+   * @param channels all the channelIds (channel names) we want to query
+   * @param fields the fields (who map to fields in the DB) we are interested in returning to the
+   *        client, needs to be in insertion order (hence the {@link LinkedHashSet} type)
+   * @param binningStrategyEnum enum that maps the user's String to a concrete
+   *        {@link BinningStrategy} implementation
+   * @param binLengthOrCount depending on the chosen binning strategy, this field defines either the
+   *        count (how many pulse ids are to be put inside 1 bin) or the time frame for 1 bin
+   * @param aggregateChannels whether aggregation will include all channels, default is on a
+   *        per-channel basis
+   * @param aggregationType defines whether aggregation takes place in an index- or value-based
+   *        manner
+   * @param aggregations list of aggregations / statistics to calculate, e.g. min, max and average
+   * @param queryRange object containing the ranges for either pulse-based queries or time-based
+   *        queries
+   */
   @JsonCreator
   public PulseRangeQuery(
       // note that those annotations are needed for the polymorphic
@@ -26,13 +47,13 @@ public class PulseRangeQuery extends AbstractQuery {
       @JsonProperty(value = "channels") List<String> channels,
       @JsonProperty(value = "fields") LinkedHashSet<String> fields,
       @JsonProperty(value = "binningStrategy") BinningStrategyEnum binningStrategyEnum,
-      @JsonProperty(value = "binDuration") long binDurationOrBinCount,
+      @JsonProperty(value = "binDuration") long binLengthOrCount,
       @JsonProperty(value = "aggregateChannels") boolean aggregateChannels,
       @JsonProperty(value = "aggregationType") AggregationType aggregationType,
       @JsonProperty(value = "aggregations") List<Aggregation> aggregations,
       @JsonProperty(value = "queryRange") QueryRange queryRange) {
 
-    super(ordering, channels, fields, binningStrategyEnum, binDurationOrBinCount, aggregateChannels, aggregationType,
+    super(ordering, channels, fields, binningStrategyEnum, binLengthOrCount, aggregateChannels, aggregationType,
         aggregations, queryRange);
 
   }
@@ -6,18 +6,19 @@ import java.util.Date;
 import java.util.LinkedHashSet;
 import java.util.List;
 
-import org.apache.commons.lang.builder.ToStringBuilder;
+import org.apache.commons.lang3.builder.ToStringBuilder;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
+import com.fasterxml.jackson.annotation.JsonCreator;
+import com.fasterxml.jackson.annotation.JsonProperty;
+
 import ch.psi.daq.cassandra.reader.Ordering;
 import ch.psi.daq.hazelcast.query.Aggregation;
 import ch.psi.daq.hazelcast.query.AggregationType;
+import ch.psi.daq.hazelcast.query.bin.BinningStrategy;
 import ch.psi.daq.hazelcast.query.range.QueryRange;
-
-import com.fasterxml.jackson.annotation.JsonCreator;
-import com.fasterxml.jackson.annotation.JsonProperty;
 
 
 public class TimeRangeQuery extends AbstractQuery {
 
 
@@ -26,19 +27,28 @@ public class TimeRangeQuery extends AbstractQuery {
   private static Logger logger = LoggerFactory.getLogger(TimeRangeQuery.class);
 
   /**
+   * Constructor.
    *
    * @param ordering whether to add a 'orderBy' clause into the database query
-   * @param channelIds all the sourceIds (channel names) we want to query
+   * @param channels all the channelIds (channel names) we want to query
    * @param fields the fields (who map to fields in the DB) we are interested in returning to the
    *        client, needs to be in insertion order (hence the {@link LinkedHashSet} type)
-   * @param binningStrategyEnum
-   * @param binDurationOrBinCount
-   * @param aggregateChannels
-   * @param aggregationType
-   * @param aggregations
-   * @param queryRange
-   * @param startDateTime
-   * @param endDateTime
+   * @param binningStrategyEnum enum that maps the user's String to a concrete
+   *        {@link BinningStrategy} implementation
+   * @param binLengthOrCount depending on the chosen binning strategy, this field defines either the
+   *        count (how many pulse ids are to be put inside 1 bin) or the time frame for 1 bin
+   * @param aggregateChannels whether aggregation will include all channels, default is on a
+   *        per-channel basis
+   * @param aggregationType defines whether aggregation takes place in an index- or value-based
+   *        manner
+   * @param aggregations list of aggregations / statistics to calculate, e.g. min, max and average
+   * @param queryRange object containing the ranges for either pulse-based queries or time-based
+   *        queries
+   * @param startDateTime if set, the date string (format is:
+   *        {@link TimeRangeQuery#DATE_FORMAT_STRING}) will be parsed and converted into
+   *        milliseconds
+   * @param endDateTime if set, the date string (format is: {@link TimeRangeQuery#DATE_FORMAT_STRING})
+   *        will be parsed and converted into milliseconds
    */
   @JsonCreator
   public TimeRangeQuery(
@@ -48,7 +58,7 @@ public class TimeRangeQuery extends AbstractQuery {
       @JsonProperty(value = "channels") List<String> channels,
       @JsonProperty(value = "fields") LinkedHashSet<String> fields,
       @JsonProperty(value = "binningStrategy") BinningStrategyEnum binningStrategyEnum,
-      @JsonProperty(value = "binDuration") long binDurationOrBinCount,
+      @JsonProperty(value = "binDuration") long binLengthOrCount,
       @JsonProperty(value = "aggregateChannels") boolean aggregateChannels,
      @JsonProperty(value = "aggregationType") AggregationType aggregationType,
       @JsonProperty(value = "aggregations") List<Aggregation> aggregations,
@@ -56,7 +66,8 @@ public class TimeRangeQuery extends AbstractQuery {
       @JsonProperty(value = "startDateTime") String startDateTime,
       @JsonProperty(value = "endDateTime") String endDateTime) {
 
-    super(ordering, channels, fields, binningStrategyEnum, binDurationOrBinCount, aggregateChannels, aggregationType, aggregations, queryRange);
+    super(ordering, channels, fields, binningStrategyEnum, binLengthOrCount, aggregateChannels, aggregationType,
+        aggregations, queryRange);
 
     if (startDateTime != null && endDateTime != null) {
       logger.info("startDateTime and endDateTime specified. This takes precedence over the start / end fields.");
@@ -64,7 +75,8 @@ public class TimeRangeQuery extends AbstractQuery {
         Date startDate = DATE_FORMAT.parse(startDateTime);
         Date endDate = DATE_FORMAT.parse(endDateTime);
 
-        getQueryRange().setTimeRange(startDate.getTime(), queryRange.getStartNanos(), endDate.getTime(), queryRange.getEndNanos());
+        getQueryRange().setTimeRange(startDate.getTime(), queryRange.getStartNanos(), endDate.getTime(),
+            queryRange.getEndNanos());
       } catch (ParseException e) {
         logger.error("Parsing the start- and/or endDate was unsuccessful. "
             + "The format must be '" + DATE_FORMAT_STRING + "'", e);
@@ -1 +1,6 @@
-cassandra.basekeyspace=daq_zellweger_rest
+# port for the Spring Boot application's embedded Tomcat server
+server.port=8080
+
+# defines the list of hosts that are tried for an initial connection to the cluster
+#hazelcast.members=sf-nube-11.psi.ch,sf-nube-12.psi.ch,sf-nube-13.psi.ch,sf-nube-14.psi.ch
+hazelcast.initialcandidates=localhost
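RestConfiguration above also declares a second @PropertySource (file:${user.home}/.config/daq/rest.properties, ignoreResourceNotFound = true); since the last @PropertySource processed wins for duplicate keys, this appears intended to let a per-user file override the classpath defaults shown here. A hypothetical override file might look like the snippet below; the values are illustrative only, and the hostnames are simply taken from the commented-out hazelcast.members line above.

# ~/.config/daq/rest.properties (hypothetical per-user override, not part of this commit)
server.port=8090
hazelcast.initialcandidates=sf-nube-11.psi.ch,sf-nube-12.psi.ch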
@@ -1,6 +1,5 @@
 package ch.psi.daq.test.rest;
 
-import org.cassandraunit.spring.EmbeddedCassandra;
 import org.junit.Before;
 import org.junit.runner.RunWith;
 import org.mockito.MockitoAnnotations;
@@ -16,17 +15,17 @@ import org.springframework.test.web.servlet.MockMvc;
 import org.springframework.test.web.servlet.setup.MockMvcBuilders;
 import org.springframework.web.context.WebApplicationContext;
 
-import ch.psi.daq.rest.DaqRestApplication;
-import ch.psi.daq.test.cassandra.CassandraDaqUnitDependencyInjectionTestExecutionListener;
-
 import com.fasterxml.jackson.databind.ObjectMapper;
 
-@SpringApplicationConfiguration(classes = {DaqRestApplication.class, DaqRestApplicationConfiguration.class})
+import ch.psi.daq.rest.RestApplication;
+import ch.psi.daq.test.cassandra.CassandraDaqUnitDependencyInjectionTestExecutionListener;
+
 @TestExecutionListeners({
     CassandraDaqUnitDependencyInjectionTestExecutionListener.class,
     DependencyInjectionTestExecutionListener.class})
-@EmbeddedCassandra
+@SpringApplicationConfiguration(classes = {RestApplication.class, DaqWebMvcConfiguration.class})
+//@EmbeddedCassandra
 @DirtiesContext(classMode = ClassMode.AFTER_EACH_TEST_METHOD)
 @WebAppConfiguration
 @RunWith(SpringJUnit4ClassRunner.class)
@@ -5,7 +5,7 @@ import org.springframework.context.annotation.Import;
 import org.springframework.web.servlet.config.annotation.EnableWebMvc;
 import org.springframework.web.servlet.config.annotation.WebMvcConfigurationSupport;
 
-import ch.psi.daq.rest.DaqRestConfiguration;
+import ch.psi.daq.rest.config.RestConfiguration;
 import ch.psi.daq.test.cassandra.LocalCassandraTestConfig;
 
 @Configuration
@@ -16,9 +16,9 @@ import ch.psi.daq.test.cassandra.LocalCassandraTestConfig;
 //    "ch.psi.daq.cassandra.reader",
 //    "ch.psi.daq.cassandra.writer"
 //})
-@Import(value = {LocalCassandraTestConfig.class, DaqRestConfiguration.class})
+@Import(value = {LocalCassandraTestConfig.class, RestConfiguration.class})
 @EnableWebMvc
-public class DaqRestApplicationConfiguration extends WebMvcConfigurationSupport {
+public class DaqWebMvcConfiguration extends WebMvcConfigurationSupport {
 
   // add test-specific beans and configurations here
 }
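The test scaffolding above (@WebAppConfiguration, @SpringApplicationConfiguration with RestApplication and DaqWebMvcConfiguration, MockMvcBuilders) suggests the controller tests are driven through MockMvc. A minimal sketch of such a test follows; the test class name, the package of DaqWebMvcConfiguration, the JSON payload and the expectation of a 200 response are assumptions, and a real run would still need the Cassandra/Hazelcast test setup wired in by the listeners shown above.

package ch.psi.daq.test.rest; // assumed test package, matching the test files in this diff

import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.boot.test.SpringApplicationConfiguration;
import org.springframework.http.MediaType;
import org.springframework.test.context.junit4.SpringJUnit4ClassRunner;
import org.springframework.test.context.web.WebAppConfiguration;
import org.springframework.test.web.servlet.MockMvc;
import org.springframework.test.web.servlet.setup.MockMvcBuilders;
import org.springframework.web.context.WebApplicationContext;

import ch.psi.daq.rest.RestApplication;

import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.post;
import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.status;

@RunWith(SpringJUnit4ClassRunner.class)
@SpringApplicationConfiguration(classes = {RestApplication.class, DaqWebMvcConfiguration.class})
@WebAppConfiguration
public class PulseRangeEndpointSketchTest {

  @Autowired
  private WebApplicationContext context;

  private MockMvc mockMvc;

  @Before
  public void setUp() {
    // build a MockMvc instance against the configured web application context
    mockMvc = MockMvcBuilders.webAppContextSetup(context).build();
  }

  @Test
  public void pulseRangeAcceptsJsonQuery() throws Exception {
    // property names follow the @JsonProperty annotations in AbstractQuery;
    // the queryRange shape and channel name are placeholders
    String json = "{\"channels\":[\"CHANNEL_A\"],"
        + "\"queryRange\":{\"startPulseId\":100,\"endPulseId\":200}}";

    mockMvc.perform(post("/pulserange")
        .contentType(MediaType.APPLICATION_JSON)
        .content(json))
        .andExpect(status().isOk());
  }
}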