Merge pull request #7 in ST/ch.psi.daq.rest from atest81 to master

# By Zellweger Christof Ralf
# Via Zellweger Christof Ralf
* commit '091824a7961971facc0b924ac17d356fc5667d0f':
  ATEST-99: - improving version for dependency management
  ATEST-81: - trying to fix bug re react stream and closed queue
  ATEST-81: - tracking down bugs and trying to fix
  ATEST-81: - fixing / trying to fix various bugs
  ATEST-81: - removing ComponentScan
  ATEST-81: - implementing dummy write method for manual testing
  ATEST-81: - renaming config so that all the configs follow the same naming patterns
build.gradle (47 lines changed)
@@ -1,29 +1,21 @@
 version = '1.0.0'
 
 buildscript {
-    dependencies {
-        classpath("org.springframework.boot:spring-boot-gradle-plugin:1.2.4.RELEASE")
-    }
-    repositories {
-        jcenter()
-    }
+    dependencies { classpath(libraries.spring_boot_gradle_plugin) }
+    repositories { jcenter() }
 }
 
 apply plugin: 'spring-boot'
 
-repositories {
-    jcenter()
-}
+repositories { jcenter() }
 
 
-springBoot {
-    // when using spring loaded turn on noverify
-    noverify = true
-}
+springBoot { // when using spring loaded turn on noverify
+    noverify = true }
 
 applicationDefaultJvmArgs = [
     "-Dfile.encoding=UTF-8",
     // if you need to debug java agents:
     "-agentlib:jdwp=transport=dt_socket,server=y,suspend=n,address=5006"
 ]
 
 //configurations {
@@ -31,24 +23,21 @@ applicationDefaultJvmArgs = [
 //}
 
 dependencies {
     compile (project(':ch.psi.daq.hazelcast'))
-    compile 'org.springframework.boot:spring-boot-starter-web:1.2.4.RELEASE'
-    compile 'com.google.code.gson:gson:2+'
-    compile 'org.apache.commons:commons-lang3:3.4'
+    compile libraries.spring_boot_starter_web
+    compile libraries.commons_lang
 
-    testCompile 'org.springframework.boot:spring-boot-starter-test:1.2.4.RELEASE'
-    testCompile 'org.skyscreamer:jsonassert:1.2.3'
-    testCompile 'com.jayway.jsonpath:json-path:2.0.0'
+    testCompile libraries.spring_boot_starter_test
+    testCompile libraries.jsonassert
+    testCompile libraries.jsonpath
 }
 
 uploadArchives {
     repositories {
-        mavenDeployer {
-            pom.artifactId = 'rest'
-        }
+        mavenDeployer { pom.artifactId = 'rest' }
     }
 }
 
 task dropItQueryREST(dependsOn: build) << {
     exec{ executable "curl"; args "-X", "POST", "-F", "file=@build/libs/ch.psi.daq.rest-" + version + ".jar", "http://dropit.psi.ch:8080"; }
 }
@@ -4,39 +4,12 @@ import org.springframework.boot.SpringApplication;
 import org.springframework.boot.autoconfigure.SpringBootApplication;
 import org.springframework.boot.builder.SpringApplicationBuilder;
 import org.springframework.boot.context.web.SpringBootServletInitializer;
-import org.springframework.context.annotation.ComponentScan;
 
 /**
- * Entry point to our rest-frontend of the Swissfel application which most importantly wires all the @RestController
- * annotated classes.
- * <p>
- *
- * This acts as a @Configuration class for Spring. As such it has @ComponentScan annotation that
- * enables scanning for another Spring components in current package and its subpackages.
- * <p>
- * Another annotation is @EnableAutoConfiguration which tells Spring Boot to run autoconfiguration.
- * <p>
- * It also extends SpringBootServletInitializer which will configure Spring servlet for us, and
- * overrides the configure() method to point to itself, so Spring can find the main configuration.
- * <p>
- * Finally, the main() method consists of single static call to SpringApplication.run().
- * <p>
- * Methods annotated with @Bean are Java beans that are container-managed, i.e. managed by Spring.
- * Whenever there are @Autowire, @Inject or similar annotations found in the code (which is being
- * scanned through the @ComponentScan annotation), the container then knows how to create those
- * beans and inject them accordingly.
+ * Entry point to our rest-frontend of the data acquisition (DAQ) application which most importantly
+ * wires all the @RestController annotated classes.
  */
 @SpringBootApplication
-// @Import(CassandraConfig.class) // either define the context to be imported, or see ComponentScan
-// comment below
-@ComponentScan(basePackages = {
-    "ch.psi.daq.cassandra.config", // define the package name with the CassandraConfig
-                                   // configuration, or @Import it (see above)
-    "ch.psi.daq.cassandra.reader",
-    "ch.psi.daq.cassandra.writer",
-    "ch.psi.daq.hazelcast",
-    "ch.psi.daq.rest",
-})
 public class RestApplication extends SpringBootServletInitializer {
 
 
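The class body itself is outside this diff. The removed Javadoc described the conventional Spring Boot entry-point shape (configure() pointing back at the class, main() delegating to SpringApplication.run()); a minimal sketch of that shape, assuming only the imports shown above, would be:

    import org.springframework.boot.SpringApplication;
    import org.springframework.boot.autoconfigure.SpringBootApplication;
    import org.springframework.boot.builder.SpringApplicationBuilder;
    import org.springframework.boot.context.web.SpringBootServletInitializer;

    @SpringBootApplication
    public class RestApplication extends SpringBootServletInitializer {

        // Point the servlet initializer back at this class so a WAR
        // deployment finds the same configuration as the main() path.
        @Override
        protected SpringApplicationBuilder configure(SpringApplicationBuilder application) {
            return application.sources(RestApplication.class);
        }

        // The single static call the (removed) Javadoc described.
        public static void main(String[] args) {
            SpringApplication.run(RestApplication.class, args);
        }
    }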
@@ -2,12 +2,12 @@ package ch.psi.daq.rest.config;
 
 import java.util.Arrays;
 import java.util.List;
+import java.util.Set;
 
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
 import org.springframework.beans.factory.annotation.Autowired;
 import org.springframework.context.annotation.Bean;
 import org.springframework.context.annotation.Configuration;
+import org.springframework.context.annotation.Import;
 import org.springframework.context.annotation.PropertySource;
 import org.springframework.core.env.Environment;
 import org.springframework.util.StringUtils;
@@ -15,33 +15,30 @@ import org.springframework.util.StringUtils;
 import com.fasterxml.jackson.annotation.JsonInclude.Include;
 import com.fasterxml.jackson.core.JsonFactory;
 import com.fasterxml.jackson.databind.ObjectMapper;
-import com.hazelcast.client.HazelcastClient;
-import com.hazelcast.client.config.ClientConfig;
-import com.hazelcast.core.HazelcastInstance;
-import com.hazelcast.core.ManagedContext;
-import com.hazelcast.spring.context.SpringManagedContext;
+import com.google.common.collect.Sets;
 
 import ch.psi.daq.common.statistic.StorelessStatistics;
 import ch.psi.daq.domain.cassandra.ChannelEvent;
+import ch.psi.daq.hazelcast.config.HazelcastClientConfig;
 import ch.psi.daq.hazelcast.config.HazelcastConfig;
-import ch.psi.daq.hazelcast.query.processor.QueryProcessor;
-import ch.psi.daq.hazelcast.query.processor.cassandra.CassandraQueryProcessorDistributed;
-import ch.psi.daq.rest.ResponseStreamWriter;
 import ch.psi.daq.rest.model.PropertyFilterMixin;
+import ch.psi.daq.rest.response.ResponseStreamWriter;
 
 @Configuration
-@PropertySource(value = { "classpath:rest.properties" })
-@PropertySource(value = { "file:${user.home}/.config/daq/rest.properties" }, ignoreResourceNotFound = true)
-public class RestConfiguration {
-
-  private static final Logger logger = LoggerFactory.getLogger(RestConfiguration.class);
+@PropertySource(value = {"classpath:rest.properties"})
+@PropertySource(value = {"file:${user.home}/.config/daq/rest.properties"}, ignoreResourceNotFound = true)
+public class RestConfig {
 
   @Autowired
   private Environment env;
 
-  @Bean
-  public QueryProcessor queryProcessor() {
-    return new CassandraQueryProcessorDistributed();
+  // a nested configuration
+  // this guarantees that the ordering of the properties file is as expected
+  // see:
+  // https://jira.spring.io/browse/SPR-10409?focusedCommentId=101393&page=com.atlassian.jira.plugin.system.issuetabpanels:comment-tabpanel#comment-101393
+  @Configuration
+  @Import({HazelcastConfig.class, HazelcastClientConfig.class})
+  static class InnerConfiguration {
   }
 
   @Bean
@@ -54,32 +51,6 @@ public class RestConfiguration {
     return new ResponseStreamWriter();
   }
 
-  @Bean(name = HazelcastConfig.BEAN_NAME_HAZELCAST_CLIENT)
-  public HazelcastInstance hazelcastClientInstance() {
-    return HazelcastClient.newHazelcastClient(hazelcastClientConfig());
-  }
-
-  private ClientConfig hazelcastClientConfig() {
-    ClientConfig config = new ClientConfig();
-    config.setManagedContext(managedContext());
-
-    config.getNetworkConfig().setAddresses(hazelcastMembers());
-
-    return config;
-  }
-
-  private ManagedContext managedContext() {
-    return new SpringManagedContext();
-  }
-
-  @Bean
-  public List<String> hazelcastMembers() {
-    List<String> clusterMembers = Arrays.asList(StringUtils.commaDelimitedListToStringArray(env.getProperty("hazelcast.initialcandidates")));
-    logger.info("The following hosts have been defined to form a Hazelcast cluster: {}", clusterMembers);
-    return clusterMembers;
-  }
-
   @Bean
   public ObjectMapper objectMapper() {
     ObjectMapper mapper = new ObjectMapper();
@@ -92,11 +63,13 @@ public class RestConfiguration {
     return mapper;
   }
 
-  // a nested configuration
-  // this guarantees that the ordering of the properties file is as expected
-  // see:
-  // https://jira.spring.io/browse/SPR-10409?focusedCommentId=101393&page=com.atlassian.jira.plugin.system.issuetabpanels:comment-tabpanel#comment-101393
-  // @Configuration
-  // @Import(CassandraConfig.class)
-  // static class InnerConfiguration { }
+  @Bean
+  private Set<String> defaultResponseFields() {
+    List<String> defaultFields = Arrays.asList(
+        StringUtils.commaDelimitedListToStringArray(
+            env.getProperty("rest.default.response.fields")
+        ));
+    Set<String> defaultResponseFields = Sets.newHashSet(defaultFields.iterator());
+    return defaultResponseFields;
+  }
 }
@@ -1,9 +1,15 @@
 package ch.psi.daq.rest.controller;
 
 import java.io.IOException;
+import java.util.Arrays;
+import java.util.List;
 import java.util.Map;
-import java.util.UUID;
 import java.util.concurrent.CompletableFuture;
+import java.util.concurrent.TimeUnit;
+import java.util.function.LongFunction;
+import java.util.function.LongUnaryOperator;
+import java.util.stream.Collectors;
+import java.util.stream.LongStream;
 import java.util.stream.Stream;
 
 import javax.servlet.http.HttpServletResponse;
@@ -11,18 +17,20 @@ import javax.servlet.http.HttpServletResponse;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 import org.springframework.beans.factory.annotation.Autowired;
+import org.springframework.util.Assert;
 import org.springframework.web.bind.annotation.RequestBody;
 import org.springframework.web.bind.annotation.RequestMapping;
 import org.springframework.web.bind.annotation.RestController;
 
 import ch.psi.daq.cassandra.writer.CassandraWriter;
+import ch.psi.daq.domain.DataType;
 import ch.psi.daq.domain.cassandra.ChannelEvent;
 import ch.psi.daq.domain.cassandra.DataEvent;
+import ch.psi.daq.hazelcast.query.AbstractQuery;
+import ch.psi.daq.hazelcast.query.PulseRangeQuery;
+import ch.psi.daq.hazelcast.query.TimeRangeQuery;
 import ch.psi.daq.hazelcast.query.processor.QueryProcessor;
-import ch.psi.daq.rest.ResponseStreamWriter;
-import ch.psi.daq.rest.queries.AbstractQuery;
-import ch.psi.daq.rest.queries.PulseRangeQuery;
-import ch.psi.daq.rest.queries.TimeRangeQuery;
+import ch.psi.daq.rest.response.ResponseStreamWriter;
 
 @RestController
 public class DaqRestController {
@@ -39,7 +47,6 @@ public class DaqRestController {
   private QueryProcessor queryProcessor;
 
-
 
   @RequestMapping(value = "/pulserange")
   public void pulseRange(@RequestBody PulseRangeQuery query, HttpServletResponse res) throws IOException {
 
@@ -71,20 +78,68 @@ public class DaqRestController {
     responseStreamWriter.respond(flatStreams, query, res);
   }
 
+  // ==========================================================================================
+  // TODO this is simply a method for initial / rudimentary testing - remove once further evolved
   @RequestMapping(value = "/write")
-  public void writeDummyEntry() {
-    int pulseId = 100;
-    ChannelEvent event = new ChannelEvent(
-        "dummy-test",
-        pulseId,
-        0,
-        pulseId,
-        pulseId,
-        0,
-        "data_" + UUID.randomUUID().toString());
-
-    CompletableFuture<Void> future = cassandraWriter.writeAsync(3, 0, event);
+  public long writeDummyEntry() {
+    long startIndex = System.currentTimeMillis();
+
+    writeData(3, startIndex, 100, new String[]{"channel1", "channel2"});
+
+    return startIndex;
+  }
+
+  private void writeData(int dataReplication, long startIndex, long nrOfElements,
+      String... channelNames) {
+    writeData(dataReplication, startIndex, nrOfElements,
+        i -> 2 * i,
+        i -> 2 * i,
+        i -> 2 * i,
+        i -> 2 * i,
+        i -> 2 * i,
+        i -> Long.valueOf(2 * i),
+        channelNames);
+  }
+
+  private <T> void writeData(int dataReplication, long startIndex, long nrOfElements,
+      LongUnaryOperator iocMillis, LongUnaryOperator iocNanos, LongUnaryOperator pulseIds,
+      LongUnaryOperator globalMillis, LongUnaryOperator globalNanos, LongFunction<T> valueFunction,
+      String... channelNames) {
+
+    Assert.notNull(channelNames);
+
+    CompletableFuture<Void> future;
+
+    List<ChannelEvent> events =
+        Arrays.stream(channelNames)
+            .parallel()
+            .flatMap(
+                channelName -> {
+                  Stream<ChannelEvent> stream =
+                      LongStream
+                          .range(startIndex, startIndex + nrOfElements)
+                          .parallel()
+                          .mapToObj(
+                              i -> {
+                                Object value = valueFunction.apply(i);
+                                return new ChannelEvent(channelName,
+                                    iocMillis.applyAsLong(i),
+                                    iocNanos.applyAsLong(i),
+                                    pulseIds.applyAsLong(i),
+                                    globalMillis.applyAsLong(i),
+                                    globalNanos.applyAsLong(i),
+                                    value,
+                                    DataType.getTypeName(value.getClass())
+                                );
+                              }
+                          );
+
+                  return stream;
+                })
+            .collect(Collectors.toList());
+
+    future = cassandraWriter.writeAsync(dataReplication, (int) TimeUnit.HOURS.toSeconds(1), events);
     future.join();
   }
 }
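The generic writeData overload above derives every field of a synthetic event from the running index i via LongUnaryOperator / LongFunction arguments. A stripped-down, self-contained sketch of that generator shape (the String events stand in for the project's ChannelEvent):

    import java.util.List;
    import java.util.function.LongFunction;
    import java.util.function.LongUnaryOperator;
    import java.util.stream.Collectors;
    import java.util.stream.LongStream;

    public class WriteDataSketch {
        public static void main(String[] args) {
            // Same i -> 2 * i shape as the diff's default overload.
            LongUnaryOperator pulseIds = i -> 2 * i;
            LongFunction<Long> values = i -> Long.valueOf(2 * i);

            long startIndex = 0, nrOfElements = 3;
            List<String> events = LongStream
                .range(startIndex, startIndex + nrOfElements)
                .mapToObj(i -> "channel1 pulseId=" + pulseIds.applyAsLong(i)
                        + " value=" + values.apply(i))
                .collect(Collectors.toList());

            events.forEach(System.out::println);
            // channel1 pulseId=0 value=0
            // channel1 pulseId=2 value=2
            // channel1 pulseId=4 value=4
        }
    }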
@@ -1,192 +0,0 @@
-package ch.psi.daq.rest.queries;
-
-import java.util.Collections;
-import java.util.LinkedHashSet;
-import java.util.List;
-
-import org.apache.commons.lang3.builder.ReflectionToStringBuilder;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-import org.springframework.util.Assert;
-
-import com.fasterxml.jackson.annotation.JsonCreator;
-import com.fasterxml.jackson.annotation.JsonProperty;
-import com.fasterxml.jackson.annotation.JsonSubTypes;
-import com.fasterxml.jackson.annotation.JsonSubTypes.Type;
-import com.fasterxml.jackson.annotation.JsonTypeInfo;
-
-import ch.psi.daq.cassandra.reader.Ordering;
-import ch.psi.daq.hazelcast.query.Aggregation;
-import ch.psi.daq.hazelcast.query.AggregationType;
-import ch.psi.daq.hazelcast.query.Query;
-import ch.psi.daq.hazelcast.query.bin.BinningStrategy;
-import ch.psi.daq.hazelcast.query.bin.BinningStrategyFactory;
-import ch.psi.daq.hazelcast.query.range.QueryRange;
-
-/**
- *
- * @author zellweger_c
- *
- */
-@JsonTypeInfo(
-    use = JsonTypeInfo.Id.NAME,
-    include = JsonTypeInfo.As.PROPERTY,
-    property = "queryType")
-@JsonSubTypes(
-    {
-        @Type(value = PulseRangeQuery.class, name = "pulserange"),
-        @Type(value = TimeRangeQuery.class, name = "timerange"),
-    })
-public abstract class AbstractQuery implements Query {
-
-  private static Logger logger = LoggerFactory.getLogger(AbstractQuery.class);
-
-  private List<String> channels;
-  private LinkedHashSet<String> fields;
-  private List<Aggregation> aggregations;
-  private AggregationType aggregationType;
-  private boolean aggregateChannels;
-  private Ordering ordering;
-  private BinningStrategy binningStrategy;
-  private BinningStrategyEnum binningStrategyEnum;
-  private QueryRange queryRange;
-
-  /**
-   * Constructor.
-   *
-   * @param ordering whether to add a 'orderBy' clause into the database query
-   * @param channels all the channelIds (channel names) we want to query
-   * @param fields the fields (who map to fields in the DB) we are interested in returning to the
-   *        client, needs to be in insertion order (hence the {@link LinkedHashSet} type)
-   * @param binningStrategyEnum enum that maps the user's String to a concrete
-   *        {@link BinningStrategy} implementation
-   * @param binLengthOrCount depending on the chosen binning strategy, this field defines either the
-   *        count (how many pulse ids are to be put inside 1 bin) or the time frame for 1 bin
-   * @param aggregateChannels whether aggregation will include all channels, default is on a
-   *        per-channel basis
-   * @param aggregationType defines whether aggregation takes place in an index- or value-based
-   *        manner
-   * @param aggregations list of aggregations / statistics to calculate, e.g. min, max and average
-   * @param queryRange object containing the ranges for either pulse-based queries or time-based
-   *        queries
-   */
-  @JsonCreator
-  public AbstractQuery(
-      // note that those annotations are needed for the polymorphic
-      // mapping to work correctly
-      @JsonProperty(value = "ordering") Ordering ordering,
-      @JsonProperty(value = "channels") List<String> channels,
-      @JsonProperty(value = "fields") LinkedHashSet<String> fields,
-      @JsonProperty(value = "binningStrategy") BinningStrategyEnum binningStrategyEnum,
-      @JsonProperty(value = "binDuration") long binLengthOrCount,
-      @JsonProperty(value = "aggregateChannels") boolean aggregateChannels,
-      @JsonProperty(value = "aggregationType") AggregationType aggregationType,
-      @JsonProperty(value = "aggregations") List<Aggregation> aggregations,
-      @JsonProperty(value = "queryRange") QueryRange queryRange) {
-
-    this.ordering = ordering;
-    this.aggregateChannels = aggregateChannels;
-    this.aggregationType = aggregationType;
-    this.aggregations = aggregations;
-    this.queryRange = queryRange;
-
-    Assert.notNull(channels, "channel name must not be null.");
-    Assert.notNull(fields, "field, i.e. property, names must not be null.");
-
-    this.channels = channels;
-    this.fields = fields;
-
-    this.binningStrategyEnum = binningStrategyEnum; // can be null: default then will be
-                                                    // BinCountBinningStrategy
-
-    if (binningStrategyEnum != null) {
-      switch (binningStrategyEnum) {
-        case count:
-          this.binningStrategy = BinningStrategyFactory.getBinningStrategy(getQueryRange(), (int) binLengthOrCount);
-          break;
-        case length:
-          this.binningStrategy = BinningStrategyFactory.getBinningStrategy(getQueryRange(), binLengthOrCount);
-          break;
-        default:
-          logger.warn("No binning strategy has been set. Selecting BinningStrategyBinCount.");
-          this.binningStrategy = BinningStrategyFactory.getBinningStrategy(getQueryRange(), (int) binLengthOrCount);
-      }
-    } else {
-      this.binningStrategy = BinningStrategyFactory.getBinningStrategy(getQueryRange(), (int) binLengthOrCount);
-    }
-  }
-
-  /**
-   * {@inheritDoc}
-   */
-  @Override
-  public List<String> getChannels() {
-    return Collections.unmodifiableList(channels);
-  }
-
-  /**
-   * {@inheritDoc}
-   */
-  @Override
-  public Ordering getOrdering() {
-    return ordering;
-  }
-
-  /**
-   * {@inheritDoc}
-   */
-  @Override
-  public AggregationType getAggregationType() {
-    return aggregationType;
-  }
-
-  @Override
-  public boolean isAggregateChannels() {
-    return aggregateChannels;
-  }
-
-  public BinningStrategyEnum getBinningStrategyEnum() {
-    return binningStrategyEnum;
-  }
-
-  @Override
-  public BinningStrategy getBinningStrategy() {
-    return binningStrategy;
-  }
-
-  public LinkedHashSet<String> getFields() {
-    return fields;
-  }
-
-  public List<Aggregation> getAggregations() {
-    return aggregations;
-  }
-
-  /**
-   * {@inheritDoc}
-   */
-  @Override
-  public QueryRange getQueryRange() {
-    return queryRange;
-  }
-
-  public void setQueryRange(QueryRange queryRange) {
-    this.queryRange = queryRange;
-  }
-
-  @Override
-  public String toString() {
-    return ReflectionToStringBuilder.reflectionToString(this);
-  }
-}
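The @JsonTypeInfo/@JsonSubTypes pair on the removed class is what lets the "queryType" field of a REST payload select the concrete query class during deserialization (the controller now imports the hazelcast-side query types instead). A self-contained sketch of that Jackson mechanism, with illustrative class names rather than the project's:

    import com.fasterxml.jackson.annotation.JsonSubTypes;
    import com.fasterxml.jackson.annotation.JsonTypeInfo;
    import com.fasterxml.jackson.databind.ObjectMapper;

    public class QueryTypeSketch {

        // "queryType" in the JSON picks the subtype, as in the annotations above.
        @JsonTypeInfo(use = JsonTypeInfo.Id.NAME,
                      include = JsonTypeInfo.As.PROPERTY,
                      property = "queryType")
        @JsonSubTypes({@JsonSubTypes.Type(value = Pulse.class, name = "pulserange")})
        static abstract class Query {}

        static class Pulse extends Query {
            public long start;
            public long end;
        }

        public static void main(String[] args) throws Exception {
            Query q = new ObjectMapper().readValue(
                "{\"queryType\":\"pulserange\",\"start\":100,\"end\":200}", Query.class);
            System.out.println(q.getClass().getSimpleName()); // Pulse
        }
    }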
@@ -1,16 +0,0 @@
-package ch.psi.daq.rest.queries;
-
-import ch.psi.daq.hazelcast.query.bin.BinningStrategy;
-
-/**
- * Defines the strategies that map to the similarly named {@link BinningStrategy} implementations.
- * <p>
- * This enum is used for type-safety and simplicity for the rest interface.
- *
- */
-public enum BinningStrategyEnum {
-
-  length,
-
-  count
-}
@@ -1,69 +0,0 @@
-package ch.psi.daq.rest.queries;
-
-import java.util.LinkedHashSet;
-import java.util.List;
-
-import org.apache.commons.lang3.builder.ToStringBuilder;
-
-import com.fasterxml.jackson.annotation.JsonCreator;
-import com.fasterxml.jackson.annotation.JsonProperty;
-
-import ch.psi.daq.cassandra.reader.Ordering;
-import ch.psi.daq.hazelcast.query.Aggregation;
-import ch.psi.daq.hazelcast.query.AggregationType;
-import ch.psi.daq.hazelcast.query.bin.BinningStrategy;
-import ch.psi.daq.hazelcast.query.range.QueryRange;
-
-/**
- *
- */
-public class PulseRangeQuery extends AbstractQuery {
-
-  /**
-   * Constructor.
-   *
-   * @param ordering whether to add a 'orderBy' clause into the database query
-   * @param channels all the channelIds (channel names) we want to query
-   * @param fields the fields (who map to fields in the DB) we are interested in returning to the
-   *        client, needs to be in insertion order (hence the {@link LinkedHashSet} type)
-   * @param binningStrategyEnum enum that maps the user's String to a concrete
-   *        {@link BinningStrategy} implementation
-   * @param binLengthOrCount depending on the chosen binning strategy, this field defines either the
-   *        count (how many pulse ids are to be put inside 1 bin) or the time frame for 1 bin
-   * @param aggregateChannels whether aggregation will include all channels, default is on a
-   *        per-channel basis
-   * @param aggregationType defines whether aggregation takes place in an index- or value-based
-   *        manner
-   * @param aggregations list of aggregations / statistics to calculate, e.g. min, max and average
-   * @param queryRange object containing the ranges for either pulse-based queries or time-based
-   *        queries
-   */
-  @JsonCreator
-  public PulseRangeQuery(
-      // note that those annotations are needed for the polymorphic
-      // mapping to work correctly
-      @JsonProperty(value = "ordering") Ordering ordering,
-      @JsonProperty(value = "channels") List<String> channels,
-      @JsonProperty(value = "fields") LinkedHashSet<String> fields,
-      @JsonProperty(value = "binningStrategy") BinningStrategyEnum binningStrategyEnum,
-      @JsonProperty(value = "binDuration") long binLengthOrCount,
-      @JsonProperty(value = "aggregateChannels") boolean aggregateChannels,
-      @JsonProperty(value = "aggregationType") AggregationType aggregationType,
-      @JsonProperty(value = "aggregations") List<Aggregation> aggregations,
-      @JsonProperty(value = "queryRange") QueryRange queryRange) {
-
-    super(ordering, channels, fields, binningStrategyEnum, binLengthOrCount, aggregateChannels, aggregationType,
-        aggregations, queryRange);
-  }
-
-  /**
-   * {@inheritDoc}
-   */
-  @Override
-  public String toString() {
-    return ToStringBuilder.reflectionToString(this);
-  }
-}
@@ -1,96 +0,0 @@
-package ch.psi.daq.rest.queries;
-
-import java.text.ParseException;
-import java.text.SimpleDateFormat;
-import java.util.Date;
-import java.util.LinkedHashSet;
-import java.util.List;
-
-import org.apache.commons.lang3.builder.ToStringBuilder;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-import com.fasterxml.jackson.annotation.JsonCreator;
-import com.fasterxml.jackson.annotation.JsonProperty;
-
-import ch.psi.daq.cassandra.reader.Ordering;
-import ch.psi.daq.hazelcast.query.Aggregation;
-import ch.psi.daq.hazelcast.query.AggregationType;
-import ch.psi.daq.hazelcast.query.bin.BinningStrategy;
-import ch.psi.daq.hazelcast.query.range.QueryRange;
-
-public class TimeRangeQuery extends AbstractQuery {
-
-  public static final String DATE_FORMAT_STRING = "yyyy/MM/dd hh:mm:ss.SSS";
-  private static final SimpleDateFormat DATE_FORMAT = new SimpleDateFormat(DATE_FORMAT_STRING);
-  private static Logger logger = LoggerFactory.getLogger(TimeRangeQuery.class);
-
-  /**
-   * Constructor
-   *
-   * @param ordering whether to add a 'orderBy' clause into the database query
-   * @param channels all the channelIds (channel names) we want to query
-   * @param fields the fields (who map to fields in the DB) we are interested in returning to the
-   *        client, needs to be in insertion order (hence the {@link LinkedHashSet} type)
-   * @param binningStrategyEnum enum that maps the user's String to a concrete
-   *        {@link BinningStrategy} implementation
-   * @param binLengthOrCount depending on the chosen binning strategy, this field defines either the
-   *        count (how many pulse ids are to be put inside 1 bin) or the time frame for 1 bin
-   * @param aggregateChannels whether aggregation will include all channels, default is on a
-   *        per-channel basis
-   * @param aggregationType defines whether aggregation takes place in an index- or value-based
-   *        manner
-   * @param aggregations list of aggregations / statistics to calculate, e.g. min, max and average
-   * @param queryRange object containing the ranges for either pulse-based queries or time-based
-   *        queries
-   * @param startDateTime if set, the date string (format is:
-   *        {@link TimeRangeQuery#DATE_FORMAT_STRING}) will be parsed and converted into
-   *        milliseconds
-   * @param endDateTime if set, the date string (format is: {@link TimeRangeQuery#DATE_FORMAT_STRING})
-   *        will be parsed and converted into milliseconds
-   */
-  @JsonCreator
-  public TimeRangeQuery(
-      // note that those annotations are needed for the polymorphic
-      // mapping to work correctly
-      @JsonProperty(value = "ordering") Ordering ordering,
-      @JsonProperty(value = "channels") List<String> channels,
-      @JsonProperty(value = "fields") LinkedHashSet<String> fields,
-      @JsonProperty(value = "binningStrategy") BinningStrategyEnum binningStrategyEnum,
-      @JsonProperty(value = "binDuration") long binLengthOrCount,
-      @JsonProperty(value = "aggregateChannels") boolean aggregateChannels,
-      @JsonProperty(value = "aggregationType") AggregationType aggregationType,
-      @JsonProperty(value = "aggregations") List<Aggregation> aggregations,
-      @JsonProperty(value = "queryRange") QueryRange queryRange,
-      @JsonProperty(value = "startDateTime") String startDateTime,
-      @JsonProperty(value = "endDateTime") String endDateTime) {
-
-    super(ordering, channels, fields, binningStrategyEnum, binLengthOrCount, aggregateChannels, aggregationType,
-        aggregations, queryRange);
-
-    if (startDateTime != null && endDateTime != null) {
-      logger.info("startDateTime and endDateTime specified. This takes precedence over the start / end fields.");
-      try {
-        Date startDate = DATE_FORMAT.parse(startDateTime);
-        Date endDate = DATE_FORMAT.parse(endDateTime);
-
-        getQueryRange().setTimeRange(startDate.getTime(), queryRange.getStartNanos(), endDate.getTime(),
-            queryRange.getEndNanos());
-      } catch (ParseException e) {
-        logger.error("Parsing the start- and/or endDate was unsuccessful. "
-            + "The format must be '" + DATE_FORMAT_STRING + "'", e);
-      }
-    }
-  }
-
-  /**
-   * {@inheritDoc}
-   */
-  @Override
-  public String toString() {
-    return ToStringBuilder.reflectionToString(this);
-  }
-}
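The removed constructor turned human-readable timestamps into the epoch milliseconds that setTimeRange(...) expects. A standalone sketch of that conversion, using the same DATE_FORMAT_STRING (note that hh in the pattern is the 12-hour clock; HH would be the 24-hour form):

    import java.text.SimpleDateFormat;
    import java.util.Date;

    public class TimeRangeParseSketch {
        public static void main(String[] args) throws Exception {
            // Same pattern as TimeRangeQuery.DATE_FORMAT_STRING.
            SimpleDateFormat format = new SimpleDateFormat("yyyy/MM/dd hh:mm:ss.SSS");
            Date start = format.parse("2015/07/01 09:30:00.000");
            // start.getTime() is the millis value handed to setTimeRange(...).
            System.out.println(start.getTime());
        }
    }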
@@ -1,7 +1,7 @@
 /**
  *
  */
-package ch.psi.daq.rest;
+package ch.psi.daq.rest.response;
 
 import java.io.IOException;
 import java.util.Set;
@@ -14,9 +14,6 @@ import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 import org.springframework.beans.factory.annotation.Autowired;
 
-import ch.psi.daq.domain.cassandra.DataEvent;
-import ch.psi.daq.rest.queries.AbstractQuery;
-
 import com.fasterxml.jackson.core.JsonEncoding;
 import com.fasterxml.jackson.core.JsonFactory;
 import com.fasterxml.jackson.core.JsonGenerator;
@@ -25,6 +22,9 @@ import com.fasterxml.jackson.databind.ObjectWriter;
 import com.fasterxml.jackson.databind.ser.impl.SimpleBeanPropertyFilter;
 import com.fasterxml.jackson.databind.ser.impl.SimpleFilterProvider;
 
+import ch.psi.daq.domain.cassandra.DataEvent;
+import ch.psi.daq.hazelcast.query.AbstractQuery;
+
 /**
  * Takes a Java 8 stream and writes it to the output stream provided by the {@link ServletResponse}
  * of the current request.
@@ -39,6 +39,8 @@ public class ResponseStreamWriter {
   @Autowired
   private ObjectMapper mapper;
 
+  @Autowired
+  private Set<String> defaultResponseFields;
   /**
    * Responding with the the contents of the stream by writing into the output stream of the
    * {@link ServletResponse}.
@@ -51,6 +53,9 @@ public class ResponseStreamWriter {
   public void respond(Stream<DataEvent> stream, AbstractQuery query, ServletResponse response) throws IOException {
 
     Set<String> includedFields = query.getFields();
+    if (includedFields == null) {
+      includedFields = defaultResponseFields;
+    }
 
     if (query.getAggregations() != null) {
       includedFields.addAll(query.getAggregations()
@@ -1,6 +1,6 @@
 # port for the Spring boot application's embedded Tomcat server
 server.port=8080
 
-# defines the list of hosts who are tried for an initial connection to the cluster
-#hazelcast.members=sf-nube-11.psi.ch,sf-nube-12.psi.ch,sf-nube-13.psi.ch,sf-nube-14.psi.ch
-hazelcast.initialcandidates=localhost
+# defines the fields that are included in the response
+# if no fields have been specified by the user
+rest.default.response.fields=channel,pulseId,globalMillis,globalNanos,value
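RestConfig's defaultResponseFields bean (above) splits this comma-delimited value with Spring's StringUtils. A standalone sketch of that parsing step, with the property value inlined in place of env.getProperty(...):

    import java.util.Arrays;
    import java.util.LinkedHashSet;
    import java.util.Set;

    import org.springframework.util.StringUtils;

    public class DefaultFieldsSketch {
        public static void main(String[] args) {
            // Inlined stand-in for env.getProperty("rest.default.response.fields").
            String property = "channel,pulseId,globalMillis,globalNanos,value";
            Set<String> fields = new LinkedHashSet<>(
                Arrays.asList(StringUtils.commaDelimitedListToStringArray(property)));
            System.out.println(fields); // [channel, pulseId, globalMillis, globalNanos, value]
        }
    }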
@@ -24,7 +24,7 @@ import ch.psi.daq.test.cassandra.CassandraDaqUnitDependencyInjectionTestExecutio
 @TestExecutionListeners({
     CassandraDaqUnitDependencyInjectionTestExecutionListener.class,
     DependencyInjectionTestExecutionListener.class})
-@SpringApplicationConfiguration(classes = {RestApplication.class, DaqWebMvcConfiguration.class})
+@SpringApplicationConfiguration(classes = {RestApplication.class, DaqWebMvcConfig.class})
 //@EmbeddedCassandra
 @DirtiesContext(classMode = ClassMode.AFTER_EACH_TEST_METHOD)
 @WebAppConfiguration
src/test/java/ch/psi/daq/test/rest/DaqWebMvcConfig.java (new file, 17 lines)

@@ -0,0 +1,17 @@
+package ch.psi.daq.test.rest;
+
+import org.springframework.context.annotation.Configuration;
+import org.springframework.context.annotation.Import;
+import org.springframework.web.servlet.config.annotation.EnableWebMvc;
+import org.springframework.web.servlet.config.annotation.WebMvcConfigurationSupport;
+
+import ch.psi.daq.rest.config.RestConfig;
+import ch.psi.daq.test.cassandra.LocalCassandraTestConfig;
+
+@Configuration
+@Import(value = {LocalCassandraTestConfig.class, RestConfig.class})
+@EnableWebMvc
+public class DaqWebMvcConfig extends WebMvcConfigurationSupport {
+
+  // add test-specific beans and configurations here
+}
@@ -1,24 +0,0 @@
-package ch.psi.daq.test.rest;
-
-import org.springframework.context.annotation.Configuration;
-import org.springframework.context.annotation.Import;
-import org.springframework.web.servlet.config.annotation.EnableWebMvc;
-import org.springframework.web.servlet.config.annotation.WebMvcConfigurationSupport;
-
-import ch.psi.daq.rest.config.RestConfiguration;
-import ch.psi.daq.test.cassandra.LocalCassandraTestConfig;
-
-@Configuration
-//@ComponentScan(basePackages = {
-//    "ch.psi.daq.rest",
-//    "ch.psi.daq.cassandra.config", // define the package name with the CassandraConfig
-//                                   // configuration, or @Import it (see above)
-//    "ch.psi.daq.cassandra.reader",
-//    "ch.psi.daq.cassandra.writer"
-//})
-@Import(value = {LocalCassandraTestConfig.class, RestConfiguration.class})
-@EnableWebMvc
-public class DaqWebMvcConfiguration extends WebMvcConfigurationSupport {
-
-  // add test-specific beans and configurations here
-}
@@ -10,19 +10,19 @@ import org.springframework.test.web.servlet.request.MockMvcRequestBuilders;
 import org.springframework.test.web.servlet.result.MockMvcResultHandlers;
 import org.springframework.test.web.servlet.result.MockMvcResultMatchers;
 
-import ch.psi.daq.cassandra.reader.Ordering;
-import ch.psi.daq.hazelcast.query.AggregationType;
-import ch.psi.daq.hazelcast.query.range.QueryRange;
-import ch.psi.daq.hazelcast.query.range.QueryRangeImpl;
-import ch.psi.daq.rest.queries.BinningStrategyEnum;
-import ch.psi.daq.rest.queries.PulseRangeQuery;
-import ch.psi.daq.rest.queries.TimeRangeQuery;
-import ch.psi.daq.test.rest.AbstractDaqRestTest;
-
 import com.fasterxml.jackson.core.JsonProcessingException;
 import com.google.common.collect.Lists;
 import com.google.common.collect.Sets;
 
+import ch.psi.daq.cassandra.reader.Ordering;
+import ch.psi.daq.hazelcast.query.AggregationType;
+import ch.psi.daq.hazelcast.query.BinningStrategyEnum;
+import ch.psi.daq.hazelcast.query.PulseRangeQuery;
+import ch.psi.daq.hazelcast.query.TimeRangeQuery;
+import ch.psi.daq.hazelcast.query.range.QueryRange;
+import ch.psi.daq.hazelcast.query.range.QueryRangeImpl;
+import ch.psi.daq.test.rest.AbstractDaqRestTest;
+
 /**
  * Tests the {@link DaqController} implementation.
  */
@@ -8,18 +8,18 @@ import org.junit.Test;
 import org.junit.runner.RunWith;
 import org.junit.runners.BlockJUnit4ClassRunner;
 
+import com.google.common.collect.Lists;
+import com.google.common.collect.Sets;
+
 import ch.psi.daq.cassandra.reader.Ordering;
 import ch.psi.daq.hazelcast.query.AggregationType;
+import ch.psi.daq.hazelcast.query.BinningStrategyEnum;
+import ch.psi.daq.hazelcast.query.TimeRangeQuery;
 import ch.psi.daq.hazelcast.query.bin.BinningStrategyBinCount;
 import ch.psi.daq.hazelcast.query.bin.BinningStrategyLengthPulse;
 import ch.psi.daq.hazelcast.query.bin.BinningStrategyLengthTime;
 import ch.psi.daq.hazelcast.query.range.QueryRange;
 import ch.psi.daq.hazelcast.query.range.QueryRangeImpl;
-import ch.psi.daq.rest.queries.BinningStrategyEnum;
-import ch.psi.daq.rest.queries.TimeRangeQuery;
-
-import com.google.common.collect.Lists;
-import com.google.common.collect.Sets;
 
 @RunWith(BlockJUnit4ClassRunner.class)
 public class AbstractQueryTestTest extends AbstractQueryTest {
@@ -8,16 +8,16 @@ import org.junit.Test;
 import org.junit.runner.RunWith;
 import org.junit.runners.BlockJUnit4ClassRunner;
 
-import ch.psi.daq.cassandra.reader.Ordering;
-import ch.psi.daq.hazelcast.query.AggregationType;
-import ch.psi.daq.hazelcast.query.range.QueryRange;
-import ch.psi.daq.hazelcast.query.range.QueryRangeImpl;
-import ch.psi.daq.rest.queries.BinningStrategyEnum;
-import ch.psi.daq.rest.queries.TimeRangeQuery;
-
 import com.google.common.collect.Lists;
 import com.google.common.collect.Sets;
 
+import ch.psi.daq.cassandra.reader.Ordering;
+import ch.psi.daq.hazelcast.query.AggregationType;
+import ch.psi.daq.hazelcast.query.BinningStrategyEnum;
+import ch.psi.daq.hazelcast.query.TimeRangeQuery;
+import ch.psi.daq.hazelcast.query.range.QueryRange;
+import ch.psi.daq.hazelcast.query.range.QueryRangeImpl;
+
 @RunWith(BlockJUnit4ClassRunner.class)
 public class TimeRangeQueryTest extends AbstractQueryTest {
 
@@ -10,14 +10,14 @@ import java.util.stream.Collectors;
 import java.util.stream.IntStream;
 import java.util.stream.Stream;
 
+import com.google.common.collect.Lists;
+import com.google.common.collect.Maps;
+
 import ch.psi.daq.domain.cassandra.ChannelEvent;
 import ch.psi.daq.domain.cassandra.DataEvent;
 import ch.psi.daq.hazelcast.query.Query;
 import ch.psi.daq.hazelcast.query.processor.QueryProcessor;
 
-import com.google.common.collect.Lists;
-import com.google.common.collect.Maps;
-
 /**
  * @author zellweger_c
  *