diff --git a/.settings/org.eclipse.jdt.core.prefs b/.settings/org.eclipse.jdt.core.prefs
index 2ce877b..dc55a29 100644
--- a/.settings/org.eclipse.jdt.core.prefs
+++ b/.settings/org.eclipse.jdt.core.prefs
@@ -1,5 +1,5 @@
 #
-#Mon Jul 20 15:36:19 CEST 2015
+#Fri Jul 24 14:48:03 CEST 2015
 org.eclipse.jdt.core.compiler.debug.localVariable=generate
 org.eclipse.jdt.core.compiler.compliance=1.8
 org.eclipse.jdt.core.compiler.codegen.unusedLocal=preserve
diff --git a/src/main/java/ch/psi/daq/queryrest/config/QueryRestConfig.java b/src/main/java/ch/psi/daq/queryrest/config/QueryRestConfig.java
index 7e49ccd..c03ce72 100644
--- a/src/main/java/ch/psi/daq/queryrest/config/QueryRestConfig.java
+++ b/src/main/java/ch/psi/daq/queryrest/config/QueryRestConfig.java
@@ -3,7 +3,10 @@ package ch.psi.daq.queryrest.config;
 import java.util.LinkedHashSet;
 import java.util.Set;
 
-import org.springframework.beans.factory.annotation.Autowired;
+import javax.annotation.Resource;
+
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 import org.springframework.context.annotation.Bean;
 import org.springframework.context.annotation.Configuration;
 import org.springframework.context.annotation.Import;
@@ -13,12 +16,17 @@ import org.springframework.util.StringUtils;
 
 import com.fasterxml.jackson.annotation.JsonInclude.Include;
 import com.fasterxml.jackson.core.JsonFactory;
+import com.fasterxml.jackson.core.Version;
 import com.fasterxml.jackson.databind.ObjectMapper;
+import com.fasterxml.jackson.databind.module.SimpleModule;
 
+import ch.psi.daq.cassandra.util.test.CassandraDataGen;
+import ch.psi.daq.common.json.deserialize.AttributeBasedDeserializer;
 import ch.psi.daq.common.statistic.StorelessStatistics;
 import ch.psi.daq.domain.cassandra.ChannelEvent;
-import ch.psi.daq.query.config.QueryClientConfig;
 import ch.psi.daq.query.config.QueryConfig;
+import ch.psi.daq.query.model.AbstractQuery;
+import ch.psi.daq.query.model.QueryField;
 import ch.psi.daq.queryrest.model.PropertyFilterMixin;
 import ch.psi.daq.queryrest.response.ResponseStreamWriter;
 
@@ -27,18 +35,44 @@ import ch.psi.daq.queryrest.response.ResponseStreamWriter;
 @PropertySource(value = {"file:${user.home}/.config/daq/queryrest.properties"}, ignoreResourceNotFound = true)
 public class QueryRestConfig {
 
-  @Autowired
-  private Environment env;
-
   // a nested configuration
   // this guarantees that the ordering of the properties file is as expected
   // see:
   // https://jira.spring.io/browse/SPR-10409?focusedCommentId=101393&page=com.atlassian.jira.plugin.system.issuetabpanels:comment-tabpanel#comment-101393
   @Configuration
-  @Import({QueryConfig.class, QueryClientConfig.class})
+  @Import({QueryConfig.class})
   static class InnerConfiguration {
   }
 
+  private static final Logger LOGGER = LoggerFactory.getLogger(QueryRestConfig.class);
+
+  public static final String BEAN_NAME_DEFAULT_RESPONSE_FIELDS = "defaultResponseFields";
+
+  @Resource
+  private Environment env;
+
+  @Bean
+  public ObjectMapper objectMapper() {
+    ObjectMapper mapper = new ObjectMapper();
+
+    String packageName = AbstractQuery.class.getPackage().getName();
+    Class<AbstractQuery> abstractQueryClass = AbstractQuery.class;
+    AttributeBasedDeserializer<AbstractQuery> abstractQueryDeserializer =
+        new AttributeBasedDeserializer<AbstractQuery>(abstractQueryClass).register(packageName);
+    SimpleModule module = new SimpleModule("PolymorphicAbstractQuery", Version.unknownVersion());
+    module.addDeserializer(abstractQueryClass, abstractQueryDeserializer);
+    mapper.registerModule(module);
+
+    // only include non-null values
+    mapper.setSerializationInclusion(Include.NON_NULL);
+    // Mixin which is used dynamically to filter out which properties get serialised and which
+    // won't. This way, the user can specify which columns are to be received.
+    mapper.addMixIn(ChannelEvent.class, PropertyFilterMixin.class);
+    mapper.addMixIn(StorelessStatistics.class, PropertyFilterMixin.class);
+
+    return mapper;
+  }
+
   @Bean
   public JsonFactory jsonFactory() {
     return new JsonFactory();
@@ -49,27 +83,28 @@ public class QueryRestConfig {
     return new ResponseStreamWriter();
   }
 
-  @Bean
-  public ObjectMapper objectMapper() {
-    ObjectMapper mapper = new ObjectMapper();
-    // only include non-null values
-    mapper.setSerializationInclusion(Include.NON_NULL);
-    // Mixin which is used dynamically to filter out which properties get serialised and which
-    // won't. This way, the user can specify which columns are to be received.
-    mapper.addMixIn(ChannelEvent.class, PropertyFilterMixin.class);
-    mapper.addMixIn(StorelessStatistics.class, PropertyFilterMixin.class);
-    return mapper;
-  }
-
-  @Bean
-  public Set<String> defaultResponseFields() {
-    String[] responseFields = StringUtils.commaDelimitedListToStringArray(env.getProperty("queryrest.default.response.fields"));
+  @Bean(name = BEAN_NAME_DEFAULT_RESPONSE_FIELDS)
+  public Set<QueryField> defaultResponseFields() {
+    String[] responseFields =
+        StringUtils.commaDelimitedListToStringArray(env.getProperty("queryrest.default.response.fields"));
     // preserve order
-    LinkedHashSet<String> defaultResponseFields = new LinkedHashSet<>(responseFields.length);
-    for (String field : defaultResponseFields) {
-      defaultResponseFields.add(field);
+    LinkedHashSet<QueryField> defaultResponseFields = new LinkedHashSet<>(responseFields.length);
+    for (String field : responseFields) {
+      try {
+        defaultResponseFields.add(QueryField.valueOf(field));
+      } catch (Exception e) {
+        LOGGER.error("Field '{}' in queryrest.default.response.fields is invalid.", field, e);
+      }
    }
    return defaultResponseFields;
  }
+
+
+  // ==========================================================================================
+  // TODO: This is simply for initial / rudimentary testing - remove once further evolved
+  @Bean
+  public CassandraDataGen cassandraDataGen() {
+    return new CassandraDataGen();
+  }
 }
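The reworked objectMapper() bean registers a module so that a plain JSON body can be deserialized into the matching AbstractQuery subtype based on the attributes it contains. A minimal sketch of exercising that mapper outside the web layer; the JSON property names used below are illustrative assumptions, not taken from this change set:

```java
import com.fasterxml.jackson.databind.ObjectMapper;

import ch.psi.daq.query.model.AbstractQuery;
import ch.psi.daq.queryrest.config.QueryRestConfig;

public class PolymorphicQuerySketch {

  public static void main(String[] args) throws Exception {
    // The bean method has no dependencies, so it can be called directly for a quick check.
    ObjectMapper mapper = new QueryRestConfig().objectMapper();

    // Property names are assumed for illustration; the registered AttributeBasedDeserializer
    // is expected to resolve the concrete AbstractQuery subtype (e.g. a pulse-range query).
    String json = "{\"startPulseId\":100,\"endPulseId\":101}";
    AbstractQuery query = mapper.readValue(json, AbstractQuery.class);

    System.out.println(query.getClass().getSimpleName());
  }
}
```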
diff --git a/src/main/java/ch/psi/daq/queryrest/controller/QueryRestController.java b/src/main/java/ch/psi/daq/queryrest/controller/QueryRestController.java
index 4bd8cc1..2e6da3f 100644
--- a/src/main/java/ch/psi/daq/queryrest/controller/QueryRestController.java
+++ b/src/main/java/ch/psi/daq/queryrest/controller/QueryRestController.java
@@ -1,145 +1,114 @@
 package ch.psi.daq.queryrest.controller;
 
 import java.io.IOException;
-import java.util.Arrays;
-import java.util.List;
-import java.util.Map;
-import java.util.concurrent.CompletableFuture;
-import java.util.concurrent.TimeUnit;
-import java.util.function.LongFunction;
-import java.util.function.LongUnaryOperator;
-import java.util.stream.Collectors;
-import java.util.stream.LongStream;
+import java.util.Collection;
+import java.util.LinkedHashSet;
+import java.util.Map.Entry;
+import java.util.Set;
 import java.util.stream.Stream;
 
+import javax.annotation.Resource;
 import javax.servlet.http.HttpServletResponse;
 
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
-import org.springframework.beans.factory.annotation.Autowired;
-import org.springframework.util.Assert;
+import org.springframework.http.MediaType;
+import org.springframework.web.bind.annotation.PathVariable;
 import org.springframework.web.bind.annotation.RequestBody;
 import org.springframework.web.bind.annotation.RequestMapping;
+import org.springframework.web.bind.annotation.RequestMethod;
+import org.springframework.web.bind.annotation.ResponseBody;
 import org.springframework.web.bind.annotation.RestController;
 
-import ch.psi.daq.cassandra.writer.CassandraWriter;
-import ch.psi.daq.domain.DataType;
-import ch.psi.daq.domain.cassandra.ChannelEvent;
+import ch.psi.daq.cassandra.util.test.CassandraDataGen;
 import ch.psi.daq.domain.cassandra.DataEvent;
 import ch.psi.daq.query.model.AbstractQuery;
-import ch.psi.daq.query.model.PulseRangeQuery;
-import ch.psi.daq.query.model.TimeRangeQuery;
+import ch.psi.daq.query.model.QueryField;
 import ch.psi.daq.query.processor.QueryProcessor;
+import ch.psi.daq.queryrest.config.QueryRestConfig;
 import ch.psi.daq.queryrest.response.ResponseStreamWriter;
 
 @RestController
 public class QueryRestController {
 
-  private static final Logger logger = LoggerFactory.getLogger(QueryRestController.class);
-
-  @Autowired
-  private CassandraWriter cassandraWriter;
-
-  @Autowired
+  private static final Logger LOGGER = LoggerFactory.getLogger(QueryRestController.class);
+
+  public static final String CHANNELS = "channels";
+  public static final String CHANNELS_REGEX = CHANNELS + "/{regex}";
+  public static final String QUERY = "query";
+
+  @Resource
   private ResponseStreamWriter responseStreamWriter;
 
-  @Autowired
+  @Resource
   private QueryProcessor queryProcessor;
-
-  @RequestMapping(value = "/pulserange")
-  public void pulseRange(@RequestBody PulseRangeQuery query, HttpServletResponse res) throws IOException {
+  @Resource(name = QueryRestConfig.BEAN_NAME_DEFAULT_RESPONSE_FIELDS)
+  private Set<QueryField> defaultResponseFields;
 
-    logger.debug("PulseRangeQuery received: {}", query);
-
-    executeQuery(query, res);
+  @RequestMapping(
+      value = CHANNELS,
+      method = RequestMethod.GET,
+      produces = {MediaType.APPLICATION_JSON_VALUE})
+  public @ResponseBody Collection<String> getChannels() throws Throwable {
+    try {
+      return queryProcessor.getChannels();
+    } catch (Throwable t) {
+      LOGGER.error("Failed to query channel names.", t);
+      throw t;
+    }
   }
-
-  @RequestMapping(value = "/timerange")
-  public void timeRange(@RequestBody TimeRangeQuery query, HttpServletResponse res) throws IOException {
-
-    logger.debug("TimeRangeQuery received: {}", query);
-
-    executeQuery(query, res);
+  @RequestMapping(
+      value = CHANNELS_REGEX,
+      method = RequestMethod.GET,
+      produces = {MediaType.APPLICATION_JSON_VALUE})
+  public @ResponseBody Collection<String> getChannels(@PathVariable String regex) throws Throwable {
+    try {
+      return queryProcessor.getChannels(regex);
+    } catch (Throwable t) {
+      LOGGER.error("Failed to query channel names with regex '{}'.", regex, t);
+      throw t;
+    }
   }
-
-  private void executeQuery(AbstractQuery query, HttpServletResponse res) throws IOException {
+  @RequestMapping(
+      value = QUERY,
+      method = RequestMethod.POST,
+      consumes = {MediaType.APPLICATION_JSON_VALUE},
+      produces = {MediaType.APPLICATION_JSON_VALUE})
+  public void executeQuery(@RequestBody AbstractQuery query, HttpServletResponse res) throws IOException {
+    try {
+      LOGGER.debug("Execute query '{}'", query.getClass().getSimpleName());
 
-    // all the magic happens here
-    Map<String, Stream<? extends DataEvent>> process = queryProcessor.process(query);
+      validateQuery(query);
 
-    Stream<? extends DataEvent> flatStreams = process.values().stream().flatMap(s -> {
-      return s;
-    });
+      // all the magic happens here
+      Stream<Entry<String, Stream<? extends DataEvent>>> process = queryProcessor.process(query);
 
-    // write the response back to the client using java 8 streams
-    responseStreamWriter.respond(flatStreams, query, res);
+      // write the response back to the client using java 8 streams
+      responseStreamWriter.respond(process, query, res);
+    } catch (Throwable t) {
+      LOGGER.error("Failed to execute query '{}'.", query, t);
+      throw t;
+    }
+  }
+
+  // TODO: can we do this with built-in validators?
+  private void validateQuery(AbstractQuery query) {
+    if (query.getFields() == null || query.getFields().isEmpty()) {
+      query.setFields(new LinkedHashSet<>(defaultResponseFields));
+    }
   }
 
   // ==========================================================================================
-  // TODO this is simply a method for initial / rudimentary testing - remove once further evolved
+  // TODO: This is simply for initial / rudimentary testing - remove once further evolved
+
+  @Resource
+  private CassandraDataGen cassandraDataGen;
+
   @RequestMapping(value = "/write")
-  public long writeDummyEntry() {
-
-    long startIndex = System.currentTimeMillis();
-
-    writeData(3,startIndex, 100, new String[]{"channel1", "channel2"});
-
-    return startIndex;
-  }
-
-  private void writeData(int dataReplication, long startIndex, long nrOfElements,
-      String... channelNames) {
-    writeData(dataReplication, startIndex, nrOfElements,
-        i -> 2 * i,
-        i -> 2 * i,
-        i -> 2 * i,
-        i -> 2 * i,
-        i -> 2 * i,
-        i -> Long.valueOf(2 * i),
-        channelNames);
-  }
-
-  private void writeData(int dataReplication, long startIndex, long nrOfElements,
-      LongUnaryOperator iocMillis, LongUnaryOperator iocNanos, LongUnaryOperator pulseIds,
-      LongUnaryOperator globalMillis, LongUnaryOperator globalNanos, LongFunction valueFunction,
-      String... channelNames) {
-
-    Assert.notNull(channelNames);
-
-    CompletableFuture future;
-
-    List<ChannelEvent> events =
-        Arrays.stream(channelNames)
-            .parallel()
-            .flatMap(
-                channelName -> {
-                  Stream<ChannelEvent> stream =
-                      LongStream
-                          .range(startIndex, startIndex + nrOfElements)
-                          .parallel()
-                          .mapToObj(
-                              i -> {
-                                Object value = valueFunction.apply(i);
-                                return new ChannelEvent(channelName,
-                                    iocMillis.applyAsLong(i),
-                                    iocNanos.applyAsLong(i),
-                                    pulseIds.applyAsLong(i),
-                                    globalMillis.applyAsLong(i),
-                                    globalNanos.applyAsLong(i),
-                                    value,
-                                    DataType.getTypeName(value.getClass())
-                                    );
-                              }
-                          );
-
-                  return stream;
-                })
-            .collect(Collectors.toList());
-
-    future = cassandraWriter.writeAsync(dataReplication, (int) TimeUnit.HOURS.toSeconds(1), events);
-    future.join();
+  public void writeDummyEntry() {
+    cassandraDataGen.writeData(3, 0, 100, "channel1", "channel2");
   }
 }
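With the controller reshaped this way, the service exposes GET channels, GET channels/{regex}, and a query endpoint that takes the query as a JSON body and streams back one object per channel. A client-side sketch against a locally running instance; the base URL, port, and JSON property names in the request body are assumptions:

```java
import java.net.URI;
import java.util.Arrays;

import org.springframework.http.MediaType;
import org.springframework.http.RequestEntity;
import org.springframework.http.ResponseEntity;
import org.springframework.web.client.RestTemplate;

public class QueryRestClientSketch {

  public static void main(String[] args) {
    String base = "http://localhost:8080"; // assumed host/port
    RestTemplate rest = new RestTemplate();

    // GET /channels and GET /channels/{regex} return channel names as JSON arrays.
    String[] all = rest.getForObject(base + "/channels", String[].class);
    String[] some = rest.getForObject(base + "/channels/{regex}", String[].class, "testChannel.*");
    System.out.println(Arrays.toString(all) + " " + Arrays.toString(some));

    // POST /query with a JSON query body; the response is an array of
    // {"channel": "...", "values": [...]} objects. Body property names are assumed.
    RequestEntity<String> request = RequestEntity
        .post(URI.create(base + "/query"))
        .contentType(MediaType.APPLICATION_JSON)
        .body("{\"startPulseId\":100,\"endPulseId\":101}");
    ResponseEntity<String> response = rest.exchange(request, String.class);
    System.out.println(response.getBody());
  }
}
```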
diff --git a/src/main/java/ch/psi/daq/queryrest/model/PropertyFilterMixin.java b/src/main/java/ch/psi/daq/queryrest/model/PropertyFilterMixin.java
index 62a1af0..5779cfd 100644
--- a/src/main/java/ch/psi/daq/queryrest/model/PropertyFilterMixin.java
+++ b/src/main/java/ch/psi/daq/queryrest/model/PropertyFilterMixin.java
@@ -3,7 +3,7 @@ package ch.psi.daq.queryrest.model;
 import com.fasterxml.jackson.annotation.JsonFilter;
 
 /**
- *
+ * Marker mixin used by the ObjectMapper to attach the named property filter.
 */
 @JsonFilter("namedPropertyFilter")
 public class PropertyFilterMixin {
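The mixin only carries the @JsonFilter annotation; the filter itself has to be supplied when the ObjectWriter is built (configureWriter(...) in ResponseStreamWriter, whose body is outside this diff). One way such a writer is typically assembled with Jackson, shown here as a sketch:

```java
import java.util.Set;

import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.databind.ObjectWriter;
import com.fasterxml.jackson.databind.ser.impl.SimpleBeanPropertyFilter;
import com.fasterxml.jackson.databind.ser.impl.SimpleFilterProvider;

public class NamedPropertyFilterSketch {

  // Serialises only the given properties of classes that carry (directly or via mixin)
  // @JsonFilter("namedPropertyFilter"), e.g. ChannelEvent through PropertyFilterMixin.
  public static ObjectWriter writerFor(ObjectMapper mapper, Set<String> includedFields) {
    SimpleFilterProvider filterProvider = new SimpleFilterProvider()
        .addFilter("namedPropertyFilter", SimpleBeanPropertyFilter.filterOutAllExcept(includedFields));
    return mapper.writer(filterProvider);
  }
}
```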
diff --git a/src/main/java/ch/psi/daq/queryrest/response/ResponseStreamWriter.java b/src/main/java/ch/psi/daq/queryrest/response/ResponseStreamWriter.java
index e8f5ad1..25a8053 100644
--- a/src/main/java/ch/psi/daq/queryrest/response/ResponseStreamWriter.java
+++ b/src/main/java/ch/psi/daq/queryrest/response/ResponseStreamWriter.java
@@ -2,14 +2,16 @@ package ch.psi.daq.queryrest.response;
 
 import java.io.IOException;
 import java.util.LinkedHashSet;
+import java.util.List;
 import java.util.Set;
+import java.util.Map.Entry;
 import java.util.stream.Stream;
 
+import javax.annotation.Resource;
 import javax.servlet.ServletResponse;
 
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
-import org.springframework.beans.factory.annotation.Autowired;
 
 import com.fasterxml.jackson.core.JsonEncoding;
 import com.fasterxml.jackson.core.JsonFactory;
@@ -22,6 +24,7 @@ import com.fasterxml.jackson.databind.ser.impl.SimpleFilterProvider;
 import ch.psi.daq.domain.cassandra.DataEvent;
 import ch.psi.daq.query.model.AbstractQuery;
 import ch.psi.daq.query.model.AggregationEnum;
+import ch.psi.daq.query.model.QueryField;
 
 /**
  * Takes a Java 8 stream and writes it to the output stream provided by the {@link ServletResponse}
@@ -31,15 +34,12 @@ public class ResponseStreamWriter {
 
   private static final Logger logger = LoggerFactory.getLogger(ResponseStreamWriter.class);
 
-  @Autowired
+  @Resource
   private JsonFactory jsonFactory;
 
-  @Autowired
+  @Resource
   private ObjectMapper mapper;
 
-  @Autowired
-  private Set<String> defaultResponseFields;
-
   /**
    * Responding with the the contents of the stream by writing into the output stream of the
    * {@link ServletResponse}.
@@ -49,16 +49,26 @@ public class ResponseStreamWriter {
    * @param response {@link ServletResponse} instance given by the current HTTP request
    * @throws IOException thrown if writing to the output stream fails
    */
-  public void respond(Stream<? extends DataEvent> stream, AbstractQuery query, ServletResponse response) throws IOException {
+  public void respond(Stream<Entry<String, Stream<? extends DataEvent>>> stream, AbstractQuery query,
+      ServletResponse response) throws IOException {
 
-    Set<String> includedFields = query.getFieldsOrDefault(defaultResponseFields);
+    Set<QueryField> queryFields = query.getFields();
+    List<AggregationEnum> aggregations = query.getAggregations();
 
-    if (query.getAggregations() != null) {
-      includedFields = new LinkedHashSet<String>(includedFields);
+    Set<String> includedFields =
+        new LinkedHashSet<String>(queryFields.size() + (aggregations != null ? aggregations.size() : 0));
+
+    for (QueryField field : queryFields) {
+      includedFields.add(field.name());
+    }
+    if (aggregations != null) {
       for (AggregationEnum aggregation : query.getAggregations()) {
         includedFields.add(aggregation.name());
       }
     }
+    // do not write channel since it is already provided as key in mapping
+    includedFields.remove(QueryField.channel.name());
+
     ObjectWriter writer = configureWriter(includedFields);
     respondInternal(stream, response, writer);
   }
@@ -87,20 +97,39 @@ public class ResponseStreamWriter {
    * @param writer configured writer that includes the fields the end user wants to see
    * @throws IOException thrown if writing to the output stream fails
    */
-  private void respondInternal(Stream<? extends DataEvent> stream, ServletResponse response, ObjectWriter writer)
+  private void respondInternal(Stream<Entry<String, Stream<? extends DataEvent>>> stream, ServletResponse response,
+      ObjectWriter writer)
       throws IOException {
 
     JsonGenerator generator = jsonFactory.createGenerator(response.getOutputStream(), JsonEncoding.UTF8);
     generator.writeStartArray();
-    stream.forEach(ds -> {
-      try {
-        logger.trace("Writing value for: {}", ds);
-        // use the writer created just before
-        writer.writeValue(generator, ds);
-      } catch (Exception e) {
-        logger.error("", e);
-      }
-    });
+    stream
+        /* ensure elements are sequentially written to the stream */
+        .sequential()
+        .forEach(
+            entry -> {
+              try {
+                generator.writeStartObject();
+                generator.writeStringField(QueryField.channel.name(), entry.getKey());
+                generator.writeArrayFieldStart("values");
+                entry.getValue()
+                    /* ensure elements are sequentially written to the stream */
+                    .sequential()
+                    .forEach(
+                        dataEvent -> {
+                          try {
+                            writer.writeValue(generator, dataEvent);
+                          } catch (Exception e) {
+                            logger.error("Could not write event with pulse-id '{}' of channel '{}'",
+                                dataEvent.getPulseId(), entry.getKey(), e);
+                          }
+                        });
+                generator.writeEndArray();
+                generator.writeEndObject();
+              } catch (Exception e) {
+                logger.error("Could not write channel name of channel '{}'", entry.getKey(), e);
+              }
+            });
     generator.writeEndArray();
     generator.flush();
     generator.close();
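respondInternal(...) now nests each channel's events under a "values" array keyed by the channel name. A standalone sketch that produces the same JSON shape with plain Jackson (channel names and pulse-ids are dummy data); this is the structure the tests further below assert with jsonPath:

```java
import java.io.ByteArrayOutputStream;
import java.util.Arrays;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;

import com.fasterxml.jackson.core.JsonEncoding;
import com.fasterxml.jackson.core.JsonFactory;
import com.fasterxml.jackson.core.JsonGenerator;

public class ResponseShapeSketch {

  public static void main(String[] args) throws Exception {
    // Dummy per-channel pulse-ids standing in for the DataEvent streams.
    Map<String, List<Long>> data = new LinkedHashMap<>();
    data.put("testChannel1", Arrays.asList(100L, 101L));
    data.put("testChannel2", Arrays.asList(100L, 101L));

    ByteArrayOutputStream out = new ByteArrayOutputStream();
    JsonGenerator generator = new JsonFactory().createGenerator(out, JsonEncoding.UTF8);

    generator.writeStartArray();
    for (Map.Entry<String, List<Long>> entry : data.entrySet()) {
      generator.writeStartObject();
      generator.writeStringField("channel", entry.getKey());
      generator.writeArrayFieldStart("values");
      for (Long pulseId : entry.getValue()) {
        generator.writeStartObject();
        generator.writeNumberField("pulseId", pulseId);
        generator.writeEndObject();
      }
      generator.writeEndArray();
      generator.writeEndObject();
    }
    generator.writeEndArray();
    generator.close();

    // [{"channel":"testChannel1","values":[{"pulseId":100},{"pulseId":101}]}, ...]
    System.out.println(out.toString("UTF-8"));
  }
}
```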
diff --git a/src/test/java/ch/psi/daq/test/queryrest/AbstractDaqRestTest.java b/src/test/java/ch/psi/daq/test/queryrest/AbstractDaqRestTest.java
index 8a12d6e..5013654 100644
--- a/src/test/java/ch/psi/daq/test/queryrest/AbstractDaqRestTest.java
+++ b/src/test/java/ch/psi/daq/test/queryrest/AbstractDaqRestTest.java
@@ -1,12 +1,13 @@
 package ch.psi.daq.test.queryrest;
 
+import javax.annotation.Resource;
+
 import org.junit.Before;
 import org.junit.runner.RunWith;
 import org.mockito.MockitoAnnotations;
-import org.springframework.beans.factory.annotation.Autowired;
-import org.springframework.boot.test.SpringApplicationConfiguration;
 import org.springframework.test.annotation.DirtiesContext;
 import org.springframework.test.annotation.DirtiesContext.ClassMode;
+import org.springframework.test.context.ContextConfiguration;
 import org.springframework.test.context.TestExecutionListeners;
 import org.springframework.test.context.junit4.SpringJUnit4ClassRunner;
 import org.springframework.test.context.support.DependencyInjectionTestExecutionListener;
@@ -17,26 +18,26 @@ import org.springframework.web.context.WebApplicationContext;
 
 import com.fasterxml.jackson.databind.ObjectMapper;
 
-import ch.psi.daq.queryrest.QueryRestApplication;
 import ch.psi.daq.test.cassandra.CassandraDaqUnitDependencyInjectionTestExecutionListener;
 
 @TestExecutionListeners({
     CassandraDaqUnitDependencyInjectionTestExecutionListener.class,
     DependencyInjectionTestExecutionListener.class})
-@SpringApplicationConfiguration(classes = {QueryRestApplication.class, DaqWebMvcConfig.class})
+//@SpringApplicationConfiguration(classes = {QueryRestApplication.class, DaqWebMvcConfig.class})
 //@EmbeddedCassandra
 @DirtiesContext(classMode = ClassMode.AFTER_EACH_TEST_METHOD)
 @WebAppConfiguration
+@ContextConfiguration(classes = DaqWebMvcConfig.class)
 @RunWith(SpringJUnit4ClassRunner.class)
 public abstract class AbstractDaqRestTest {
 
-  @Autowired
+  @Resource
   protected WebApplicationContext webApplicationContext;
 
   protected MockMvc mockMvc;
 
-  @Autowired
+  @Resource
   protected ObjectMapper mapper;
 
   @Before
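The @Before set-up method referenced at the end of this hunk is not shown in the diff. A sketch of the initialisation such a base class typically performs, under the assumption that mockMvc is built from the injected web application context:

```java
import javax.annotation.Resource;

import org.junit.Before;
import org.springframework.test.web.servlet.MockMvc;
import org.springframework.test.web.servlet.setup.MockMvcBuilders;
import org.springframework.web.context.WebApplicationContext;

public abstract class MockMvcSetupSketch {

  @Resource
  protected WebApplicationContext webApplicationContext;

  protected MockMvc mockMvc;

  // Likely shape of the @Before method; the real body is outside the hunks shown above.
  @Before
  public void setUp() {
    mockMvc = MockMvcBuilders.webAppContextSetup(webApplicationContext).build();
  }
}
```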
diff --git a/src/test/java/ch/psi/daq/test/queryrest/DaqWebMvcConfig.java b/src/test/java/ch/psi/daq/test/queryrest/DaqWebMvcConfig.java
index 8a8ad74..2b463fd 100644
--- a/src/test/java/ch/psi/daq/test/queryrest/DaqWebMvcConfig.java
+++ b/src/test/java/ch/psi/daq/test/queryrest/DaqWebMvcConfig.java
@@ -3,20 +3,27 @@ package ch.psi.daq.test.queryrest;
 import org.springframework.context.annotation.Bean;
 import org.springframework.context.annotation.Configuration;
 import org.springframework.context.annotation.Import;
+import org.springframework.context.annotation.PropertySource;
+import org.springframework.context.annotation.PropertySources;
 import org.springframework.web.servlet.config.annotation.EnableWebMvc;
 import org.springframework.web.servlet.config.annotation.WebMvcConfigurationSupport;
 
 import ch.psi.daq.query.processor.QueryProcessor;
-import ch.psi.daq.queryrest.config.QueryRestConfig;
-import ch.psi.daq.test.cassandra.config.LocalCassandraTestConfig;
+import ch.psi.daq.test.query.config.LocalQueryTestConfig;
 import ch.psi.daq.test.queryrest.query.DummyQueryProcessor;
 
 @Configuration
-@Import(value = {LocalCassandraTestConfig.class, QueryRestConfig.class})
+@PropertySources(value = {
+    @PropertySource(value = {"classpath:queryrest-test.properties"})
+})
 @EnableWebMvc
 public class DaqWebMvcConfig extends WebMvcConfigurationSupport {
 
-  // add test-specific beans and configurations here
+  // ensure that properties in dispatcher.properties are loaded first and then overwritten by the
+  // properties in dispatcher-test.properties
+  @Import(value = {LocalQueryTestConfig.class})
+  static class InnerConfiguration {
+  }
 
   @Bean
   public QueryProcessor queryProcessor() {
diff --git a/src/test/java/ch/psi/daq/test/queryrest/controller/DaqRestControllerTest.java b/src/test/java/ch/psi/daq/test/queryrest/controller/DaqRestControllerTest.java
index 772370a..330987b 100644
--- a/src/test/java/ch/psi/daq/test/queryrest/controller/DaqRestControllerTest.java
+++ b/src/test/java/ch/psi/daq/test/queryrest/controller/DaqRestControllerTest.java
@@ -1,23 +1,14 @@
 package ch.psi.daq.test.queryrest.controller;
 
-import java.util.Date;
-import java.util.List;
-import java.util.concurrent.TimeUnit;
-
 import org.junit.Test;
 import org.springframework.http.MediaType;
 import org.springframework.test.web.servlet.request.MockMvcRequestBuilders;
 import org.springframework.test.web.servlet.result.MockMvcResultHandlers;
 import org.springframework.test.web.servlet.result.MockMvcResultMatchers;
 
-import com.fasterxml.jackson.core.JsonProcessingException;
-import com.google.common.collect.Lists;
-import com.google.common.collect.Sets;
-
-import ch.psi.daq.common.ordering.Ordering;
-import ch.psi.daq.query.model.AggregationType;
 import ch.psi.daq.query.model.PulseRangeQuery;
 import ch.psi.daq.query.model.TimeRangeQuery;
+import ch.psi.daq.query.model.TimeRangeQueryDate;
 import ch.psi.daq.test.queryrest.AbstractDaqRestTest;
 
 /**
@@ -25,24 +16,17 @@ import ch.psi.daq.test.queryrest.AbstractDaqRestTest;
  */
 public class DaqRestControllerTest extends AbstractDaqRestTest {
 
-  private static final List DEFAULT_PROPERTIES = Lists.newArrayList("channel", "pulseId");
-
   @Test
   public void testPulseRangeQuery() throws Exception {
     PulseRangeQuery request = new PulseRangeQuery(
         100,
         101
         );
-    request.setOrdering(Ordering.desc);
-    request.setFields(Sets.newLinkedHashSet(DEFAULT_PROPERTIES));
-    request.setNrOfBins(100);
-    request.setAggregateChannels(false);
-    request.setAggregationType(AggregationType.index);
 
     String content = mapper.writeValueAsString(request);
 
     this.mockMvc
-        .perform(MockMvcRequestBuilders.post("/pulserange")
+        .perform(MockMvcRequestBuilders.post("/query")
            .contentType(MediaType.APPLICATION_JSON)
            .content(content))
@@ -50,30 +34,27 @@ public class DaqRestControllerTest extends AbstractDaqRestTest {
        .andExpect(MockMvcResultMatchers.status().isOk())
        .andExpect(MockMvcResultMatchers.jsonPath("$").isArray())
        .andExpect(MockMvcResultMatchers.jsonPath("$[0]").exists())
-        .andExpect(MockMvcResultMatchers.jsonPath("$[0].pulseId").value(100))
-        .andExpect(MockMvcResultMatchers.jsonPath("$[0].channel").value("testChannel1"));
+        .andExpect(MockMvcResultMatchers.jsonPath("$[0].channel").value("testChannel1"))
+        .andExpect(MockMvcResultMatchers.jsonPath("$[0].values").isArray())
+        .andExpect(MockMvcResultMatchers.jsonPath("$[0].values[0].pulseId").value(100))
+        .andExpect(MockMvcResultMatchers.jsonPath("$[0].values[1].pulseId").value(101))
+        .andExpect(MockMvcResultMatchers.jsonPath("$[1]").exists())
+        .andExpect(MockMvcResultMatchers.jsonPath("$[1].channel").value("testChannel2"))
+        .andExpect(MockMvcResultMatchers.jsonPath("$[1].values").isArray())
+        .andExpect(MockMvcResultMatchers.jsonPath("$[1].values[0].pulseId").value(100))
+        .andExpect(MockMvcResultMatchers.jsonPath("$[1].values[1].pulseId").value(101));
  }
 
  @Test
  public void testTimeRangeQuery() throws Exception {
-    long startTime = new Date().getTime();
-    long endTime = startTime + TimeUnit.SECONDS.toMillis(1);
-
    TimeRangeQuery request = new TimeRangeQuery(
-        startTime,
-        endTime,
-        "test");
-    request.setOrdering(Ordering.asc);
-    request.setFields(Sets.newLinkedHashSet(DEFAULT_PROPERTIES));
-    request.setNrOfBins(100);
-    request.setAggregateChannels(false);
-    request.setAggregationType(AggregationType.index);
+        100,
+        101);
 
    String content = mapper.writeValueAsString(request);
-    System.out.println(content);
 
    this.mockMvc
-        .perform(MockMvcRequestBuilders.post("/timerange")
+        .perform(MockMvcRequestBuilders.post("/query")
            .contentType(MediaType.APPLICATION_JSON)
            .content(content))
@@ -81,24 +62,44 @@
        .andExpect(MockMvcResultMatchers.status().isOk())
        .andExpect(MockMvcResultMatchers.jsonPath("$").isArray())
        .andExpect(MockMvcResultMatchers.jsonPath("$[0]").exists())
-        .andExpect(MockMvcResultMatchers.jsonPath("$[0].channel").value("testChannel1"));
+        .andExpect(MockMvcResultMatchers.jsonPath("$[0].channel").value("testChannel1"))
+        .andExpect(MockMvcResultMatchers.jsonPath("$[0].values").isArray())
+        .andExpect(MockMvcResultMatchers.jsonPath("$[0].values[0].pulseId").value(100))
+        .andExpect(MockMvcResultMatchers.jsonPath("$[0].values[1].pulseId").value(101))
+        .andExpect(MockMvcResultMatchers.jsonPath("$[1]").exists())
+        .andExpect(MockMvcResultMatchers.jsonPath("$[1].channel").value("testChannel2"))
+        .andExpect(MockMvcResultMatchers.jsonPath("$[1].values").isArray())
+        .andExpect(MockMvcResultMatchers.jsonPath("$[1].values[0].pulseId").value(100))
+        .andExpect(MockMvcResultMatchers.jsonPath("$[1].values[1].pulseId").value(101));
  }
 
  @Test
-  public void testMapper() throws JsonProcessingException {
-    long startTime = new Date().getTime();
-    long endTime = startTime + TimeUnit.SECONDS.toMillis(1);
-    TimeRangeQuery request = new TimeRangeQuery(
-        startTime,
-        endTime,
-        "test");
-    request.setOrdering(Ordering.asc);
-    request.setFields(Sets.newLinkedHashSet(DEFAULT_PROPERTIES));
-    request.setNrOfBins(100);
-    request.setAggregateChannels(false);
-    request.setAggregationType(AggregationType.index);
+  public void testMapper() throws Exception {
+    String startDate = TimeRangeQueryDate.format(100);
+    String endDate = TimeRangeQueryDate.format(101);
+    TimeRangeQueryDate request = new TimeRangeQueryDate(
+        startDate,
+        endDate);
 
    String content = mapper.writeValueAsString(request);
-    System.out.println(content);
+
+    this.mockMvc
+        .perform(MockMvcRequestBuilders.post("/query")
+            .contentType(MediaType.APPLICATION_JSON)
+            .content(content))
+
+        .andDo(MockMvcResultHandlers.print())
+        .andExpect(MockMvcResultMatchers.status().isOk())
+        .andExpect(MockMvcResultMatchers.jsonPath("$").isArray())
+        .andExpect(MockMvcResultMatchers.jsonPath("$[0]").exists())
+        .andExpect(MockMvcResultMatchers.jsonPath("$[0].channel").value("testChannel1"))
+        .andExpect(MockMvcResultMatchers.jsonPath("$[0].values").isArray())
+        .andExpect(MockMvcResultMatchers.jsonPath("$[0].values[0].pulseId").value(100))
+        .andExpect(MockMvcResultMatchers.jsonPath("$[0].values[1].pulseId").value(101))
+        .andExpect(MockMvcResultMatchers.jsonPath("$[1]").exists())
+        .andExpect(MockMvcResultMatchers.jsonPath("$[1].channel").value("testChannel2"))
+        .andExpect(MockMvcResultMatchers.jsonPath("$[1].values").isArray())
+        .andExpect(MockMvcResultMatchers.jsonPath("$[1].values[0].pulseId").value(100))
+        .andExpect(MockMvcResultMatchers.jsonPath("$[1].values[1].pulseId").value(101));
  }
 }
diff --git a/src/test/java/ch/psi/daq/test/queryrest/query/DummyQueryProcessor.java b/src/test/java/ch/psi/daq/test/queryrest/query/DummyQueryProcessor.java
index f83a045..88a9a5a 100644
--- a/src/test/java/ch/psi/daq/test/queryrest/query/DummyQueryProcessor.java
+++ b/src/test/java/ch/psi/daq/test/queryrest/query/DummyQueryProcessor.java
@@ -3,9 +3,12 @@
  */
 package ch.psi.daq.test.queryrest.query;
 
+import java.util.Collection;
 import java.util.List;
 import java.util.Map;
+import java.util.Map.Entry;
 import java.util.UUID;
+import java.util.regex.Pattern;
 import java.util.stream.Collectors;
 import java.util.stream.IntStream;
 import java.util.stream.Stream;
@@ -24,20 +27,37 @@ import ch.psi.daq.query.processor.QueryProcessor;
  */
 public class DummyQueryProcessor implements QueryProcessor {
 
-  private static final List<String> TEST_CHANNEL_NAMES = Lists.newArrayList("testChannel1", "testChannel2");
+  public static final List<String> TEST_CHANNEL_NAMES = Lists.newArrayList("testChannel1", "testChannel2");
 
-  /** {@inheritDoc}
+  @Override
+  public Collection<String> getChannels() {
+    return TEST_CHANNEL_NAMES;
+  }
+
+  @Override
+  public Collection<String> getChannels(String regex) {
+    if (regex != null) {
+      Pattern pattern = Pattern.compile(regex);
+      return TEST_CHANNEL_NAMES.stream().filter(channel -> pattern.matcher(channel).find())
+          .collect(Collectors.toList());
+    } else {
+      return getChannels();
+    }
+  }
+
+  /**
+   * {@inheritDoc}
    */
   @Override
-  public Map<String, Stream<? extends DataEvent>> process(Query query) {
-
+  public Stream<Entry<String, Stream<? extends DataEvent>>> process(Query query) {
+    int nrOfEntries = 2;
     int startingPulseId = 100;
-
+    Map<String, Stream<? extends DataEvent>> result = Maps.newHashMap();
-
-    TEST_CHANNEL_NAMES.forEach(chName ->{
-
+
+    TEST_CHANNEL_NAMES.forEach(chName -> {
+      List<ChannelEvent> entities = IntStream.rangeClosed(1, nrOfEntries).boxed().map(i -> {
        long pulseId = startingPulseId + i - 1;
        return new ChannelEvent(
@@ -49,11 +69,10 @@ public class DummyQueryProcessor implements QueryProcessor {
            0,
            "data_" + UUID.randomUUID().toString());
      }).collect(Collectors.toList());
-
+
      result.put(chName, entities.stream());
    });
-
-    return result;
-  }
+    return result.entrySet().stream();
+  }
 }
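getChannels(String) in the dummy processor uses Matcher.find(), so the regex only has to hit a substring of the channel name rather than match it completely. A small self-contained illustration of that behaviour with made-up channel names:

```java
import java.util.Arrays;
import java.util.List;
import java.util.regex.Pattern;
import java.util.stream.Collectors;

public class ChannelRegexSketch {

  public static void main(String[] args) {
    List<String> channels = Arrays.asList("testChannel1", "testChannel2", "otherChannel");

    // Same find()-based filtering as DummyQueryProcessor.getChannels(String):
    // the regex matches a substring, not necessarily the entire name.
    Pattern pattern = Pattern.compile("Channel[12]$");
    List<String> matching = channels.stream()
        .filter(channel -> pattern.matcher(channel).find())
        .collect(Collectors.toList());

    System.out.println(matching); // [testChannel1, testChannel2]
  }
}
```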
diff --git a/src/test/resources/queryrest-test.properties b/src/test/resources/queryrest-test.properties
new file mode 100644
index 0000000..e69de29