executeChannelInfoQuery(@RequestBody ChannelNameRequest request)
+ throws Throwable {
+ return queryManager.getChannelInfos(request);
}
/**
@@ -256,7 +129,7 @@ public class QueryRestController {
* {@link #executeQuery(DAQQuery, HttpServletResponse)} fails
*/
@RequestMapping(
- value = PATH_QUERY,
+ value = DomainConfig.PATH_QUERY,
method = RequestMethod.GET)
public void executeQueryBodyAsString(@RequestParam String jsonBody, HttpServletResponse res) throws Exception {
DAQQuery query = objectMapper.readValue(jsonBody, DAQQuery.class);
@@ -271,7 +144,7 @@ public class QueryRestController {
* @throws Exception thrown if writing to the output stream fails
*/
@RequestMapping(
- value = PATH_QUERY,
+ value = DomainConfig.PATH_QUERY,
method = RequestMethod.POST,
consumes = {MediaType.APPLICATION_JSON_VALUE})
public void executeQuery(@RequestBody @Valid DAQQuery query, HttpServletResponse res) throws Exception {
@@ -288,7 +161,7 @@ public class QueryRestController {
* {@link #executeQueries(DAQQueries, HttpServletResponse)} fails
*/
@RequestMapping(
- value = PATH_QUERIES,
+ value = DomainConfig.PATH_QUERIES,
method = RequestMethod.GET)
public void executeQueriesBodyAsString(@RequestParam String jsonBody, HttpServletResponse res) throws Exception {
DAQQueries queries = objectMapper.readValue(jsonBody, DAQQueries.class);
@@ -299,8 +172,8 @@ public class QueryRestController {
* Catch-all query method for getting data from the backend for both JSON and CSV requests.
*
* The {@link DAQQueries} object will contain the concrete subclass based on the combination of
- * fields defined in the user's query. The AttributeBasedDeserializer decides which class
- * to deserialize the information into and has been configured (see
+ * fields defined in the user's query. The AttributeBasedDeserializer decides which class to
+ * deserialize the information into and has been configured (see
* QueryRestConfig#afterPropertiesSet) accordingly.
*
* @param queries the {@link DAQQueries}
@@ -308,7 +181,7 @@ public class QueryRestController {
* @throws Exception thrown if writing to the output stream fails
*/
@RequestMapping(
- value = PATH_QUERIES,
+ value = DomainConfig.PATH_QUERIES,
method = RequestMethod.POST,
consumes = {MediaType.APPLICATION_JSON_VALUE})
public void executeQueries(@RequestBody @Valid DAQQueries queries, HttpServletResponse res) throws Exception {
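For orientation, a minimal sketch of driving this endpoint the same way the tests in this change do. The JSON body is copied verbatim from testTimeRangeQuery_02 further down; mockMvc is assumed to be provided by AbstractDaqRestTest, and the class and method names here are illustrative only:

    import org.springframework.http.MediaType;
    import org.springframework.test.web.servlet.request.MockMvcRequestBuilders;
    import org.springframework.test.web.servlet.result.MockMvcResultMatchers;

    import ch.psi.daq.domain.config.DomainConfig;
    import ch.psi.daq.test.queryrest.AbstractDaqRestTest;

    public class QueryEndpointSketchTest extends AbstractDaqRestTest {
        @org.junit.Test
        public void postsAQuery() throws Exception {
            // Per the Javadoc above, the combination of fields in the body (here
            // startSeconds/endSeconds) determines the concrete class it is deserialized into.
            String content =
                "{\"channels\":[{\"name\":\"testChannel1\"},{\"name\":\"testChannel2\"}],"
                    + "\"fields\":[],\"ordering\":\"asc\","
                    + "\"range\":{\"startSeconds\":\"2.0\",\"endSeconds\":\"2.01\"}}";

            this.mockMvc
                .perform(MockMvcRequestBuilders
                    .post(DomainConfig.PATH_QUERY)
                    .contentType(MediaType.APPLICATION_JSON)
                    .content(content))
                .andExpect(MockMvcResultMatchers.status().isOk());
        }
    }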
@@ -396,7 +269,7 @@ public class QueryRestController {
public @ResponseBody List<Backend> getDBModeValues() {
return Lists.newArrayList(Backend.values());
}
-
+
/**
* Returns the current list of {@link Compression}s available.
*
diff --git a/src/main/java/ch/psi/daq/queryrest/query/QueryManager.java b/src/main/java/ch/psi/daq/queryrest/query/QueryManager.java
index 597f96c..2d5a0ba 100644
--- a/src/main/java/ch/psi/daq/queryrest/query/QueryManager.java
+++ b/src/main/java/ch/psi/daq/queryrest/query/QueryManager.java
@@ -1,5 +1,6 @@
package ch.psi.daq.queryrest.query;
+import java.util.Collection;
import java.util.List;
import java.util.Map.Entry;
import java.util.stream.Stream;
@@ -7,16 +8,20 @@ import java.util.stream.Stream;
import org.apache.commons.lang3.tuple.Triple;
import ch.psi.daq.domain.json.ChannelName;
+import ch.psi.daq.domain.json.channels.info.ChannelInfos;
+import ch.psi.daq.domain.query.ChannelNameRequest;
import ch.psi.daq.domain.query.DAQQueries;
import ch.psi.daq.domain.query.DAQQueryElement;
+import ch.psi.daq.domain.query.backend.BackendQuery;
import ch.psi.daq.domain.query.channels.ChannelsRequest;
import ch.psi.daq.domain.query.channels.ChannelsResponse;
-import ch.psi.daq.query.model.impl.BackendQuery;
public interface QueryManager {
List<ChannelsResponse> getChannels(ChannelsRequest request) throws Exception;
-
- List<Entry<DAQQueryElement, Stream<Triple<BackendQuery, ChannelName, ?>>>> executeQueries(DAQQueries queries)
+
+ Collection<ChannelInfos> getChannelInfos(ChannelNameRequest request) throws Exception;
+
+ List<Entry<DAQQueryElement, Stream<Triple<BackendQuery, ChannelName, ?>>>> getEvents(DAQQueries queries)
throws Exception;
-}
+}
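A caller of the new getChannelInfos method would iterate the returned containers roughly as follows. This sketch uses only accessors that appear elsewhere in this change (getChannel, getChannelInfos, getPulseId, getType); the class and method names are illustrative:

    import java.util.Collection;

    import ch.psi.daq.domain.json.channels.info.ChannelInfos;
    import ch.psi.daq.domain.query.ChannelNameRequest;
    import ch.psi.daq.queryrest.query.QueryManager;

    public final class ChannelInfoUsageSketch {
        public static void printInfos(QueryManager queryManager, ChannelNameRequest request) throws Exception {
            Collection<ChannelInfos> infosPerChannel = queryManager.getChannelInfos(request);
            for (ChannelInfos cInfos : infosPerChannel) {
                System.out.println(cInfos.getChannel()); // ChannelName (name + backend)
                cInfos.getChannelInfos()                 // Stream<ChannelInfo>, consumable once
                    .forEach(info -> System.out.println(info.getPulseId() + " " + info.getType()));
            }
        }
    }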
diff --git a/src/main/java/ch/psi/daq/queryrest/query/QueryManagerImpl.java b/src/main/java/ch/psi/daq/queryrest/query/QueryManagerImpl.java
index 02bf407..1059e26 100644
--- a/src/main/java/ch/psi/daq/queryrest/query/QueryManagerImpl.java
+++ b/src/main/java/ch/psi/daq/queryrest/query/QueryManagerImpl.java
@@ -1,9 +1,8 @@
package ch.psi.daq.queryrest.query;
import java.util.ArrayList;
-import java.util.LinkedHashMap;
+import java.util.Collection;
import java.util.List;
-import java.util.Map;
import java.util.Map.Entry;
import java.util.function.Function;
import java.util.stream.Collectors;
@@ -19,83 +18,45 @@ import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.context.ApplicationContext;
-import ch.psi.daq.cassandra.config.CassandraConfig;
import ch.psi.daq.domain.DataEvent;
+import ch.psi.daq.domain.backend.BackendAccess;
import ch.psi.daq.domain.config.DomainConfig;
import ch.psi.daq.domain.json.ChannelName;
+import ch.psi.daq.domain.json.channels.info.ChannelInfos;
+import ch.psi.daq.domain.query.ChannelNameRequest;
import ch.psi.daq.domain.query.DAQQueries;
import ch.psi.daq.domain.query.DAQQueryElement;
+import ch.psi.daq.domain.query.backend.BackendQuery;
+import ch.psi.daq.domain.query.backend.BackendQueryImpl;
+import ch.psi.daq.domain.query.backend.analyzer.BackendQueryAnalyzer;
+import ch.psi.daq.domain.query.channels.ChannelNameCache;
import ch.psi.daq.domain.query.channels.ChannelsRequest;
import ch.psi.daq.domain.query.channels.ChannelsResponse;
-import ch.psi.daq.domain.reader.Backend;
-import ch.psi.daq.query.analyzer.QueryAnalyzer;
-import ch.psi.daq.query.config.QueryConfig;
-import ch.psi.daq.query.model.Query;
-import ch.psi.daq.query.model.impl.BackendQuery;
-import ch.psi.daq.query.processor.ChannelNameCache;
-import ch.psi.daq.query.processor.QueryProcessor;
+import ch.psi.daq.domain.query.processor.QueryProcessor;
+import ch.psi.daq.queryrest.query.model.ChannelInfosStreamImpl;
public class QueryManagerImpl implements QueryManager {
+ @SuppressWarnings("unused")
private static final Logger LOGGER = LoggerFactory.getLogger(QueryManagerImpl.class);
-
+
@Resource
private ApplicationContext appContext;
-
+
@Resource
- private Function<Query, QueryAnalyzer> queryAnalizerFactory;
-
- private Map<Backend, QueryProcessor> queryProcessors = new LinkedHashMap<>();
+ private Function<BackendQuery, BackendQueryAnalyzer> queryAnalizerFactory;
+
+ @Resource(name = DomainConfig.BEAN_NAME_BACKEND_ACCESS)
+ private BackendAccess backendAccess;
+
+ @Resource(name = DomainConfig.BEAN_NAME_CHANNEL_NAME_CACHE)
private ChannelNameCache channelNameCache;
@PostConstruct
- public void afterPropertiesSet() {
- List<Exception> exceptions = new ArrayList<>();
+ public void afterPropertiesSet() {}
- try {
- QueryProcessor queryProcessor =
- appContext.getBean(QueryConfig.BEAN_NAME_CASSANDRA_QUERY_PROCESSOR, QueryProcessor.class);
- queryProcessors.put(queryProcessor.getBackend(), queryProcessor);
- } catch (Exception e) {
- exceptions.add(e);
- LOGGER.warn("");
- LOGGER.warn("##########");
- LOGGER.warn("Could not load query processor for cassandra.");
- LOGGER.warn("##########");
- LOGGER.warn("");
- }
-
- try {
- QueryProcessor queryProcessor =
- appContext.getBean(QueryConfig.BEAN_NAME_ARCHIVER_APPLIANCE_QUERY_PROCESSOR, QueryProcessor.class);
- queryProcessors.put(queryProcessor.getBackend(), queryProcessor);
- } catch (Exception e) {
- exceptions.add(e);
- LOGGER.warn("");
- LOGGER.warn("##########");
- LOGGER.warn("Could not load query processor for archiverappliance.");
- LOGGER.warn("##########");
- LOGGER.warn("");
- }
-
- if (queryProcessors.isEmpty()) {
- LOGGER.error("No query processor could be loaded! Exceptions were: ");
- for (Exception exception : exceptions) {
- LOGGER.error("", exception);
- }
-
- throw new RuntimeException("No Backends available!");
- }
-
- channelNameCache =
- new ChannelNameCache(queryProcessors, appContext.getBean(DomainConfig.BEAN_NAME_READ_TIMEOUT,
- Integer.class).longValue());
- }
-
@PreDestroy
- public void destroy() {
- channelNameCache.destroy();
- }
-
+ public void destroy() {}
+
@Override
public List<ChannelsResponse> getChannels(ChannelsRequest request) {
// in case not specified use defaults (e.g. GET)
@@ -105,9 +66,33 @@ public class QueryManagerImpl implements QueryManager {
return channelNameCache.getChannels(request);
}
-
+
+ public Collection<ChannelInfos> getChannelInfos(ChannelNameRequest request) {
+ // set backends if not defined yet
+ channelNameCache.setBackends(request.getChannels());
+
+ Stream<ChannelInfos> stream = request.getRequestsByBackend().entrySet().stream()
+ .filter(entry ->
+ backendAccess.hasDataReader(entry.getKey())
+ && backendAccess.hasChannelInfoReader(entry.getKey()))
+ .flatMap(entry -> {
+ return entry.getValue().getChannelInfos(entry.getKey(), backendAccess)
+ .entrySet().stream()
+ .map(innerEntry -> {
+ return new ChannelInfosStreamImpl(
+ new ChannelName(innerEntry.getKey(), entry.getKey()),
+ innerEntry.getValue()
+ );
+ }
+ );
+ });
+
+ // materialize
+ return stream.collect(Collectors.toList());
+ }
+
@Override
- public List<Entry<DAQQueryElement, Stream<Triple<BackendQuery, ChannelName, ?>>>> executeQueries(DAQQueries queries) {
+ public List<Entry<DAQQueryElement, Stream<Triple<BackendQuery, ChannelName, ?>>>> getEvents(DAQQueries queries) {
// set backends if not defined yet
channelNameCache.setBackends(queries);
@@ -116,32 +101,30 @@ public class QueryManagerImpl implements QueryManager {
for (DAQQueryElement queryElement : queries) {
Stream<Triple<BackendQuery, ChannelName, ?>> resultStreams =
- BackendQuery
+ BackendQueryImpl
.getBackendQueries(queryElement)
.stream()
- .filter(query -> {
- QueryProcessor processor = queryProcessors.get(query.getBackend());
- if (processor != null) {
- return true;
- } else {
- LOGGER.warn("There is no QueryProcessor available for '{}'", query.getBackend());
- return false;
- }
- })
- .flatMap(query -> {
- QueryProcessor processor = queryProcessors.get(query.getBackend());
- QueryAnalyzer queryAnalizer = queryAnalizerFactory.apply(query);
+ .filter(
+ query ->
+ backendAccess.hasDataReader(query.getBackend())
+ && backendAccess.hasQueryProcessor(query.getBackend())
+ )
+ .flatMap(
+ query -> {
+ QueryProcessor processor = backendAccess.getQueryProcessor(query.getBackend());
+ BackendQueryAnalyzer queryAnalizer = queryAnalizerFactory.apply(query);
- // all the magic happens here
- Stream<Entry<ChannelName, Stream<? extends DataEvent>>> channelToDataEvents =
- processor.process(queryAnalizer);
- // do post-process
- Stream<Entry<ChannelName, ?>> channelToData = queryAnalizer.postProcess(channelToDataEvents);
+ /* all the magic happens here */
+ Stream<Entry<ChannelName, Stream<? extends DataEvent>>> channelToDataEvents =
+ processor.process(queryAnalizer);
+ /* do post-process */
+ Stream<Entry<ChannelName, ?>> channelToData =
+ queryAnalizer.postProcess(channelToDataEvents);
- return channelToData.map(entry -> {
- return Triple.of(query, entry.getKey(), entry.getValue());
+ return channelToData.map(entry -> {
+ return Triple.of(query, entry.getKey(), entry.getValue());
+ });
});
- });
// Now we have a stream that loads elements sequential BackendQuery by BackendQuery.
// By materializing the outer Stream the elements of all BackendQuery are loaded async
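The result shape produced here (with the generics as written above) is what the response writers further down consume; a simplified sketch of such a consumer, with EventResultSketch/consumeResults being illustrative names:

    import java.util.List;
    import java.util.Map.Entry;
    import java.util.stream.Stream;

    import org.apache.commons.lang3.tuple.Triple;

    import ch.psi.daq.domain.json.ChannelName;
    import ch.psi.daq.domain.query.DAQQueryElement;
    import ch.psi.daq.domain.query.backend.BackendQuery;

    public final class EventResultSketch {
        public static void consumeResults(
                List<Entry<DAQQueryElement, Stream<Triple<BackendQuery, ChannelName, ?>>>> results) {
            for (Entry<DAQQueryElement, Stream<Triple<BackendQuery, ChannelName, ?>>> entry : results) {
                DAQQueryElement queryElement = entry.getKey();
                // The terminal forEach is what actually pulls the data,
                // BackendQuery by BackendQuery, as noted in the comment above.
                entry.getValue().forEach(triple ->
                    System.out.println(queryElement + ": " + triple.getMiddle()));
            }
        }
    }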
diff --git a/src/main/java/ch/psi/daq/queryrest/query/model/ChannelInfosStreamImpl.java b/src/main/java/ch/psi/daq/queryrest/query/model/ChannelInfosStreamImpl.java
new file mode 100644
index 0000000..3a2c952
--- /dev/null
+++ b/src/main/java/ch/psi/daq/queryrest/query/model/ChannelInfosStreamImpl.java
@@ -0,0 +1,44 @@
+package ch.psi.daq.queryrest.query.model;
+
+import java.util.Iterator;
+import java.util.stream.Stream;
+
+import com.fasterxml.jackson.annotation.JsonIgnore;
+
+import ch.psi.daq.domain.json.ChannelName;
+import ch.psi.daq.domain.json.channels.info.ChannelInfo;
+import ch.psi.daq.domain.json.channels.info.ChannelInfos;
+
+public class ChannelInfosStreamImpl implements ChannelInfos {
+ private ChannelName channel;
+ private Stream<? extends ChannelInfo> infos;
+
+ public ChannelInfosStreamImpl() {}
+
+ public ChannelInfosStreamImpl(ChannelName channel, Stream<? extends ChannelInfo> infos) {
+ this.channel = channel;
+ this.infos = infos;
+ }
+
+ @Override
+ public ChannelName getChannel() {
+ return channel;
+ }
+
+ public Stream<? extends ChannelInfo> getInfos() {
+ // can only be consumed once
+ return infos;
+ }
+
+ @JsonIgnore
+ @Override
+ public Iterator<ChannelInfo> iterator() {
+ return getChannelInfos().iterator();
+ }
+
+ @JsonIgnore
+ @Override
+ public Stream<ChannelInfo> getChannelInfos() {
+ return infos.map(info -> (ChannelInfo) info);
+ }
+}
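Since the implementation wraps a Stream, getInfos(), getChannelInfos() and iterator() can each be consumed only once per instance; callers needing repeated access should materialize the infos first. A small sketch (class and method names illustrative):

    import java.util.List;
    import java.util.stream.Collectors;

    import ch.psi.daq.domain.json.channels.info.ChannelInfo;
    import ch.psi.daq.queryrest.query.model.ChannelInfosStreamImpl;

    final class SingleConsumptionSketch {
        static List<ChannelInfo> materialize(ChannelInfosStreamImpl channelInfos) {
            List<ChannelInfo> materialized = channelInfos.getChannelInfos().collect(Collectors.toList());
            // A second call such as channelInfos.getChannelInfos().count() would throw
            // IllegalStateException ("stream has already been operated upon or closed").
            return materialized;
        }
    }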
diff --git a/src/main/java/ch/psi/daq/queryrest/response/ResponseStreamWriter.java b/src/main/java/ch/psi/daq/queryrest/response/ResponseStreamWriter.java
index d733234..1eb72e0 100644
--- a/src/main/java/ch/psi/daq/queryrest/response/ResponseStreamWriter.java
+++ b/src/main/java/ch/psi/daq/queryrest/response/ResponseStreamWriter.java
@@ -11,7 +11,7 @@ import org.apache.commons.lang3.tuple.Triple;
import ch.psi.daq.domain.json.ChannelName;
import ch.psi.daq.domain.query.DAQQueryElement;
-import ch.psi.daq.query.model.impl.BackendQuery;
+import ch.psi.daq.domain.query.backend.BackendQuery;
public interface ResponseStreamWriter {
diff --git a/src/main/java/ch/psi/daq/queryrest/response/csv/CSVHTTPResponse.java b/src/main/java/ch/psi/daq/queryrest/response/csv/CSVHTTPResponse.java
index 207268f..11a6b50 100644
--- a/src/main/java/ch/psi/daq/queryrest/response/csv/CSVHTTPResponse.java
+++ b/src/main/java/ch/psi/daq/queryrest/response/csv/CSVHTTPResponse.java
@@ -18,11 +18,12 @@ import ch.psi.daq.domain.FieldNames;
import ch.psi.daq.domain.json.ChannelName;
import ch.psi.daq.domain.query.DAQQueries;
import ch.psi.daq.domain.query.DAQQueryElement;
+import ch.psi.daq.domain.query.backend.BackendQuery;
+import ch.psi.daq.domain.query.backend.BackendQueryImpl;
import ch.psi.daq.domain.query.operation.AggregationType;
import ch.psi.daq.domain.query.operation.Compression;
import ch.psi.daq.domain.query.operation.QueryField;
import ch.psi.daq.domain.query.operation.ResponseFormat;
-import ch.psi.daq.query.model.impl.BackendQuery;
import ch.psi.daq.queryrest.query.QueryManager;
import ch.psi.daq.queryrest.response.AbstractHTTPResponse;
@@ -56,7 +57,7 @@ public class CSVHTTPResponse extends AbstractHTTPResponse {
// execute query
List<Entry<DAQQueryElement, Stream<Triple<BackendQuery, ChannelName, ?>>>> result =
- queryManager.executeQueries(queries);
+ queryManager.getEvents(queries);
// write the response back to the client using java 8 streams
streamWriter.respond(result, out);
} catch (Exception e) {
diff --git a/src/main/java/ch/psi/daq/queryrest/response/csv/CSVResponseStreamWriter.java b/src/main/java/ch/psi/daq/queryrest/response/csv/CSVResponseStreamWriter.java
index 52a66b3..bc307b4 100644
--- a/src/main/java/ch/psi/daq/queryrest/response/csv/CSVResponseStreamWriter.java
+++ b/src/main/java/ch/psi/daq/queryrest/response/csv/CSVResponseStreamWriter.java
@@ -34,12 +34,12 @@ import ch.psi.daq.common.stream.StreamMatcher;
import ch.psi.daq.domain.DataEvent;
import ch.psi.daq.domain.json.ChannelName;
import ch.psi.daq.domain.query.DAQQueryElement;
+import ch.psi.daq.domain.query.backend.BackendQuery;
+import ch.psi.daq.domain.query.backend.BackendQueryImpl;
+import ch.psi.daq.domain.query.backend.analyzer.BackendQueryAnalyzer;
import ch.psi.daq.domain.query.operation.Aggregation;
import ch.psi.daq.domain.query.operation.Extrema;
import ch.psi.daq.domain.query.operation.QueryField;
-import ch.psi.daq.query.analyzer.QueryAnalyzer;
-import ch.psi.daq.query.model.Query;
-import ch.psi.daq.query.model.impl.BackendQuery;
import ch.psi.daq.queryrest.response.ResponseStreamWriter;
/**
@@ -61,7 +61,7 @@ public class CSVResponseStreamWriter implements ResponseStreamWriter {
.getGlobalMillis() / 10L;
@Resource
- private Function<Query, QueryAnalyzer> queryAnalizerFactory;
+ private Function<BackendQuery, BackendQueryAnalyzer> queryAnalizerFactory;
@Override
public void respond(final List<Entry<DAQQueryElement, Stream<Triple<BackendQuery, ChannelName, ?>>>> results,
@@ -158,7 +158,7 @@ public class CSVResponseStreamWriter implements ResponseStreamWriter {
daqQuery.getAggregation() != null ? daqQuery.getAggregation().getAggregations() : null;
List<Extrema> extrema = daqQuery.getAggregation() != null ? daqQuery.getAggregation().getExtrema() : null;
- QueryAnalyzer queryAnalyzer = queryAnalizerFactory.apply(backendQuery);
+ BackendQueryAnalyzer queryAnalyzer = queryAnalizerFactory.apply(backendQuery);
for (QueryField field : queryFields) {
if (!(QueryField.value.equals(field) && queryAnalyzer.isAggregationEnabled())) {
diff --git a/src/main/java/ch/psi/daq/queryrest/response/json/JSONHTTPResponse.java b/src/main/java/ch/psi/daq/queryrest/response/json/JSONHTTPResponse.java
index 3bdeaa6..5e26b44 100644
--- a/src/main/java/ch/psi/daq/queryrest/response/json/JSONHTTPResponse.java
+++ b/src/main/java/ch/psi/daq/queryrest/response/json/JSONHTTPResponse.java
@@ -16,9 +16,9 @@ import org.springframework.http.MediaType;
import ch.psi.daq.domain.json.ChannelName;
import ch.psi.daq.domain.query.DAQQueries;
import ch.psi.daq.domain.query.DAQQueryElement;
+import ch.psi.daq.domain.query.backend.BackendQuery;
import ch.psi.daq.domain.query.operation.Compression;
import ch.psi.daq.domain.query.operation.ResponseFormat;
-import ch.psi.daq.query.model.impl.BackendQuery;
import ch.psi.daq.queryrest.query.QueryManager;
import ch.psi.daq.queryrest.response.AbstractHTTPResponse;
@@ -48,7 +48,7 @@ public class JSONHTTPResponse extends AbstractHTTPResponse {
JSONResponseStreamWriter streamWriter = context.getBean(JSONResponseStreamWriter.class);
// execute query
- List<Entry<DAQQueryElement, Stream<Triple<BackendQuery, ChannelName, ?>>>> result = queryManager.executeQueries(queries);
+ List<Entry<DAQQueryElement, Stream<Triple<BackendQuery, ChannelName, ?>>>> result = queryManager.getEvents(queries);
// write the response back to the client using java 8 streams
streamWriter.respond(result, out);
} catch (Exception e) {
diff --git a/src/main/java/ch/psi/daq/queryrest/response/json/JSONResponseStreamWriter.java b/src/main/java/ch/psi/daq/queryrest/response/json/JSONResponseStreamWriter.java
index 27a8195..c40dc24 100644
--- a/src/main/java/ch/psi/daq/queryrest/response/json/JSONResponseStreamWriter.java
+++ b/src/main/java/ch/psi/daq/queryrest/response/json/JSONResponseStreamWriter.java
@@ -25,10 +25,10 @@ import com.fasterxml.jackson.databind.ser.impl.SimpleFilterProvider;
import ch.psi.daq.domain.json.ChannelName;
import ch.psi.daq.domain.query.DAQQueryElement;
+import ch.psi.daq.domain.query.backend.BackendQuery;
import ch.psi.daq.domain.query.operation.Aggregation;
import ch.psi.daq.domain.query.operation.Extrema;
import ch.psi.daq.domain.query.operation.QueryField;
-import ch.psi.daq.query.model.impl.BackendQuery;
import ch.psi.daq.queryrest.response.ResponseStreamWriter;
/**
diff --git a/src/test/java/ch/psi/daq/test/queryrest/AbstractDaqRestTest.java b/src/test/java/ch/psi/daq/test/queryrest/AbstractDaqRestTest.java
index fd725a4..71f74f4 100644
--- a/src/test/java/ch/psi/daq/test/queryrest/AbstractDaqRestTest.java
+++ b/src/test/java/ch/psi/daq/test/queryrest/AbstractDaqRestTest.java
@@ -21,6 +21,7 @@ import com.fasterxml.jackson.databind.ObjectMapper;
import ch.psi.daq.queryrest.QueryRestApplication;
import ch.psi.daq.queryrest.config.QueryRestConfig;
import ch.psi.daq.test.cassandra.CassandraDaqUnitDependencyInjectionTestExecutionListener;
+import ch.psi.daq.test.queryrest.config.DaqWebMvcConfig;
@TestExecutionListeners({
diff --git a/src/test/java/ch/psi/daq/test/queryrest/DaqWebMvcConfig.java b/src/test/java/ch/psi/daq/test/queryrest/config/DaqWebMvcConfig.java
similarity index 55%
rename from src/test/java/ch/psi/daq/test/queryrest/DaqWebMvcConfig.java
rename to src/test/java/ch/psi/daq/test/queryrest/config/DaqWebMvcConfig.java
index cb512cd..768b5d0 100644
--- a/src/test/java/ch/psi/daq/test/queryrest/DaqWebMvcConfig.java
+++ b/src/test/java/ch/psi/daq/test/queryrest/config/DaqWebMvcConfig.java
@@ -1,4 +1,7 @@
-package ch.psi.daq.test.queryrest;
+package ch.psi.daq.test.queryrest.config;
+
+import javax.annotation.PostConstruct;
+import javax.annotation.Resource;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.ComponentScan;
@@ -10,17 +13,19 @@ import org.springframework.context.annotation.PropertySources;
import org.springframework.web.servlet.config.annotation.EnableWebMvc;
import org.springframework.web.servlet.config.annotation.WebMvcConfigurationSupport;
+import ch.psi.daq.archiverappliance.config.ArchiverApplianceConfig;
+import ch.psi.daq.cassandra.config.CassandraConfig;
import ch.psi.daq.cassandra.reader.CassandraReader;
+import ch.psi.daq.domain.backend.Backend;
+import ch.psi.daq.domain.backend.BackendAccess;
+import ch.psi.daq.domain.config.DomainConfig;
+import ch.psi.daq.domain.query.processor.QueryProcessor;
import ch.psi.daq.domain.reader.DataReader;
-import ch.psi.daq.domain.status.StatusReader;
import ch.psi.daq.query.config.QueryConfig;
-import ch.psi.daq.query.processor.QueryProcessor;
import ch.psi.daq.query.processor.QueryProcessorLocal;
import ch.psi.daq.test.query.config.LocalQueryTestConfig;
import ch.psi.daq.test.queryrest.query.DummyArchiverApplianceReader;
import ch.psi.daq.test.queryrest.query.DummyCassandraReader;
-import ch.psi.daq.test.queryrest.status.DummyArchiverApplianceStatusReader;
-import ch.psi.daq.test.queryrest.status.DummyCassandraStatusReader;
@Configuration
@ComponentScan
@@ -36,33 +41,33 @@ public class DaqWebMvcConfig extends WebMvcConfigurationSupport {
static class InnerConfiguration {
}
- @Bean(name = QueryConfig.BEAN_NAME_CASSANDRA_QUERY_PROCESSOR)
- @Lazy
- public QueryProcessor cassandraQueryProcessor() {
- return new QueryProcessorLocal(cassandraReader());
+ @Resource(name = DomainConfig.BEAN_NAME_BACKEND_ACCESS)
+ private BackendAccess backendAccess;
+
+ @PostConstruct
+ public void afterPropertiesSet() {
+ backendAccess.addStreamEventReaderSupplier(Backend.SF_DATABUFFER, () -> cassandraReader());
+ backendAccess.addChannelInfoReaderSupplier(Backend.SF_DATABUFFER, () -> cassandraReader());
+
+ backendAccess.addDataReaderSupplier(Backend.SF_ARCHIVERAPPLIANCE, () -> archiverApplianceReader());
}
- @Bean
+ // make sure we use a local QueryProcessor even for distributed calls -> no Hazelcast needs to be started
+ @Bean(name = QueryConfig.BEAN_NAME_QUERY_PROCESSOR_DISTRIBUTED)
+ @Lazy
+ public QueryProcessor distributedQueryProcessor() {
+ return new QueryProcessorLocal();
+ }
+
+ @Bean(name = CassandraConfig.BEAN_NAME_CASSANDRA_READER)
@Lazy
public CassandraReader cassandraReader() {
return new DummyCassandraReader();
}
- @Bean
+ @Bean(name = ArchiverApplianceConfig.BEAN_NAME_ARCHIVER_APPLIANCE_READER)
@Lazy
public DataReader archiverApplianceReader() {
return new DummyArchiverApplianceReader();
}
-
- @Bean(name = QueryConfig.BEAN_NAME_CASSANDRA_STATUS_READER)
- @Lazy
- public StatusReader cassandraStatusReader() {
- return new DummyCassandraStatusReader();
- }
-
- @Bean(name = QueryConfig.BEAN_NAME_ARCHIVER_APPLIANCE_STATUS_READER)
- @Lazy
- public StatusReader archiverApplianceStatusReader() {
- return new DummyArchiverApplianceStatusReader();
- }
}
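Non-test configurations would presumably register their readers through the same supplier-based BackendAccess calls used above; a compact sketch using only the methods visible in this change (class and method names illustrative, the reader construction a placeholder):

    import javax.annotation.PostConstruct;
    import javax.annotation.Resource;

    import org.springframework.context.annotation.Configuration;

    import ch.psi.daq.domain.backend.Backend;
    import ch.psi.daq.domain.backend.BackendAccess;
    import ch.psi.daq.domain.config.DomainConfig;
    import ch.psi.daq.domain.reader.DataReader;
    import ch.psi.daq.test.queryrest.query.DummyArchiverApplianceReader;

    @Configuration
    public class ReaderRegistrationSketch {
        @Resource(name = DomainConfig.BEAN_NAME_BACKEND_ACCESS)
        private BackendAccess backendAccess;

        @PostConstruct
        public void registerReaders() {
            // Register a reader supplier for the archiver-appliance backend
            // (same call as in the test configuration above).
            backendAccess.addDataReaderSupplier(Backend.SF_ARCHIVERAPPLIANCE, () -> createReader());
        }

        private DataReader createReader() {
            return new DummyArchiverApplianceReader(); // placeholder reader
        }
    }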
diff --git a/src/test/java/ch/psi/daq/test/queryrest/controller/QueryRestControllerChannelInfoTest.java b/src/test/java/ch/psi/daq/test/queryrest/controller/QueryRestControllerChannelInfoTest.java
new file mode 100644
index 0000000..ab3bab3
--- /dev/null
+++ b/src/test/java/ch/psi/daq/test/queryrest/controller/QueryRestControllerChannelInfoTest.java
@@ -0,0 +1,173 @@
+package ch.psi.daq.test.queryrest.controller;
+
+import static org.junit.Assert.assertArrayEquals;
+import static org.junit.Assert.assertEquals;
+
+import java.util.List;
+import java.util.stream.Collectors;
+
+import org.junit.After;
+import org.junit.Test;
+import org.springframework.http.MediaType;
+import org.springframework.test.web.servlet.MvcResult;
+import org.springframework.test.web.servlet.request.MockMvcRequestBuilders;
+import org.springframework.test.web.servlet.result.MockMvcResultHandlers;
+import org.springframework.test.web.servlet.result.MockMvcResultMatchers;
+
+import com.fasterxml.jackson.databind.ObjectMapper;
+
+import ch.psi.bsread.message.Type;
+import ch.psi.daq.common.ordering.Ordering;
+import ch.psi.daq.common.time.TimeUtils;
+import ch.psi.daq.domain.backend.Backend;
+import ch.psi.daq.domain.config.DomainConfig;
+import ch.psi.daq.domain.json.channels.info.ChannelInfo;
+import ch.psi.daq.domain.json.channels.info.ChannelInfos;
+import ch.psi.daq.domain.json.channels.info.ChannelInfosList;
+import ch.psi.daq.domain.query.ChannelNameRequest;
+import ch.psi.daq.domain.request.range.RequestRangePulseId;
+import ch.psi.daq.test.queryrest.AbstractDaqRestTest;
+
+/**
+ * Tests the channel info query endpoint of the query REST controller.
+ */
+public class QueryRestControllerChannelInfoTest extends AbstractDaqRestTest {
+
+ private ObjectMapper objectMapper = new ObjectMapper();
+
+ @After
+ public void tearDown() throws Exception {}
+
+ @Test
+ public void testChannelInfoQuery_01() throws Exception {
+ ChannelNameRequest query = new ChannelNameRequest(
+ new RequestRangePulseId(
+ 100,
+ 101),
+ "DataBuffer1", "DataBuffer2");
+
+ String content = mapper.writeValueAsString(query);
+ System.out.println(content);
+
+ MvcResult result = this.mockMvc.perform(MockMvcRequestBuilders
+ .post(DomainConfig.PATH_CHANNELS_INFO)
+ .contentType(MediaType.APPLICATION_JSON)
+ .content(content))
+ .andDo(MockMvcResultHandlers.print())
+ .andExpect(MockMvcResultMatchers.status().isOk())
+ .andReturn();
+
+ String response = result.getResponse().getContentAsString();
+ System.out.println("Response: " + response);
+
+ // test conversion used in DAQProcessing
+ List<? extends ChannelInfos> infosList = objectMapper.readValue(response, ChannelInfosList.class);
+ assertEquals(2, infosList.size());
+ ChannelInfos cInfos = infosList.get(0);
+ assertEquals("DataBuffer1", cInfos.getChannel().getName());
+ assertEquals(Backend.SF_DATABUFFER, cInfos.getChannel().getBackend());
+ List<ChannelInfo> infos = cInfos.getChannelInfos().collect(Collectors.toList());
+ assertEquals(2, infos.size());
+ ChannelInfo info = infos.get(0);
+ assertEquals("DataBuffer1", info.getChannel());
+ assertEquals(Backend.SF_DATABUFFER, info.getBackend());
+ assertEquals(TimeUtils.getTimeFromMillis(query.getRange().getStartPulseId() * 10, 0), info.getGlobalTime());
+ assertEquals(query.getRange().getStartPulseId(), info.getPulseId());
+ assertArrayEquals(new int[] {1}, info.getShape());
+ assertEquals(Type.Int32.getKey(), info.getType());
+ info = infos.get(1);
+ assertEquals("DataBuffer1", info.getChannel());
+ assertEquals(Backend.SF_DATABUFFER, info.getBackend());
+ assertEquals(TimeUtils.getTimeFromMillis(query.getRange().getEndPulseId() * 10, 0), info.getGlobalTime());
+ assertEquals(query.getRange().getEndPulseId(), info.getPulseId());
+ assertArrayEquals(new int[] {1}, info.getShape());
+ assertEquals(Type.Int32.getKey(), info.getType());
+
+ cInfos = infosList.get(1);
+ assertEquals("DataBuffer2", cInfos.getChannel().getName());
+ assertEquals(Backend.SF_DATABUFFER, cInfos.getChannel().getBackend());
+ infos = cInfos.getChannelInfos().collect(Collectors.toList());
+ assertEquals(2, infos.size());
+ info = infos.get(0);
+ assertEquals("DataBuffer2", info.getChannel());
+ assertEquals(Backend.SF_DATABUFFER, info.getBackend());
+ assertEquals(TimeUtils.getTimeFromMillis(query.getRange().getStartPulseId() * 10, 0), info.getGlobalTime());
+ assertEquals(query.getRange().getStartPulseId(), info.getPulseId());
+ assertArrayEquals(new int[] {1}, info.getShape());
+ assertEquals(Type.Int32.getKey(), info.getType());
+ info = infos.get(1);
+ assertEquals("DataBuffer2", info.getChannel());
+ assertEquals(Backend.SF_DATABUFFER, info.getBackend());
+ assertEquals(TimeUtils.getTimeFromMillis(query.getRange().getEndPulseId() * 10, 0), info.getGlobalTime());
+ assertEquals(query.getRange().getEndPulseId(), info.getPulseId());
+ assertArrayEquals(new int[] {1}, info.getShape());
+ assertEquals(Type.Int32.getKey(), info.getType());
+ }
+
+ @Test
+ public void testChannelInfoQuery_02() throws Exception {
+ ChannelNameRequest query = new ChannelNameRequest(
+ new RequestRangePulseId(
+ 100,
+ 101),
+ "DataBuffer1", "DataBuffer2");
+ query.setOrdering(Ordering.desc);
+
+ String content = mapper.writeValueAsString(query);
+ System.out.println(content);
+
+ MvcResult result = this.mockMvc.perform(MockMvcRequestBuilders
+ .post(DomainConfig.PATH_CHANNELS_INFO)
+ .contentType(MediaType.APPLICATION_JSON)
+ .content(content))
+ .andDo(MockMvcResultHandlers.print())
+ .andExpect(MockMvcResultMatchers.status().isOk())
+ .andReturn();
+
+ String response = result.getResponse().getContentAsString();
+ System.out.println("Response: " + response);
+
+ // test conversion used in DAQProcessing
+ List<? extends ChannelInfos> infosList = objectMapper.readValue(response, ChannelInfosList.class);
+ assertEquals(2, infosList.size());
+ ChannelInfos cInfos = infosList.get(0);
+ assertEquals("DataBuffer1", cInfos.getChannel().getName());
+ assertEquals(Backend.SF_DATABUFFER, cInfos.getChannel().getBackend());
+ List<ChannelInfo> infos = cInfos.getChannelInfos().collect(Collectors.toList());
+ assertEquals(2, infos.size());
+ ChannelInfo info = infos.get(0);
+ assertEquals("DataBuffer1", info.getChannel());
+ assertEquals(Backend.SF_DATABUFFER, info.getBackend());
+ assertEquals(TimeUtils.getTimeFromMillis(query.getRange().getEndPulseId() * 10, 0), info.getGlobalTime());
+ assertEquals(query.getRange().getEndPulseId(), info.getPulseId());
+ assertArrayEquals(new int[] {1}, info.getShape());
+ assertEquals(Type.Int32.getKey(), info.getType());
+ info = infos.get(1);
+ assertEquals("DataBuffer1", info.getChannel());
+ assertEquals(Backend.SF_DATABUFFER, info.getBackend());
+ assertEquals(TimeUtils.getTimeFromMillis(query.getRange().getStartPulseId() * 10, 0), info.getGlobalTime());
+ assertEquals(query.getRange().getStartPulseId(), info.getPulseId());
+ assertArrayEquals(new int[] {1}, info.getShape());
+ assertEquals(Type.Int32.getKey(), info.getType());
+
+ cInfos = infosList.get(1);
+ assertEquals("DataBuffer2", cInfos.getChannel().getName());
+ assertEquals(Backend.SF_DATABUFFER, cInfos.getChannel().getBackend());
+ infos = cInfos.getChannelInfos().collect(Collectors.toList());
+ assertEquals(2, infos.size());
+ info = infos.get(0);
+ assertEquals("DataBuffer2", info.getChannel());
+ assertEquals(Backend.SF_DATABUFFER, info.getBackend());
+ assertEquals(TimeUtils.getTimeFromMillis(query.getRange().getEndPulseId() * 10, 0), info.getGlobalTime());
+ assertEquals(query.getRange().getEndPulseId(), info.getPulseId());
+ assertArrayEquals(new int[] {1}, info.getShape());
+ assertEquals(Type.Int32.getKey(), info.getType());
+ info = infos.get(1);
+ assertEquals("DataBuffer2", info.getChannel());
+ assertEquals(Backend.SF_DATABUFFER, info.getBackend());
+ assertEquals(TimeUtils.getTimeFromMillis(query.getRange().getStartPulseId() * 10, 0), info.getGlobalTime());
+ assertEquals(query.getRange().getStartPulseId(), info.getPulseId());
+ assertArrayEquals(new int[] {1}, info.getShape());
+ assertEquals(Type.Int32.getKey(), info.getType());
+ }
+}
diff --git a/src/test/java/ch/psi/daq/test/queryrest/controller/QueryRestControllerChannelStatusTest.java b/src/test/java/ch/psi/daq/test/queryrest/controller/QueryRestControllerChannelStatusTest.java
deleted file mode 100644
index 82ff11f..0000000
--- a/src/test/java/ch/psi/daq/test/queryrest/controller/QueryRestControllerChannelStatusTest.java
+++ /dev/null
@@ -1,122 +0,0 @@
-package ch.psi.daq.test.queryrest.controller;
-
-import static org.junit.Assert.assertFalse;
-
-import org.junit.After;
-import org.junit.Test;
-import org.springframework.http.MediaType;
-import org.springframework.test.web.servlet.MvcResult;
-import org.springframework.test.web.servlet.request.MockMvcRequestBuilders;
-import org.springframework.test.web.servlet.result.MockMvcResultHandlers;
-import org.springframework.test.web.servlet.result.MockMvcResultMatchers;
-
-import ch.psi.daq.common.time.TimeUtils;
-import ch.psi.daq.domain.json.ChannelName;
-import ch.psi.daq.domain.query.status.channel.ChannelStatusQuery;
-import ch.psi.daq.domain.reader.Backend;
-import ch.psi.daq.queryrest.controller.QueryRestController;
-import ch.psi.daq.test.queryrest.AbstractDaqRestTest;
-
-/**
- * Tests the {@link DaqController} implementation.
- */
-public class QueryRestControllerChannelStatusTest extends AbstractDaqRestTest {
-
- @After
- public void tearDown() throws Exception {}
-
- @Test
- public void testChannelStatusQuery_01() throws Exception {
- ChannelStatusQuery query = new ChannelStatusQuery("DataBuffer1", "DataBuffer2");
-
- String content = mapper.writeValueAsString(query);
- System.out.println(content);
-
- this.mockMvc
- .perform(MockMvcRequestBuilders
- .post(QueryRestController.PATH_STATUS_CHANNELS)
- .contentType(MediaType.APPLICATION_JSON)
- .content(content))
-
- .andExpect(MockMvcResultMatchers.status().isOk())
- .andExpect(MockMvcResultMatchers.jsonPath("$").isArray())
- .andExpect(MockMvcResultMatchers.jsonPath("$[0]").exists())
- .andExpect(MockMvcResultMatchers.jsonPath("$[0].channel.backend").value(Backend.SF_DATABUFFER.getKey()))
- .andExpect(MockMvcResultMatchers.jsonPath("$[0].channel.name").value("DataBuffer1"))
- .andExpect(MockMvcResultMatchers.jsonPath("$[0].recording").value(false))
- .andExpect(MockMvcResultMatchers.jsonPath("$[0].connected").value(false))
- .andExpect(
- MockMvcResultMatchers.jsonPath("$[0].lastEventDate").value(
- TimeUtils.format(TimeUtils.getTimeFromMillis(0, 0))))
- .andExpect(MockMvcResultMatchers.jsonPath("$[1]").exists())
- .andExpect(MockMvcResultMatchers.jsonPath("$[1].channel.backend").value(Backend.SF_DATABUFFER.getKey()))
- .andExpect(MockMvcResultMatchers.jsonPath("$[1].channel.name").value("DataBuffer2"))
- .andExpect(MockMvcResultMatchers.jsonPath("$[1].recording").value(false))
- .andExpect(MockMvcResultMatchers.jsonPath("$[1].connected").value(false))
- .andExpect(
- MockMvcResultMatchers.jsonPath("$[1].lastEventDate").value(
- TimeUtils.format(TimeUtils.getTimeFromMillis(0, 0))))
- .andExpect(MockMvcResultMatchers.jsonPath("$[2]").doesNotExist());
- }
-
- @Test
- public void testChannelStatusQuery_02() throws Exception {
- ChannelStatusQuery query = new ChannelStatusQuery();
- query.addChannel(new ChannelName("ArchiverAppliance1", Backend.SF_ARCHIVERAPPLIANCE));
- query.addChannel(new ChannelName("ArchiverAppliance2", Backend.SF_ARCHIVERAPPLIANCE));
-
- String content = mapper.writeValueAsString(query);
- System.out.println(content);
-
- this.mockMvc
- .perform(MockMvcRequestBuilders
- .post(QueryRestController.PATH_STATUS_CHANNELS)
- .contentType(MediaType.APPLICATION_JSON)
- .content(content))
-
- .andExpect(MockMvcResultMatchers.status().isOk())
- .andExpect(MockMvcResultMatchers.jsonPath("$").isArray())
- .andExpect(MockMvcResultMatchers.jsonPath("$[0]").exists())
- .andExpect(
- MockMvcResultMatchers.jsonPath("$[0].channel.backend").value(Backend.SF_ARCHIVERAPPLIANCE.getKey()))
- .andExpect(MockMvcResultMatchers.jsonPath("$[0].channel.name").value("ArchiverAppliance1"))
- .andExpect(MockMvcResultMatchers.jsonPath("$[0].recording").value(true))
- .andExpect(MockMvcResultMatchers.jsonPath("$[0].connected").value(true))
- .andExpect(
- MockMvcResultMatchers.jsonPath("$[0].lastEventDate").value(
- TimeUtils.format(TimeUtils.getTimeFromMillis(1467638000000L, 0))))
- .andExpect(MockMvcResultMatchers.jsonPath("$[1]").exists())
- .andExpect(
- MockMvcResultMatchers.jsonPath("$[1].channel.backend").value(Backend.SF_ARCHIVERAPPLIANCE.getKey()))
- .andExpect(MockMvcResultMatchers.jsonPath("$[1].channel.name").value("ArchiverAppliance2"))
- .andExpect(MockMvcResultMatchers.jsonPath("$[1].recording").value(true))
- .andExpect(MockMvcResultMatchers.jsonPath("$[1].connected").value(true))
- .andExpect(
- MockMvcResultMatchers.jsonPath("$[1].lastEventDate").value(
- TimeUtils.format(TimeUtils.getTimeFromMillis(1467638000000L, 0))))
- .andExpect(MockMvcResultMatchers.jsonPath("$[2]").doesNotExist());
- }
-
- @Test
- public void testChannelStatusQuery_03() throws Exception {
- ChannelStatusQuery query = new ChannelStatusQuery();
- query.addChannel(new ChannelName("ArchiverAppliance1", Backend.SF_ARCHIVERAPPLIANCE));
- query.addChannel(new ChannelName("ArchiverAppliance2", Backend.SF_ARCHIVERAPPLIANCE));
-
- String content = mapper.writeValueAsString(query);
- System.out.println(content);
-
- MvcResult result = this.mockMvc.perform(MockMvcRequestBuilders
- .post(QueryRestController.PATH_STATUS_CHANNELS)
- .contentType(MediaType.APPLICATION_JSON)
- .content(content))
- .andDo(MockMvcResultHandlers.print())
- .andExpect(MockMvcResultMatchers.status().isOk())
- .andReturn();
-
- String response = result.getResponse().getContentAsString();
- System.out.println("Response: " + response);
-
- assertFalse(response.contains("lastEventTime"));
- }
-}
diff --git a/src/test/java/ch/psi/daq/test/queryrest/controller/QueryRestControllerCsvTest.java b/src/test/java/ch/psi/daq/test/queryrest/controller/QueryRestControllerCsvTest.java
index b36d8e2..467f0bc 100644
--- a/src/test/java/ch/psi/daq/test/queryrest/controller/QueryRestControllerCsvTest.java
+++ b/src/test/java/ch/psi/daq/test/queryrest/controller/QueryRestControllerCsvTest.java
@@ -25,6 +25,7 @@ import org.springframework.test.web.servlet.result.MockMvcResultMatchers;
import ch.psi.daq.common.ordering.Ordering;
import ch.psi.daq.common.time.TimeUtils;
+import ch.psi.daq.domain.config.DomainConfig;
import ch.psi.daq.domain.query.DAQQueries;
import ch.psi.daq.domain.query.DAQQuery;
import ch.psi.daq.domain.query.DAQQueryElement;
@@ -38,7 +39,6 @@ import ch.psi.daq.domain.request.range.RequestRangeDate;
import ch.psi.daq.domain.request.range.RequestRangePulseId;
import ch.psi.daq.domain.request.range.RequestRangeTime;
import ch.psi.daq.domain.test.TestTimeUtils;
-import ch.psi.daq.queryrest.controller.QueryRestController;
import ch.psi.daq.queryrest.response.csv.CSVHTTPResponse;
import ch.psi.daq.queryrest.response.csv.CSVResponseStreamWriter;
import ch.psi.daq.test.queryrest.AbstractDaqRestTest;
@@ -83,7 +83,7 @@ public class QueryRestControllerCsvTest extends AbstractDaqRestTest {
MvcResult result = this.mockMvc
.perform(MockMvcRequestBuilders
- .post(QueryRestController.PATH_QUERY)
+ .post(DomainConfig.PATH_QUERY)
.contentType(MediaType.APPLICATION_JSON)
.content(content))
.andDo(MockMvcResultHandlers.print())
@@ -154,8 +154,10 @@ public class QueryRestControllerCsvTest extends AbstractDaqRestTest {
List channels = Arrays.asList(channel_01, channel_02, channel_03);
DAQQuery request = new DAQQuery(
new RequestRangePulseId(
- -1,
- -1),
+ // dummy range as range is defined by channel_Seq (see
+ // DummyCassandraReader.getDummyEventStream())
+ 0,
+ 0),
channels);
request.setResponse(new CSVHTTPResponse());
@@ -171,7 +173,7 @@ public class QueryRestControllerCsvTest extends AbstractDaqRestTest {
MvcResult result = this.mockMvc
.perform(MockMvcRequestBuilders
- .post(QueryRestController.PATH_QUERY)
+ .post(DomainConfig.PATH_QUERY)
.contentType(MediaType.APPLICATION_JSON)
.content(content))
.andDo(MockMvcResultHandlers.print())
@@ -244,8 +246,10 @@ public class QueryRestControllerCsvTest extends AbstractDaqRestTest {
List channels = Arrays.asList(channel_01, channel_02, channel_03);
DAQQuery request = new DAQQuery(
new RequestRangePulseId(
- -1,
- -1),
+ // dummy range as range is defined by channel_Seq (see
+ // DummyCassandraReader.getDummyEventStream())
+ 0,
+ 0),
channels);
request.setResponse(new CSVHTTPResponse());
@@ -259,7 +263,7 @@ public class QueryRestControllerCsvTest extends AbstractDaqRestTest {
MvcResult result = this.mockMvc
.perform(MockMvcRequestBuilders
- .post(QueryRestController.PATH_QUERY)
+ .post(DomainConfig.PATH_QUERY)
.contentType(MediaType.APPLICATION_JSON)
.content(content))
.andDo(MockMvcResultHandlers.print())
@@ -356,7 +360,7 @@ public class QueryRestControllerCsvTest extends AbstractDaqRestTest {
MvcResult result = this.mockMvc
.perform(MockMvcRequestBuilders
- .post(QueryRestController.PATH_QUERIES)
+ .post(DomainConfig.PATH_QUERIES)
.contentType(MediaType.APPLICATION_JSON)
.content(content))
.andDo(MockMvcResultHandlers.print())
@@ -439,7 +443,7 @@ public class QueryRestControllerCsvTest extends AbstractDaqRestTest {
MvcResult result = this.mockMvc
.perform(MockMvcRequestBuilders
- .post(QueryRestController.PATH_QUERY)
+ .post(DomainConfig.PATH_QUERY)
.contentType(MediaType.APPLICATION_JSON)
.content(content))
.andDo(MockMvcResultHandlers.print())
@@ -523,7 +527,7 @@ public class QueryRestControllerCsvTest extends AbstractDaqRestTest {
MvcResult result = this.mockMvc
.perform(MockMvcRequestBuilders
- .post(QueryRestController.PATH_QUERY)
+ .post(DomainConfig.PATH_QUERY)
.contentType(MediaType.APPLICATION_JSON)
.content(content))
.andDo(MockMvcResultHandlers.print())
@@ -610,7 +614,7 @@ public class QueryRestControllerCsvTest extends AbstractDaqRestTest {
MvcResult result = this.mockMvc
.perform(MockMvcRequestBuilders
- .post(QueryRestController.PATH_QUERY)
+ .post(DomainConfig.PATH_QUERY)
.contentType(MediaType.APPLICATION_JSON)
.content(content))
.andDo(MockMvcResultHandlers.print())
@@ -684,7 +688,7 @@ public class QueryRestControllerCsvTest extends AbstractDaqRestTest {
try {
this.mockMvc
.perform(MockMvcRequestBuilders
- .post(QueryRestController.PATH_QUERY)
+ .post(DomainConfig.PATH_QUERY)
.contentType(MediaType.APPLICATION_JSON)
.content(content))
.andDo(MockMvcResultHandlers.print());
@@ -714,7 +718,7 @@ public class QueryRestControllerCsvTest extends AbstractDaqRestTest {
try {
this.mockMvc
.perform(MockMvcRequestBuilders
- .post(QueryRestController.PATH_QUERY)
+ .post(DomainConfig.PATH_QUERY)
.contentType(MediaType.APPLICATION_JSON)
.content(content))
.andDo(MockMvcResultHandlers.print());
@@ -764,7 +768,7 @@ public class QueryRestControllerCsvTest extends AbstractDaqRestTest {
MvcResult result = this.mockMvc
.perform(MockMvcRequestBuilders
- .post(QueryRestController.PATH_QUERY)
+ .post(DomainConfig.PATH_QUERY)
.contentType(MediaType.APPLICATION_JSON)
.content(content))
.andDo(MockMvcResultHandlers.print())
@@ -877,7 +881,7 @@ public class QueryRestControllerCsvTest extends AbstractDaqRestTest {
MvcResult result = this.mockMvc
.perform(MockMvcRequestBuilders
- .post(QueryRestController.PATH_QUERY)
+ .post(DomainConfig.PATH_QUERY)
.contentType(MediaType.APPLICATION_JSON)
.content(content))
.andDo(MockMvcResultHandlers.print())
@@ -1003,7 +1007,7 @@ public class QueryRestControllerCsvTest extends AbstractDaqRestTest {
MvcResult result = this.mockMvc
.perform(MockMvcRequestBuilders
- .post(QueryRestController.PATH_QUERY)
+ .post(DomainConfig.PATH_QUERY)
.contentType(MediaType.APPLICATION_JSON)
.content(content))
.andDo(MockMvcResultHandlers.print())
@@ -1086,7 +1090,7 @@ public class QueryRestControllerCsvTest extends AbstractDaqRestTest {
MvcResult result = this.mockMvc
.perform(MockMvcRequestBuilders
- .post(QueryRestController.PATH_QUERY)
+ .post(DomainConfig.PATH_QUERY)
.contentType(MediaType.APPLICATION_JSON)
.content(content))
.andDo(MockMvcResultHandlers.print())
@@ -1141,7 +1145,7 @@ public class QueryRestControllerCsvTest extends AbstractDaqRestTest {
this.mockMvc
.perform(MockMvcRequestBuilders
- .post(QueryRestController.PATH_QUERY)
+ .post(DomainConfig.PATH_QUERY)
.contentType(MediaType.APPLICATION_JSON)
.content(content))
@@ -1163,7 +1167,7 @@ public class QueryRestControllerCsvTest extends AbstractDaqRestTest {
this.mockMvc
.perform(MockMvcRequestBuilders
- .post(QueryRestController.PATH_QUERY)
+ .post(DomainConfig.PATH_QUERY)
.contentType(MediaType.APPLICATION_JSON)
.content(content))
diff --git a/src/test/java/ch/psi/daq/test/queryrest/controller/QueryRestControllerJsonTest.java b/src/test/java/ch/psi/daq/test/queryrest/controller/QueryRestControllerJsonTest.java
index 0a61124..4262c9f 100644
--- a/src/test/java/ch/psi/daq/test/queryrest/controller/QueryRestControllerJsonTest.java
+++ b/src/test/java/ch/psi/daq/test/queryrest/controller/QueryRestControllerJsonTest.java
@@ -5,6 +5,7 @@ import java.util.Arrays;
import org.junit.After;
import org.junit.Test;
import org.springframework.http.MediaType;
+import org.springframework.test.web.servlet.MvcResult;
import org.springframework.test.web.servlet.request.MockMvcRequestBuilders;
import org.springframework.test.web.servlet.result.MockMvcResultHandlers;
import org.springframework.test.web.servlet.result.MockMvcResultMatchers;
@@ -12,6 +13,8 @@ import org.springframework.test.web.servlet.setup.MockMvcBuilders;
import ch.psi.daq.common.ordering.Ordering;
import ch.psi.daq.common.time.TimeUtils;
+import ch.psi.daq.domain.backend.Backend;
+import ch.psi.daq.domain.config.DomainConfig;
import ch.psi.daq.domain.json.ChannelName;
import ch.psi.daq.domain.query.DAQQueries;
import ch.psi.daq.domain.query.DAQQuery;
@@ -23,12 +26,10 @@ import ch.psi.daq.domain.query.operation.AggregationType;
import ch.psi.daq.domain.query.operation.Compression;
import ch.psi.daq.domain.query.operation.Extrema;
import ch.psi.daq.domain.query.operation.QueryField;
-import ch.psi.daq.domain.reader.Backend;
import ch.psi.daq.domain.request.range.RequestRangeDate;
import ch.psi.daq.domain.request.range.RequestRangePulseId;
import ch.psi.daq.domain.request.range.RequestRangeTime;
import ch.psi.daq.domain.test.TestTimeUtils;
-import ch.psi.daq.queryrest.controller.QueryRestController;
import ch.psi.daq.queryrest.response.json.JSONHTTPResponse;
import ch.psi.daq.test.queryrest.AbstractDaqRestTest;
@@ -51,7 +52,7 @@ public class QueryRestControllerJsonTest extends AbstractDaqRestTest {
this.mockMvc
.perform(
MockMvcRequestBuilders
- .get(QueryRestController.PATH_CHANNELS)
+ .get(DomainConfig.PATH_CHANNELS)
.contentType(MediaType.APPLICATION_JSON))
.andDo(MockMvcResultHandlers.print())
.andExpect(MockMvcResultMatchers.status().isOk())
@@ -80,7 +81,7 @@ public class QueryRestControllerJsonTest extends AbstractDaqRestTest {
public void testSpecificChannelSearch() throws Exception {
this.mockMvc.perform(
MockMvcRequestBuilders
- .get(QueryRestController.PATH_CHANNELS + "/int32")
+ .get(DomainConfig.PATH_CHANNELS + "/int32")
.contentType(MediaType.APPLICATION_JSON))
.andDo(MockMvcResultHandlers.print())
.andExpect(MockMvcResultMatchers.status().isOk())
@@ -112,7 +113,7 @@ public class QueryRestControllerJsonTest extends AbstractDaqRestTest {
this.mockMvc
.perform(MockMvcRequestBuilders
- .post(QueryRestController.PATH_CHANNELS)
+ .post(DomainConfig.PATH_CHANNELS)
.contentType(MediaType.APPLICATION_JSON)
.content(content))
@@ -141,7 +142,7 @@ public class QueryRestControllerJsonTest extends AbstractDaqRestTest {
this.mockMvc
.perform(MockMvcRequestBuilders
- .post(QueryRestController.PATH_CHANNELS)
+ .post(DomainConfig.PATH_CHANNELS)
.contentType(MediaType.APPLICATION_JSON)
.content(content))
@@ -167,7 +168,7 @@ public class QueryRestControllerJsonTest extends AbstractDaqRestTest {
this.mockMvc
.perform(MockMvcRequestBuilders
- .post(QueryRestController.PATH_CHANNELS)
+ .post(DomainConfig.PATH_CHANNELS)
.contentType(MediaType.APPLICATION_JSON)
.content(content))
@@ -192,7 +193,7 @@ public class QueryRestControllerJsonTest extends AbstractDaqRestTest {
this.mockMvc.perform(
MockMvcRequestBuilders
- .options(QueryRestController.PATH_CHANNELS)
+ .options(DomainConfig.PATH_CHANNELS)
.contentType(MediaType.APPLICATION_JSON))
.andDo(MockMvcResultHandlers.print())
.andExpect(MockMvcResultMatchers.status().isOk())
@@ -209,7 +210,7 @@ public class QueryRestControllerJsonTest extends AbstractDaqRestTest {
// http://localhost:8080/channels
this.mockMvc.perform(
MockMvcRequestBuilders
- .options(QueryRestController.PATH_CHANNELS)
+ .options(DomainConfig.PATH_CHANNELS)
.header("Origin", "*")
.header("Access-Control-Request-Method", "POST")
.contentType(MediaType.APPLICATION_JSON))
@@ -221,7 +222,7 @@ public class QueryRestControllerJsonTest extends AbstractDaqRestTest {
// -v http://localhost:8080/channels
this.mockMvc.perform(
MockMvcRequestBuilders
- .options(QueryRestController.PATH_CHANNELS)
+ .options(DomainConfig.PATH_CHANNELS)
.header("Origin", "http://localhost:8080")
.header("Access-Control-Request-Method", "POST")
.contentType(MediaType.APPLICATION_JSON))
@@ -248,7 +249,7 @@ public class QueryRestControllerJsonTest extends AbstractDaqRestTest {
this.mockMvc
.perform(MockMvcRequestBuilders
- .post(QueryRestController.PATH_QUERY)
+ .post(DomainConfig.PATH_QUERY)
.contentType(MediaType.APPLICATION_JSON)
.content(content))
@@ -306,7 +307,7 @@ public class QueryRestControllerJsonTest extends AbstractDaqRestTest {
this.mockMvc
.perform(MockMvcRequestBuilders
- .post(QueryRestController.PATH_QUERY)
+ .post(DomainConfig.PATH_QUERY)
.contentType(MediaType.APPLICATION_JSON)
.content(content))
@@ -342,7 +343,7 @@ public class QueryRestControllerJsonTest extends AbstractDaqRestTest {
this.mockMvc
.perform(MockMvcRequestBuilders
- .post(QueryRestController.PATH_QUERY)
+ .post(DomainConfig.PATH_QUERY)
.contentType(MediaType.APPLICATION_JSON)
.content(content))
@@ -389,7 +390,7 @@ public class QueryRestControllerJsonTest extends AbstractDaqRestTest {
this.mockMvc
.perform(MockMvcRequestBuilders
- .post(QueryRestController.PATH_QUERIES)
+ .post(DomainConfig.PATH_QUERIES)
.contentType(MediaType.APPLICATION_JSON)
.content(content))
@@ -428,7 +429,7 @@ public class QueryRestControllerJsonTest extends AbstractDaqRestTest {
}
@Test
- public void testTimeRangeQuery() throws Exception {
+ public void testTimeRangeQuery_01() throws Exception {
DAQQuery request = new DAQQuery(
new RequestRangeTime(
TimeUtils.getTimeFromMillis(2000, 0),
@@ -438,7 +439,53 @@ public class QueryRestControllerJsonTest extends AbstractDaqRestTest {
String content = mapper.writeValueAsString(request);
this.mockMvc.perform(MockMvcRequestBuilders
- .post(QueryRestController.PATH_QUERY)
+ .post(DomainConfig.PATH_QUERY)
+ .contentType(MediaType.APPLICATION_JSON)
+ .content(content))
+
+ .andDo(MockMvcResultHandlers.print())
+ .andExpect(MockMvcResultMatchers.status().isOk())
+ .andExpect(MockMvcResultMatchers.jsonPath("$").isArray())
+ .andExpect(MockMvcResultMatchers.jsonPath("$[0]").exists())
+ .andExpect(MockMvcResultMatchers.jsonPath("$[0].channel").isMap())
+ .andExpect(MockMvcResultMatchers.jsonPath("$[0].channel.name").value(TEST_CHANNEL_01))
+ .andExpect(MockMvcResultMatchers.jsonPath("$[0].data").isArray())
+ .andExpect(MockMvcResultMatchers.jsonPath("$[0].data[0].pulseId").value(200))
+ .andExpect(MockMvcResultMatchers.jsonPath("$[0].data[0].globalSeconds").value(
+ TestTimeUtils.getTimeStr(2, 0)))
+ .andExpect(MockMvcResultMatchers.jsonPath("$[0].data[1].pulseId").value(201))
+ .andExpect(MockMvcResultMatchers.jsonPath("$[0].data[1].globalSeconds").value(
+ TestTimeUtils.getTimeStr(2, 10000000)))
+ .andExpect(MockMvcResultMatchers.jsonPath("$[1]").exists())
+ .andExpect(MockMvcResultMatchers.jsonPath("$[1].channel").isMap())
+ .andExpect(MockMvcResultMatchers.jsonPath("$[1].channel.name").value(TEST_CHANNEL_02))
+ .andExpect(MockMvcResultMatchers.jsonPath("$[1].data").isArray())
+ .andExpect(MockMvcResultMatchers.jsonPath("$[1].data[0].pulseId").value(200))
+ .andExpect(MockMvcResultMatchers.jsonPath("$[1].data[0].globalSeconds").value(
+ TestTimeUtils.getTimeStr(2, 0)))
+ .andExpect(MockMvcResultMatchers.jsonPath("$[1].data[1].pulseId").value(201))
+ .andExpect(MockMvcResultMatchers.jsonPath("$[1].data[1].globalSeconds").value(
+ TestTimeUtils.getTimeStr(2, 10000000)));
+ }
+
+ @Test
+ public void testTimeRangeQuery_02() throws Exception {
+ String content =
+ "{\"channels\":[{\"name\":\"testChannel1\"},{\"name\":\"testChannel2\"}],\"fields\":[],\"ordering\":\"asc\",\"range\":{\"startSeconds\":\"2.0\",\"endSeconds\":\"2.01\"}}";
+
+ MvcResult result = this.mockMvc.perform(MockMvcRequestBuilders
+ .post(DomainConfig.PATH_QUERY)
+ .contentType(MediaType.APPLICATION_JSON)
+ .content(content))
+ .andDo(MockMvcResultHandlers.print())
+ .andExpect(MockMvcResultMatchers.status().isOk())
+ .andReturn();
+
+ String response = result.getResponse().getContentAsString();
+ System.out.println("Response: " + response);
+
+ this.mockMvc.perform(MockMvcRequestBuilders
+ .post(DomainConfig.PATH_QUERY)
.contentType(MediaType.APPLICATION_JSON)
.content(content))
@@ -483,7 +530,7 @@ public class QueryRestControllerJsonTest extends AbstractDaqRestTest {
this.mockMvc
.perform(
MockMvcRequestBuilders
- .post(QueryRestController.PATH_QUERY)
+ .post(DomainConfig.PATH_QUERY)
.contentType(MediaType.APPLICATION_JSON)
.content(content)
)
@@ -525,7 +572,7 @@ public class QueryRestControllerJsonTest extends AbstractDaqRestTest {
String content = mapper.writeValueAsString(request);
this.mockMvc
- .perform(MockMvcRequestBuilders.post(QueryRestController.PATH_QUERY)
+ .perform(MockMvcRequestBuilders.post(DomainConfig.PATH_QUERY)
.contentType(MediaType.APPLICATION_JSON)
.content(content))
@@ -573,7 +620,7 @@ public class QueryRestControllerJsonTest extends AbstractDaqRestTest {
this.mockMvc
.perform(
MockMvcRequestBuilders
- .post(QueryRestController.PATH_QUERY)
+ .post(DomainConfig.PATH_QUERY)
.contentType(MediaType.APPLICATION_JSON)
.content(content)
)
@@ -625,7 +672,7 @@ public class QueryRestControllerJsonTest extends AbstractDaqRestTest {
this.mockMvc
.perform(
MockMvcRequestBuilders
- .post(QueryRestController.PATH_QUERY)
+ .post(DomainConfig.PATH_QUERY)
.contentType(MediaType.APPLICATION_JSON)
.content(content)
)
@@ -693,7 +740,7 @@ public class QueryRestControllerJsonTest extends AbstractDaqRestTest {
this.mockMvc
.perform(
MockMvcRequestBuilders
- .post(QueryRestController.PATH_QUERY)
+ .post(DomainConfig.PATH_QUERY)
.contentType(MediaType.APPLICATION_JSON)
.content(content)
)
@@ -807,43 +854,43 @@ public class QueryRestControllerJsonTest extends AbstractDaqRestTest {
System.out.println(content);
this.mockMvc
- .perform(
- MockMvcRequestBuilders
- .post(QueryRestController.PATH_QUERY)
- .contentType(MediaType.APPLICATION_JSON)
- .content(content)
- )
- .andDo(MockMvcResultHandlers.print())
- .andExpect(MockMvcResultMatchers.status().isOk())
- .andExpect(MockMvcResultMatchers.jsonPath("$").isArray())
- .andExpect(MockMvcResultMatchers.jsonPath("$[0]").exists())
- .andExpect(MockMvcResultMatchers.jsonPath("$[0].channel").isMap())
- .andExpect(MockMvcResultMatchers.jsonPath("$[0].channel.name").value(TEST_CHANNEL_WAVEFORM_01))
- .andExpect(MockMvcResultMatchers.jsonPath("$[0].channel.backend").value(Backend.SF_DATABUFFER.getKey()))
- .andExpect(MockMvcResultMatchers.jsonPath("$[0].data").isArray())
- .andExpect(MockMvcResultMatchers.jsonPath("$[0].data[0].pulseId").value(100))
- .andExpect(MockMvcResultMatchers.jsonPath("$[0].data[0].globalSeconds").value(
- TestTimeUtils.getTimeStr(1, 0)))
- .andExpect(MockMvcResultMatchers.jsonPath("$[0].data[0].eventCount").value(5))
- .andExpect(MockMvcResultMatchers.jsonPath("$[0].data[0].value").isArray())
- .andExpect(MockMvcResultMatchers.jsonPath("$[0].data[0].value[0].min").value(100.0))
- .andExpect(MockMvcResultMatchers.jsonPath("$[0].data[0].value[0].mean").value(102.0))
- .andExpect(MockMvcResultMatchers.jsonPath("$[0].data[0].value[0].max").value(104.0))
- .andExpect(MockMvcResultMatchers.jsonPath("$[0].data[0].value[1].min").value(100.0))
- .andExpect(MockMvcResultMatchers.jsonPath("$[0].data[0].value[1].mean").value(102.0))
- .andExpect(MockMvcResultMatchers.jsonPath("$[0].data[0].value[1].max").value(104.0))
+ .perform(
+ MockMvcRequestBuilders
+ .post(DomainConfig.PATH_QUERY)
+ .contentType(MediaType.APPLICATION_JSON)
+ .content(content)
+ )
+ .andDo(MockMvcResultHandlers.print())
+ .andExpect(MockMvcResultMatchers.status().isOk())
+ .andExpect(MockMvcResultMatchers.jsonPath("$").isArray())
+ .andExpect(MockMvcResultMatchers.jsonPath("$[0]").exists())
+ .andExpect(MockMvcResultMatchers.jsonPath("$[0].channel").isMap())
+ .andExpect(MockMvcResultMatchers.jsonPath("$[0].channel.name").value(TEST_CHANNEL_WAVEFORM_01))
+ .andExpect(MockMvcResultMatchers.jsonPath("$[0].channel.backend").value(Backend.SF_DATABUFFER.getKey()))
+ .andExpect(MockMvcResultMatchers.jsonPath("$[0].data").isArray())
+ .andExpect(MockMvcResultMatchers.jsonPath("$[0].data[0].pulseId").value(100))
+ .andExpect(MockMvcResultMatchers.jsonPath("$[0].data[0].globalSeconds").value(
+ TestTimeUtils.getTimeStr(1, 0)))
+ .andExpect(MockMvcResultMatchers.jsonPath("$[0].data[0].eventCount").value(5))
+ .andExpect(MockMvcResultMatchers.jsonPath("$[0].data[0].value").isArray())
+ .andExpect(MockMvcResultMatchers.jsonPath("$[0].data[0].value[0].min").value(100.0))
+ .andExpect(MockMvcResultMatchers.jsonPath("$[0].data[0].value[0].mean").value(102.0))
+ .andExpect(MockMvcResultMatchers.jsonPath("$[0].data[0].value[0].max").value(104.0))
+ .andExpect(MockMvcResultMatchers.jsonPath("$[0].data[0].value[1].min").value(100.0))
+ .andExpect(MockMvcResultMatchers.jsonPath("$[0].data[0].value[1].mean").value(102.0))
+ .andExpect(MockMvcResultMatchers.jsonPath("$[0].data[0].value[1].max").value(104.0))
- .andExpect(MockMvcResultMatchers.jsonPath("$[0].data[1].pulseId").value(105))
- .andExpect(MockMvcResultMatchers.jsonPath("$[0].data[1].globalSeconds").value(
- TestTimeUtils.getTimeStr(1, 50000000)))
- .andExpect(MockMvcResultMatchers.jsonPath("$[0].data[1].eventCount").value(5))
- .andExpect(MockMvcResultMatchers.jsonPath("$[0].data[1].value").isArray())
- .andExpect(MockMvcResultMatchers.jsonPath("$[0].data[1].value[0].min").value(105.0))
- .andExpect(MockMvcResultMatchers.jsonPath("$[0].data[1].value[0].mean").value(107.0))
- .andExpect(MockMvcResultMatchers.jsonPath("$[0].data[1].value[0].max").value(109.0))
- .andExpect(MockMvcResultMatchers.jsonPath("$[0].data[1].value[1].min").value(105.0))
- .andExpect(MockMvcResultMatchers.jsonPath("$[0].data[1].value[1].mean").value(107.0))
- .andExpect(MockMvcResultMatchers.jsonPath("$[0].data[1].value[1].max").value(109.0));
+ .andExpect(MockMvcResultMatchers.jsonPath("$[0].data[1].pulseId").value(105))
+ .andExpect(MockMvcResultMatchers.jsonPath("$[0].data[1].globalSeconds").value(
+ TestTimeUtils.getTimeStr(1, 50000000)))
+ .andExpect(MockMvcResultMatchers.jsonPath("$[0].data[1].eventCount").value(5))
+ .andExpect(MockMvcResultMatchers.jsonPath("$[0].data[1].value").isArray())
+ .andExpect(MockMvcResultMatchers.jsonPath("$[0].data[1].value[0].min").value(105.0))
+ .andExpect(MockMvcResultMatchers.jsonPath("$[0].data[1].value[0].mean").value(107.0))
+ .andExpect(MockMvcResultMatchers.jsonPath("$[0].data[1].value[0].max").value(109.0))
+ .andExpect(MockMvcResultMatchers.jsonPath("$[0].data[1].value[1].min").value(105.0))
+ .andExpect(MockMvcResultMatchers.jsonPath("$[0].data[1].value[1].mean").value(107.0))
+ .andExpect(MockMvcResultMatchers.jsonPath("$[0].data[1].value[1].max").value(109.0));
}
@Test
@@ -868,85 +915,85 @@ public class QueryRestControllerJsonTest extends AbstractDaqRestTest {
System.out.println(content);
this.mockMvc
- .perform(
- MockMvcRequestBuilders
- .post(QueryRestController.PATH_QUERY)
- .contentType(MediaType.APPLICATION_JSON)
- .content(content)
- )
- .andDo(MockMvcResultHandlers.print())
- .andExpect(MockMvcResultMatchers.status().isOk())
- .andExpect(MockMvcResultMatchers.jsonPath("$").isArray())
- .andExpect(MockMvcResultMatchers.jsonPath("$[0]").exists())
- .andExpect(MockMvcResultMatchers.jsonPath("$[0].channel").isMap())
- .andExpect(MockMvcResultMatchers.jsonPath("$[0].channel.name").value(TEST_CHANNEL_WAVEFORM_01))
- .andExpect(MockMvcResultMatchers.jsonPath("$[0].channel.backend").value(Backend.SF_DATABUFFER.getKey()))
- .andExpect(MockMvcResultMatchers.jsonPath("$[0].data").isArray())
- .andExpect(MockMvcResultMatchers.jsonPath("$[0].data[0].pulseId").value(100))
- .andExpect(MockMvcResultMatchers.jsonPath("$[0].data[0].globalSeconds").value(
- TestTimeUtils.getTimeStr(1, 0)))
- .andExpect(MockMvcResultMatchers.jsonPath("$[0].data[0].eventCount").value(5))
- .andExpect(MockMvcResultMatchers.jsonPath("$[0].data[0].value").isArray())
- .andExpect(MockMvcResultMatchers.jsonPath("$[0].data[0].value[0].min").value(100.0))
- .andExpect(MockMvcResultMatchers.jsonPath("$[0].data[0].value[0].mean").value(102.0))
- .andExpect(MockMvcResultMatchers.jsonPath("$[0].data[0].value[0].max").value(104.0))
- .andExpect(MockMvcResultMatchers.jsonPath("$[0].data[0].value[1].min").value(100.0))
- .andExpect(MockMvcResultMatchers.jsonPath("$[0].data[0].value[1].mean").value(102.0))
- .andExpect(MockMvcResultMatchers.jsonPath("$[0].data[0].value[1].max").value(104.0))
- .andExpect(MockMvcResultMatchers.jsonPath("$[0].data[0].extrema").isArray())
- .andExpect(MockMvcResultMatchers.jsonPath("$[0].data[0].extrema[0].minValue.value").value(100.0))
- .andExpect(MockMvcResultMatchers.jsonPath("$[0].data[0].extrema[0].minValue.pulseId").value(100))
- .andExpect(MockMvcResultMatchers.jsonPath("$[0].data[0].extrema[0].minValue.globalSeconds").value(
- TestTimeUtils.getTimeStr(1, 0)))
- .andExpect(MockMvcResultMatchers.jsonPath("$[0].data[0].extrema[0].minValue.eventCount").value(1))
- .andExpect(MockMvcResultMatchers.jsonPath("$[0].data[0].extrema[0].maxValue.value").value(104.0))
- .andExpect(MockMvcResultMatchers.jsonPath("$[0].data[0].extrema[0].maxValue.pulseId").value(104))
- .andExpect(MockMvcResultMatchers.jsonPath("$[0].data[0].extrema[0].maxValue.globalSeconds").value(
- TestTimeUtils.getTimeStr(1, 40000000)))
- .andExpect(MockMvcResultMatchers.jsonPath("$[0].data[0].extrema[0].maxValue.eventCount").value(1))
- .andExpect(MockMvcResultMatchers.jsonPath("$[0].data[0].extrema[1].minValue.value").value(100.0))
- .andExpect(MockMvcResultMatchers.jsonPath("$[0].data[0].extrema[1].minValue.pulseId").value(100))
- .andExpect(MockMvcResultMatchers.jsonPath("$[0].data[0].extrema[1].minValue.globalSeconds").value(
- TestTimeUtils.getTimeStr(1, 0)))
- .andExpect(MockMvcResultMatchers.jsonPath("$[0].data[0].extrema[1].minValue.eventCount").value(1))
- .andExpect(MockMvcResultMatchers.jsonPath("$[0].data[0].extrema[1].maxValue.value").value(104.0))
- .andExpect(MockMvcResultMatchers.jsonPath("$[0].data[0].extrema[1].maxValue.pulseId").value(104))
- .andExpect(MockMvcResultMatchers.jsonPath("$[0].data[0].extrema[1].maxValue.globalSeconds").value(
- TestTimeUtils.getTimeStr(1, 40000000)))
- .andExpect(MockMvcResultMatchers.jsonPath("$[0].data[0].extrema[1].maxValue.eventCount").value(1))
+ .perform(
+ MockMvcRequestBuilders
+ .post(DomainConfig.PATH_QUERY)
+ .contentType(MediaType.APPLICATION_JSON)
+ .content(content)
+ )
+ .andDo(MockMvcResultHandlers.print())
+ .andExpect(MockMvcResultMatchers.status().isOk())
+ .andExpect(MockMvcResultMatchers.jsonPath("$").isArray())
+ .andExpect(MockMvcResultMatchers.jsonPath("$[0]").exists())
+ .andExpect(MockMvcResultMatchers.jsonPath("$[0].channel").isMap())
+ .andExpect(MockMvcResultMatchers.jsonPath("$[0].channel.name").value(TEST_CHANNEL_WAVEFORM_01))
+ .andExpect(MockMvcResultMatchers.jsonPath("$[0].channel.backend").value(Backend.SF_DATABUFFER.getKey()))
+ .andExpect(MockMvcResultMatchers.jsonPath("$[0].data").isArray())
+ .andExpect(MockMvcResultMatchers.jsonPath("$[0].data[0].pulseId").value(100))
+ .andExpect(MockMvcResultMatchers.jsonPath("$[0].data[0].globalSeconds").value(
+ TestTimeUtils.getTimeStr(1, 0)))
+ .andExpect(MockMvcResultMatchers.jsonPath("$[0].data[0].eventCount").value(5))
+ .andExpect(MockMvcResultMatchers.jsonPath("$[0].data[0].value").isArray())
+ .andExpect(MockMvcResultMatchers.jsonPath("$[0].data[0].value[0].min").value(100.0))
+ .andExpect(MockMvcResultMatchers.jsonPath("$[0].data[0].value[0].mean").value(102.0))
+ .andExpect(MockMvcResultMatchers.jsonPath("$[0].data[0].value[0].max").value(104.0))
+ .andExpect(MockMvcResultMatchers.jsonPath("$[0].data[0].value[1].min").value(100.0))
+ .andExpect(MockMvcResultMatchers.jsonPath("$[0].data[0].value[1].mean").value(102.0))
+ .andExpect(MockMvcResultMatchers.jsonPath("$[0].data[0].value[1].max").value(104.0))
+ .andExpect(MockMvcResultMatchers.jsonPath("$[0].data[0].extrema").isArray())
+ .andExpect(MockMvcResultMatchers.jsonPath("$[0].data[0].extrema[0].minValue.value").value(100.0))
+ .andExpect(MockMvcResultMatchers.jsonPath("$[0].data[0].extrema[0].minValue.pulseId").value(100))
+ .andExpect(MockMvcResultMatchers.jsonPath("$[0].data[0].extrema[0].minValue.globalSeconds").value(
+ TestTimeUtils.getTimeStr(1, 0)))
+ .andExpect(MockMvcResultMatchers.jsonPath("$[0].data[0].extrema[0].minValue.eventCount").value(1))
+ .andExpect(MockMvcResultMatchers.jsonPath("$[0].data[0].extrema[0].maxValue.value").value(104.0))
+ .andExpect(MockMvcResultMatchers.jsonPath("$[0].data[0].extrema[0].maxValue.pulseId").value(104))
+ .andExpect(MockMvcResultMatchers.jsonPath("$[0].data[0].extrema[0].maxValue.globalSeconds").value(
+ TestTimeUtils.getTimeStr(1, 40000000)))
+ .andExpect(MockMvcResultMatchers.jsonPath("$[0].data[0].extrema[0].maxValue.eventCount").value(1))
+ .andExpect(MockMvcResultMatchers.jsonPath("$[0].data[0].extrema[1].minValue.value").value(100.0))
+ .andExpect(MockMvcResultMatchers.jsonPath("$[0].data[0].extrema[1].minValue.pulseId").value(100))
+ .andExpect(MockMvcResultMatchers.jsonPath("$[0].data[0].extrema[1].minValue.globalSeconds").value(
+ TestTimeUtils.getTimeStr(1, 0)))
+ .andExpect(MockMvcResultMatchers.jsonPath("$[0].data[0].extrema[1].minValue.eventCount").value(1))
+ .andExpect(MockMvcResultMatchers.jsonPath("$[0].data[0].extrema[1].maxValue.value").value(104.0))
+ .andExpect(MockMvcResultMatchers.jsonPath("$[0].data[0].extrema[1].maxValue.pulseId").value(104))
+ .andExpect(MockMvcResultMatchers.jsonPath("$[0].data[0].extrema[1].maxValue.globalSeconds").value(
+ TestTimeUtils.getTimeStr(1, 40000000)))
+ .andExpect(MockMvcResultMatchers.jsonPath("$[0].data[0].extrema[1].maxValue.eventCount").value(1))
- .andExpect(MockMvcResultMatchers.jsonPath("$[0].data[1].pulseId").value(105))
- .andExpect(MockMvcResultMatchers.jsonPath("$[0].data[1].globalSeconds").value(
- TestTimeUtils.getTimeStr(1, 50000000)))
- .andExpect(MockMvcResultMatchers.jsonPath("$[0].data[1].eventCount").value(5))
- .andExpect(MockMvcResultMatchers.jsonPath("$[0].data[1].value").isArray())
- .andExpect(MockMvcResultMatchers.jsonPath("$[0].data[1].value[0].min").value(105.0))
- .andExpect(MockMvcResultMatchers.jsonPath("$[0].data[1].value[0].mean").value(107.0))
- .andExpect(MockMvcResultMatchers.jsonPath("$[0].data[1].value[0].max").value(109.0))
- .andExpect(MockMvcResultMatchers.jsonPath("$[0].data[1].value[1].min").value(105.0))
- .andExpect(MockMvcResultMatchers.jsonPath("$[0].data[1].value[1].mean").value(107.0))
- .andExpect(MockMvcResultMatchers.jsonPath("$[0].data[1].value[1].max").value(109.0))
- .andExpect(MockMvcResultMatchers.jsonPath("$[0].data[1].extrema").isArray())
- .andExpect(MockMvcResultMatchers.jsonPath("$[0].data[1].extrema[0].minValue.value").value(105.0))
- .andExpect(MockMvcResultMatchers.jsonPath("$[0].data[1].extrema[0].minValue.pulseId").value(105))
- .andExpect(MockMvcResultMatchers.jsonPath("$[0].data[1].extrema[0].minValue.globalSeconds").value(
- TestTimeUtils.getTimeStr(1, 50000000)))
- .andExpect(MockMvcResultMatchers.jsonPath("$[0].data[1].extrema[0].minValue.eventCount").value(1))
- .andExpect(MockMvcResultMatchers.jsonPath("$[0].data[1].extrema[0].maxValue.value").value(109.0))
- .andExpect(MockMvcResultMatchers.jsonPath("$[0].data[1].extrema[0].maxValue.pulseId").value(109))
- .andExpect(MockMvcResultMatchers.jsonPath("$[0].data[1].extrema[0].maxValue.globalSeconds").value(
- TestTimeUtils.getTimeStr(1, 90000000)))
- .andExpect(MockMvcResultMatchers.jsonPath("$[0].data[1].extrema[0].maxValue.eventCount").value(1))
- .andExpect(MockMvcResultMatchers.jsonPath("$[0].data[1].extrema[1].minValue.value").value(105.0))
- .andExpect(MockMvcResultMatchers.jsonPath("$[0].data[1].extrema[1].minValue.pulseId").value(105))
- .andExpect(MockMvcResultMatchers.jsonPath("$[0].data[1].extrema[1].minValue.globalSeconds").value(
- TestTimeUtils.getTimeStr(1, 50000000)))
- .andExpect(MockMvcResultMatchers.jsonPath("$[0].data[1].extrema[1].minValue.eventCount").value(1))
- .andExpect(MockMvcResultMatchers.jsonPath("$[0].data[1].extrema[1].maxValue.value").value(109.0))
- .andExpect(MockMvcResultMatchers.jsonPath("$[0].data[1].extrema[1].maxValue.pulseId").value(109))
- .andExpect(MockMvcResultMatchers.jsonPath("$[0].data[1].extrema[1].maxValue.globalSeconds").value(
- TestTimeUtils.getTimeStr(1, 90000000)))
- .andExpect(MockMvcResultMatchers.jsonPath("$[0].data[1].extrema[1].maxValue.eventCount").value(1));
+ .andExpect(MockMvcResultMatchers.jsonPath("$[0].data[1].pulseId").value(105))
+ .andExpect(MockMvcResultMatchers.jsonPath("$[0].data[1].globalSeconds").value(
+ TestTimeUtils.getTimeStr(1, 50000000)))
+ .andExpect(MockMvcResultMatchers.jsonPath("$[0].data[1].eventCount").value(5))
+ .andExpect(MockMvcResultMatchers.jsonPath("$[0].data[1].value").isArray())
+ .andExpect(MockMvcResultMatchers.jsonPath("$[0].data[1].value[0].min").value(105.0))
+ .andExpect(MockMvcResultMatchers.jsonPath("$[0].data[1].value[0].mean").value(107.0))
+ .andExpect(MockMvcResultMatchers.jsonPath("$[0].data[1].value[0].max").value(109.0))
+ .andExpect(MockMvcResultMatchers.jsonPath("$[0].data[1].value[1].min").value(105.0))
+ .andExpect(MockMvcResultMatchers.jsonPath("$[0].data[1].value[1].mean").value(107.0))
+ .andExpect(MockMvcResultMatchers.jsonPath("$[0].data[1].value[1].max").value(109.0))
+ .andExpect(MockMvcResultMatchers.jsonPath("$[0].data[1].extrema").isArray())
+ .andExpect(MockMvcResultMatchers.jsonPath("$[0].data[1].extrema[0].minValue.value").value(105.0))
+ .andExpect(MockMvcResultMatchers.jsonPath("$[0].data[1].extrema[0].minValue.pulseId").value(105))
+ .andExpect(MockMvcResultMatchers.jsonPath("$[0].data[1].extrema[0].minValue.globalSeconds").value(
+ TestTimeUtils.getTimeStr(1, 50000000)))
+ .andExpect(MockMvcResultMatchers.jsonPath("$[0].data[1].extrema[0].minValue.eventCount").value(1))
+ .andExpect(MockMvcResultMatchers.jsonPath("$[0].data[1].extrema[0].maxValue.value").value(109.0))
+ .andExpect(MockMvcResultMatchers.jsonPath("$[0].data[1].extrema[0].maxValue.pulseId").value(109))
+ .andExpect(MockMvcResultMatchers.jsonPath("$[0].data[1].extrema[0].maxValue.globalSeconds").value(
+ TestTimeUtils.getTimeStr(1, 90000000)))
+ .andExpect(MockMvcResultMatchers.jsonPath("$[0].data[1].extrema[0].maxValue.eventCount").value(1))
+ .andExpect(MockMvcResultMatchers.jsonPath("$[0].data[1].extrema[1].minValue.value").value(105.0))
+ .andExpect(MockMvcResultMatchers.jsonPath("$[0].data[1].extrema[1].minValue.pulseId").value(105))
+ .andExpect(MockMvcResultMatchers.jsonPath("$[0].data[1].extrema[1].minValue.globalSeconds").value(
+ TestTimeUtils.getTimeStr(1, 50000000)))
+ .andExpect(MockMvcResultMatchers.jsonPath("$[0].data[1].extrema[1].minValue.eventCount").value(1))
+ .andExpect(MockMvcResultMatchers.jsonPath("$[0].data[1].extrema[1].maxValue.value").value(109.0))
+ .andExpect(MockMvcResultMatchers.jsonPath("$[0].data[1].extrema[1].maxValue.pulseId").value(109))
+ .andExpect(MockMvcResultMatchers.jsonPath("$[0].data[1].extrema[1].maxValue.globalSeconds").value(
+ TestTimeUtils.getTimeStr(1, 90000000)))
+ .andExpect(MockMvcResultMatchers.jsonPath("$[0].data[1].extrema[1].maxValue.eventCount").value(1));
}
@Test
@@ -963,7 +1010,7 @@ public class QueryRestControllerJsonTest extends AbstractDaqRestTest {
this.mockMvc
.perform(MockMvcRequestBuilders
- .post(QueryRestController.PATH_QUERY)
+ .post(DomainConfig.PATH_QUERY)
.contentType(MediaType.APPLICATION_JSON)
.content(content))
@@ -984,7 +1031,7 @@ public class QueryRestControllerJsonTest extends AbstractDaqRestTest {
this.mockMvc
.perform(MockMvcRequestBuilders
- .post(QueryRestController.PATH_QUERY)
+ .post(DomainConfig.PATH_QUERY)
.contentType(MediaType.APPLICATION_JSON)
.content(content))
@@ -1007,7 +1054,7 @@ public class QueryRestControllerJsonTest extends AbstractDaqRestTest {
this.mockMvc
.perform(MockMvcRequestBuilders
- .post(QueryRestController.PATH_QUERY)
+ .post(DomainConfig.PATH_QUERY)
.contentType(MediaType.APPLICATION_JSON)
.content(content))
@@ -1029,7 +1076,7 @@ public class QueryRestControllerJsonTest extends AbstractDaqRestTest {
this.mockMvc
.perform(MockMvcRequestBuilders
- .post(QueryRestController.PATH_QUERY)
+ .post(DomainConfig.PATH_QUERY)
.contentType(MediaType.APPLICATION_JSON)
.content(content))
@@ -1051,7 +1098,7 @@ public class QueryRestControllerJsonTest extends AbstractDaqRestTest {
this.mockMvc
.perform(MockMvcRequestBuilders
- .post(QueryRestController.PATH_QUERY)
+ .post(DomainConfig.PATH_QUERY)
.contentType(MediaType.APPLICATION_JSON)
.content(content))
diff --git a/src/test/java/ch/psi/daq/test/queryrest/query/DummyArchiverApplianceReader.java b/src/test/java/ch/psi/daq/test/queryrest/query/DummyArchiverApplianceReader.java
index b3facf2..c89db38 100644
--- a/src/test/java/ch/psi/daq/test/queryrest/query/DummyArchiverApplianceReader.java
+++ b/src/test/java/ch/psi/daq/test/queryrest/query/DummyArchiverApplianceReader.java
@@ -7,11 +7,12 @@ import java.util.stream.Stream;
import com.google.common.collect.Lists;
+import ch.psi.daq.common.time.TimeUtils;
import ch.psi.daq.domain.StreamEvent;
-import ch.psi.daq.domain.query.backend.PulseIdRangeQuery;
-import ch.psi.daq.domain.query.backend.TimeRangeQuery;
+import ch.psi.daq.domain.backend.Backend;
import ch.psi.daq.domain.query.event.EventQuery;
-import ch.psi.daq.domain.reader.Backend;
+import ch.psi.daq.domain.query.range.PulseIdRangeQuery;
+import ch.psi.daq.domain.query.range.TimeRangeQuery;
import ch.psi.daq.domain.reader.DataReader;
public class DummyArchiverApplianceReader implements DataReader {
@@ -42,16 +43,30 @@ public class DummyArchiverApplianceReader implements DataReader {
return channelStream;
}
+ // @Override
+ // public Stream<? extends StreamEvent> getEventStream(PulseIdRangeQuery query) {
+ // return DummyCassandraReader.getDummyEventStream(query.getChannel(), query.getStartPulseId(),
+ // query.getEndPulseId(),
+ // query.getEventColumns())
+ // .filter(query.getFilterOrDefault(EventQuery.NO_OP_FILTER));
+ // }
+
@Override
- public Stream<? extends StreamEvent> getEventStream(PulseIdRangeQuery query) {
- return DummyCassandraReader.getDummyEventStream(query.getChannel(), query.getStartPulseId(), query.getEndPulseId(),
- query.getEventColumns())
+ public Stream<? extends StreamEvent> getEventStream(TimeRangeQuery query) {
+ return DummyCassandraReader.getDummyEventStream(query.getChannel(), query.getStartMillis() / 10,
+ query.getEndMillis() / 10)
.filter(query.getFilterOrDefault(EventQuery.NO_OP_FILTER));
}
@Override
- public Stream<? extends StreamEvent> getEventStream(TimeRangeQuery query) {
- return DummyCassandraReader.getDummyEventStream(query.getChannel(), query.getStartMillis() / 10, query.getEndMillis() / 10)
- .filter(query.getFilterOrDefault(EventQuery.NO_OP_FILTER));
+ public TimeRangeQuery getTimeRangeQuery(PulseIdRangeQuery query) {
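+ // Dummy pulse-id/time mapping: pulse-id N corresponds to N * 10 ms, the inverse of the / 10 scaling applied in getEventStream above.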
+ return new TimeRangeQuery(
+ TimeUtils.getTimeFromMillis(query.getStartPulseId() * 10, 0),
+ TimeUtils.getTimeFromMillis(query.getEndPulseId() * 10, 0),
+ query);
+ }
+
+ @Override
+ public void truncateCache() {
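+ // Nothing to truncate for this in-memory dummy reader.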
}
}
diff --git a/src/test/java/ch/psi/daq/test/queryrest/query/DummyCassandraReader.java b/src/test/java/ch/psi/daq/test/queryrest/query/DummyCassandraReader.java
index 6f96cba..7f1595a 100644
--- a/src/test/java/ch/psi/daq/test/queryrest/query/DummyCassandraReader.java
+++ b/src/test/java/ch/psi/daq/test/queryrest/query/DummyCassandraReader.java
@@ -1,10 +1,9 @@
-/**
- *
- */
package ch.psi.daq.test.queryrest.query;
import java.math.BigDecimal;
+import java.util.ArrayList;
import java.util.Arrays;
+import java.util.Collections;
import java.util.List;
import java.util.Random;
import java.util.concurrent.CompletableFuture;
@@ -15,45 +14,54 @@ import java.util.stream.Collectors;
import java.util.stream.LongStream;
import java.util.stream.Stream;
+import javax.annotation.PostConstruct;
+import javax.annotation.Resource;
+
import org.apache.commons.lang3.ArrayUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.google.common.collect.Lists;
+import ch.psi.bsread.message.Type;
import ch.psi.daq.cassandra.reader.CassandraReader;
-import ch.psi.daq.cassandra.util.test.CassandraDataGen;
+import ch.psi.daq.common.ordering.Ordering;
import ch.psi.daq.common.time.TimeUtils;
import ch.psi.daq.domain.DataEvent;
import ch.psi.daq.domain.FieldNames;
+import ch.psi.daq.domain.backend.Backend;
+import ch.psi.daq.domain.config.DomainConfig;
import ch.psi.daq.domain.events.ChannelConfiguration;
import ch.psi.daq.domain.events.ChannelEvent;
-import ch.psi.daq.domain.events.MetaPulseId;
+import ch.psi.daq.domain.events.impl.ChannelConfigurationImpl;
import ch.psi.daq.domain.events.impl.ChannelEventImpl;
-import ch.psi.daq.domain.query.backend.PulseIdRangeQuery;
-import ch.psi.daq.domain.query.backend.TimeRangeQuery;
+import ch.psi.daq.domain.json.channels.info.ChannelInfo;
+import ch.psi.daq.domain.json.channels.info.ChannelInfoImpl;
import ch.psi.daq.domain.query.event.EventQuery;
import ch.psi.daq.domain.query.event.StreamEventQuery;
-import ch.psi.daq.domain.reader.Backend;
+import ch.psi.daq.domain.query.range.PulseIdRangeQuery;
+import ch.psi.daq.domain.query.range.TimeRangeQuery;
import ch.psi.daq.domain.reader.MetaStreamEventQuery;
+import ch.psi.daq.domain.test.backend.TestBackendAccess;
+import ch.psi.daq.domain.test.gen.TestDataGen;
import ch.psi.daq.domain.utils.PropertiesUtils;
public class DummyCassandraReader implements CassandraReader {
private static final Logger LOGGER = LoggerFactory.getLogger(DummyCassandraReader.class);
public static final String DATABUFFER_TEST_CHANNEL = "DataBufferTestChannel_";
private static final Random random = new Random(0);
-
private static final int KEYSPACE = 1;
- private CassandraDataGen dataGen;
private List<String> channels;
private AtomicLong channelNameCallCounter = new AtomicLong();
- /**
- *
- */
- public DummyCassandraReader() {
- this.dataGen = new CassandraDataGen();
+ private TestDataGen dataGen;
+ private Backend backend = Backend.SF_DATABUFFER;
+ @Resource(name = DomainConfig.BEAN_NAME_TEST_BACKEND_ACCESS)
+ private TestBackendAccess testBackendAccess;
+
+
+ public DummyCassandraReader() {
this.channels = Lists.newArrayList(
"BoolScalar",
"BoolWaveform",
@@ -80,9 +88,14 @@ public class DummyCassandraReader implements CassandraReader {
"StringScalar");
}
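+ // Resolve the backend-specific test data generator once the TestBackendAccess resource has been injected.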
+ @PostConstruct
+ public void afterPropertiesSet() {
+ dataGen = testBackendAccess.getTestDataGen(backend);
+ }
+
@Override
public Backend getBackend() {
- return Backend.SF_DATABUFFER;
+ return backend;
}
/**
@@ -100,12 +113,12 @@ public class DummyCassandraReader implements CassandraReader {
return channelStream;
}
- @Override
- public Stream<ChannelEvent> getEventStream(PulseIdRangeQuery query) {
- return getDummyEventStream(query.getChannel(), query.getStartPulseId(), query.getEndPulseId(),
- query.getEventColumns())
- .filter(query.getFilterOrDefault(EventQuery.NO_OP_FILTER));
- }
+ // @Override
+ // public Stream<ChannelEvent> getEventStream(PulseIdRangeQuery query) {
+ // return getDummyEventStream(query.getChannel(), query.getStartPulseId(), query.getEndPulseId(),
+ // query.getEventColumns())
+ // .filter(query.getFilterOrDefault(EventQuery.NO_OP_FILTER));
+ // }
@Override
public Stream<ChannelEvent> getEventStream(TimeRangeQuery query) {
@@ -113,10 +126,6 @@ public class DummyCassandraReader implements CassandraReader {
.filter(query.getFilterOrDefault(EventQuery.NO_OP_FILTER));
}
- /**
- * @{inheritDoc
- */
- @Override
public Stream<ChannelEvent> getEventStream(EventQuery eventQuery, Stream<? extends StreamEventQuery> queryProviders) {
Stream<ChannelEvent> result = queryProviders.map(ceq -> {
if (ceq instanceof MetaStreamEventQuery) {
@@ -207,7 +216,7 @@ public class DummyCassandraReader implements CassandraReader {
);
}
});
-
+
return eventStream;
}
@@ -256,30 +265,26 @@ public class DummyCassandraReader implements CassandraReader {
throw new UnsupportedOperationException();
}
- /**
- * @{inheritDoc
- */
- @Override
- public Stream<? extends StreamEventQuery> getStreamEventQueryStream(PulseIdRangeQuery query) {
+ // /**
+ // * @{inheritDoc
+ // */
+ // @Override
+ // public Stream<? extends StreamEventQuery> getStreamEventQueryStream(PulseIdRangeQuery query) {
+ //
+ // return dataGen.generateMetaPulseId(
+ // query.getStartPulseId(),
+ // (query.getEndPulseId() - query.getStartPulseId() + 1),
+ // i -> i * 10,
+ // i -> 0,
+ // i -> i,
+ // query.getChannel())
+ // .stream()
+ // .map(metaPulse -> {
+ // metaPulse.setKeyspace(KEYSPACE);
+ // return metaPulse;
+ // });
+ // }
- return dataGen.generateMetaPulseId(
- query.getStartPulseId(),
- (query.getEndPulseId() - query.getStartPulseId() + 1),
- i -> i * 10,
- i -> 0,
- i -> i,
- query.getChannel())
- .stream()
- .map(metaPulse -> {
- metaPulse.setKeyspace(KEYSPACE);
- return metaPulse;
- });
- }
-
- /**
- * @{inheritDoc
- */
- @Override
public Stream<? extends StreamEventQuery> getStreamEventQueryStream(TimeRangeQuery query) {
return dataGen.generateMetaTime(
@@ -293,17 +298,17 @@ public class DummyCassandraReader implements CassandraReader {
query.getChannel()).stream();
}
- /**
- * @{inheritDoc
- */
- @Override
- public Stream<MetaPulseId> getMetaStream(PulseIdRangeQuery query) {
-
- return getStreamEventQueryStream(query).map(r -> {
- return (MetaPulseId) r;
- });
-
- }
+ // /**
+ // * @{inheritDoc
+ // */
+ // @Override
+ // public Stream<MetaPulseId> getMetaStream(PulseIdRangeQuery query) {
+ //
+ // return getStreamEventQueryStream(query).map(r -> {
+ // return (MetaPulseId) r;
+ // });
+ //
+ // }
/**
@@ -324,8 +329,50 @@ public class DummyCassandraReader implements CassandraReader {
@Override
public Stream<ChannelConfiguration> getChannelConfiguration(TimeRangeQuery query) {
- // implement when needed
- throw new UnsupportedOperationException();
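+ // Produce a synthetic Int32 scalar configuration at the query start and, for non-empty ranges, a second one at the query end; reverse the list for descending ordering.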
+ List<ChannelConfiguration> configs = new ArrayList<>();
+
+ BigDecimal time = query.getStartTime();
+ configs.add(
+ new ChannelConfigurationImpl(
+ query.getChannel(),
+ time,
+ TimeUtils.getMillis(time) / 10,
+ 0,
+ Type.Int32.getKey(),
+ new int[] {1},
+ false,
+ ChannelConfiguration.DEFAULT_LOCAL_WRITE,
+ ChannelConfiguration.DEFAULT_BIN_SIZE_IN_MILLIS,
+ ChannelConfiguration.SPLIT_COUNT,
+ ChannelConfiguration.DEFAULT_SOURCE,
+ ChannelConfiguration.DEFAULT_MODULO,
+ ChannelConfiguration.DEFAULT_OFFSET,
+ Backend.SF_DATABUFFER));
+ if (query.getEndMillis() > query.getStartMillis()) {
+ time = query.getEndTime();
+ configs.add(
+ new ChannelConfigurationImpl(
+ query.getChannel(),
+ time,
+ TimeUtils.getMillis(time) / 10,
+ 1,
+ Type.Int32.getKey(),
+ new int[] {1},
+ false,
+ ChannelConfiguration.DEFAULT_LOCAL_WRITE,
+ ChannelConfiguration.DEFAULT_BIN_SIZE_IN_MILLIS,
+ ChannelConfiguration.SPLIT_COUNT,
+ ChannelConfiguration.DEFAULT_SOURCE,
+ ChannelConfiguration.DEFAULT_MODULO,
+ ChannelConfiguration.DEFAULT_OFFSET,
+ Backend.SF_DATABUFFER));
+ }
+
+ if (Ordering.desc.equals(query.getOrdering())) {
+ Collections.reverse(configs);
+ }
+
+ return configs.stream();
}
@Override
@@ -361,4 +408,22 @@ public class DummyCassandraReader implements CassandraReader {
// implement when needed
throw new UnsupportedOperationException();
}
+
+ @Override
+ public TimeRangeQuery getTimeRangeQuery(PulseIdRangeQuery query) {
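+ // Same 10 ms-per-pulse mapping as the dummy archiver reader: pulse-id N maps to N * 10 ms.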
+ return new TimeRangeQuery(
+ TimeUtils.getTimeFromMillis(query.getStartPulseId() * 10, 0),
+ TimeUtils.getTimeFromMillis(query.getEndPulseId() * 10, 0),
+ query);
+ }
+
+ @Override
+ public Stream<? extends ChannelInfo> getChannelInfoStream(TimeRangeQuery query) {
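+ // Channel infos are derived one-to-one from the synthetic channel configurations above.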
+ return getChannelConfiguration(query)
+ .map(channelConfiguration -> new ChannelInfoImpl(channelConfiguration));
+ }
+
+ @Override
+ public void truncateCache() {
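+ // No cache is kept by the dummy reader, so there is nothing to do here.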
+ }
}
diff --git a/src/test/java/ch/psi/daq/test/queryrest/status/DummyArchiverApplianceStatusReader.java b/src/test/java/ch/psi/daq/test/queryrest/status/DummyArchiverApplianceStatusReader.java
deleted file mode 100644
index bdf600b..0000000
--- a/src/test/java/ch/psi/daq/test/queryrest/status/DummyArchiverApplianceStatusReader.java
+++ /dev/null
@@ -1,22 +0,0 @@
-package ch.psi.daq.test.queryrest.status;
-
-import java.util.concurrent.CompletableFuture;
-
-import ch.psi.daq.common.time.TimeUtils;
-import ch.psi.daq.domain.json.ChannelName;
-import ch.psi.daq.domain.json.status.channel.ChannelStatus;
-import ch.psi.daq.domain.reader.Backend;
-import ch.psi.daq.domain.status.AbstractStatusReader;
-
-public class DummyArchiverApplianceStatusReader extends AbstractStatusReader {
-
- public DummyArchiverApplianceStatusReader() {
- super(Backend.SF_ARCHIVERAPPLIANCE, 30);
- }
-
- @Override
- public CompletableFuture<ChannelStatus> getChannelStatusAsync(String channel) {
- return CompletableFuture.completedFuture(new ChannelStatus(new ChannelName(channel, getBackend()), true, true,
- TimeUtils.getTimeFromMillis(1467638000000L, 0)));
- }
-}
diff --git a/src/test/java/ch/psi/daq/test/queryrest/status/DummyCassandraStatusReader.java b/src/test/java/ch/psi/daq/test/queryrest/status/DummyCassandraStatusReader.java
deleted file mode 100644
index 6d2cd3a..0000000
--- a/src/test/java/ch/psi/daq/test/queryrest/status/DummyCassandraStatusReader.java
+++ /dev/null
@@ -1,22 +0,0 @@
-package ch.psi.daq.test.queryrest.status;
-
-import java.util.concurrent.CompletableFuture;
-
-import ch.psi.daq.common.time.TimeUtils;
-import ch.psi.daq.domain.json.ChannelName;
-import ch.psi.daq.domain.json.status.channel.ChannelStatus;
-import ch.psi.daq.domain.reader.Backend;
-import ch.psi.daq.domain.status.AbstractStatusReader;
-
-public class DummyCassandraStatusReader extends AbstractStatusReader {
-
- public DummyCassandraStatusReader() {
- super(Backend.SF_DATABUFFER, 30);
- }
-
- @Override
- public CompletableFuture<ChannelStatus> getChannelStatusAsync(String channel) {
- return CompletableFuture.completedFuture(new ChannelStatus(new ChannelName(channel, getBackend()), false, false,
- TimeUtils.getTimeFromMillis(0, 0)));
- }
-}