> channelToData = queryAnalizer.postProcess(channelToDataEvents);
+
+ return channelToData.map(entry -> {
+ return Triple.of(query, entry.getKey(), entry.getValue());
+ });
+ });
+
+ // Now we have a stream that loads elements sequentially, BackendQuery by BackendQuery.
+ // By materializing the outer Stream, the elements of all BackendQueries are loaded asynchronously
+ // (this speeds things up but also requires more memory - i.e. it relies on Backends not loading
+ // all elements into memory at once)
+ resultStreams = resultStreams.collect(Collectors.toList()).stream();
+
+ results.add(Pair.of(queryElement, resultStreams));
+ }
+
+ return results;
+ }
+
+}
diff --git a/src/main/java/ch/psi/daq/queryrest/response/AbstractResponse.java b/src/main/java/ch/psi/daq/queryrest/response/AbstractResponse.java
new file mode 100644
index 0000000..5369d27
--- /dev/null
+++ b/src/main/java/ch/psi/daq/queryrest/response/AbstractResponse.java
@@ -0,0 +1,60 @@
+package ch.psi.daq.queryrest.response;
+
+import java.io.OutputStream;
+
+import javax.servlet.http.HttpServletResponse;
+
+import org.springframework.context.ApplicationContext;
+
+import com.fasterxml.jackson.annotation.JsonIgnore;
+import com.fasterxml.jackson.core.JsonEncoding;
+
+import ch.psi.daq.domain.query.DAQQueries;
+import ch.psi.daq.domain.query.operation.ResponseFormat;
+import ch.psi.daq.domain.query.operation.ResponseImpl;
+
+public abstract class AbstractResponse extends ResponseImpl {
+
+ public AbstractResponse(ResponseFormat format) {
+ super(format);
+ }
+
+ @JsonIgnore
+ @Override
+ public abstract void respond(ApplicationContext context, DAQQueries queries, Object response) throws Exception;
+
+ /**
+ * Configures the output stream and headers according to whether compression is wanted or not.
+ *
+ * In order not to lose the information of the underlying type of data being transferred, the
+ * Content-Type header stays the same but, if compressed, the content-encoding header will be set
+ * accordingly.
+ *
+ * see http://tools.ietf.org/html/rfc2616#section-14.11 and see
+ * http://tools.ietf.org/html/rfc2616#section-3.5
+ *
+ * @param httpResponse The HttpServletResponse
+ * @param contentType The content type
+ * @return OutputStream The OutputStream
+ * @throws Exception if something goes wrong
+ */
+ @JsonIgnore
+ protected OutputStream handleCompressionAndResponseHeaders(HttpServletResponse httpResponse,
+ String contentType) throws Exception {
+ OutputStream out = httpResponse.getOutputStream();
+
+ httpResponse.setCharacterEncoding(JsonEncoding.UTF8.getJavaName());
+ httpResponse.setContentType(contentType);
+
+ httpResponse.addHeader("Content-Type", contentType);
+ String filename = "data." + this.getFileSuffix();
+ httpResponse.addHeader("Content-Disposition", "attachment; filename=" + filename);
+
+ if (this.isCompressed()) {
+ httpResponse.addHeader("Content-Encoding", this.getCompression().toString());
+ out = this.getCompression().wrapStream(out);
+ }
+
+ return out;
+ }
+}
diff --git a/src/main/java/ch/psi/daq/queryrest/response/AbstractResponseStreamWriter.java b/src/main/java/ch/psi/daq/queryrest/response/AbstractResponseStreamWriter.java
deleted file mode 100644
index 496826f..0000000
--- a/src/main/java/ch/psi/daq/queryrest/response/AbstractResponseStreamWriter.java
+++ /dev/null
@@ -1,54 +0,0 @@
-/**
- *
- */
-package ch.psi.daq.queryrest.response;
-
-import java.io.OutputStream;
-
-import javax.servlet.http.HttpServletResponse;
-
-import org.springframework.http.MediaType;
-
-import ch.psi.daq.domain.query.operation.ResponseFormat;
-import ch.psi.daq.domain.query.operation.ResponseOptions;
-
-public abstract class AbstractResponseStreamWriter implements ResponseStreamWriter {
-
- public static final String CONTENT_TYPE_CSV = "text/csv";
- protected static final String CONTENT_TYPE_JSON = MediaType.APPLICATION_JSON_VALUE;
-
- /**
- * Configures the output stream and headers according to whether compression is wanted or not.
- *
- * In order not to lose the information of the underlying type of data being transferred, the
- * Content-Type header stays the same but, if compressed, the content-encoding header will be set
- * accordingly.
- *
- * see http://tools.ietf.org/html/rfc2616#section-14.11 and
- * see http://tools.ietf.org/html/rfc2616#section-3.5
- *
- * @param options The options for the response
- * @param response The HttpServletResponse
- * @param contentType The content type
- * @return OutputStream The OutputStream
- * @throws Exception Something goes wrong
- */
- protected OutputStream handleCompressionAndResponseHeaders(ResponseOptions options, HttpServletResponse response,
- String contentType) throws Exception {
- OutputStream out = response.getOutputStream();
-
- response.addHeader("Content-Type", contentType);
- if (options.isCompressed()) {
- String filename = "data." + options.getCompression().getFileSuffix();
- response.addHeader("Content-Disposition", "attachment; filename=" + filename);
- response.addHeader("Content-Encoding", options.getCompression().toString());
- out = options.getCompression().wrapStream(out);
- } else {
- String filename = "data." + (options.getResponseFormat() == ResponseFormat.CSV ? "csv" : "json");
- response.addHeader("Content-Disposition", "attachment; filename=" + filename);
- }
-
- return out;
- }
-
-}
diff --git a/src/main/java/ch/psi/daq/queryrest/response/PolymorphicResponseMixIn.java b/src/main/java/ch/psi/daq/queryrest/response/PolymorphicResponseMixIn.java
new file mode 100644
index 0000000..978028b
--- /dev/null
+++ b/src/main/java/ch/psi/daq/queryrest/response/PolymorphicResponseMixIn.java
@@ -0,0 +1,20 @@
+package ch.psi.daq.queryrest.response;
+
+import com.fasterxml.jackson.annotation.JsonSubTypes;
+import com.fasterxml.jackson.annotation.JsonSubTypes.Type;
+import com.fasterxml.jackson.annotation.JsonTypeInfo;
+
+import ch.psi.daq.queryrest.response.csv.CSVResponse;
+import ch.psi.daq.queryrest.response.json.JSONResponse;
+
+@JsonTypeInfo(
+ use = JsonTypeInfo.Id.NAME,
+ include = JsonTypeInfo.As.EXISTING_PROPERTY,
+ property = "format")
+@JsonSubTypes({
+ @Type(value = JSONResponse.class, name = JSONResponse.FORMAT),
+ @Type(value = CSVResponse.class, name = CSVResponse.FORMAT)
+})
+// see: http://stackoverflow.com/questions/24631923/alternative-to-jackson-jsonsubtypes
+public abstract class PolymorphicResponseMixIn {
+}
diff --git a/src/main/java/ch/psi/daq/queryrest/response/ResponseStreamWriter.java b/src/main/java/ch/psi/daq/queryrest/response/ResponseStreamWriter.java
index 177d48f..d733234 100644
--- a/src/main/java/ch/psi/daq/queryrest/response/ResponseStreamWriter.java
+++ b/src/main/java/ch/psi/daq/queryrest/response/ResponseStreamWriter.java
@@ -1,17 +1,16 @@
package ch.psi.daq.queryrest.response;
+import java.io.OutputStream;
import java.util.List;
import java.util.Map.Entry;
import java.util.stream.Stream;
import javax.servlet.ServletResponse;
-import javax.servlet.http.HttpServletResponse;
import org.apache.commons.lang3.tuple.Triple;
import ch.psi.daq.domain.json.ChannelName;
import ch.psi.daq.domain.query.DAQQueryElement;
-import ch.psi.daq.domain.query.operation.ResponseOptions;
import ch.psi.daq.query.model.impl.BackendQuery;
public interface ResponseStreamWriter {
@@ -21,10 +20,8 @@ public interface ResponseStreamWriter {
* {@link ServletResponse}.
*
* @param results The results results
- * @param options The options for the response
- * @param response {@link ServletResponse} instance given by the current HTTP request
+ * @param out The OutputStream
* @throws Exception thrown if writing to the output stream fails
*/
- public void respond(List>>> results, ResponseOptions options,
- HttpServletResponse response) throws Exception;
+ public void respond(List<Entry<DAQQueryElement, Stream<Triple<BackendQuery, ChannelName, ?>>>> results, OutputStream out) throws Exception;
}
diff --git a/src/main/java/ch/psi/daq/queryrest/response/csv/CSVResponse.java b/src/main/java/ch/psi/daq/queryrest/response/csv/CSVResponse.java
new file mode 100644
index 0000000..5dfff3c
--- /dev/null
+++ b/src/main/java/ch/psi/daq/queryrest/response/csv/CSVResponse.java
@@ -0,0 +1,94 @@
+package ch.psi.daq.queryrest.response.csv;
+
+import java.io.OutputStream;
+import java.util.List;
+import java.util.Map.Entry;
+import java.util.stream.Stream;
+
+import javax.servlet.http.HttpServletResponse;
+
+import org.apache.commons.lang3.tuple.Triple;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+import org.springframework.context.ApplicationContext;
+
+import com.hazelcast.util.collection.ArrayUtils;
+
+import ch.psi.daq.domain.FieldNames;
+import ch.psi.daq.domain.json.ChannelName;
+import ch.psi.daq.domain.query.DAQQueries;
+import ch.psi.daq.domain.query.DAQQueryElement;
+import ch.psi.daq.domain.query.operation.AggregationType;
+import ch.psi.daq.domain.query.operation.Compression;
+import ch.psi.daq.domain.query.operation.QueryField;
+import ch.psi.daq.domain.query.operation.ResponseFormat;
+import ch.psi.daq.query.model.impl.BackendQuery;
+import ch.psi.daq.queryrest.query.QueryManager;
+import ch.psi.daq.queryrest.response.AbstractResponse;
+
+public class CSVResponse extends AbstractResponse {
+ private static final Logger LOGGER = LoggerFactory.getLogger(CSVResponse.class);
+
+ public static final String FORMAT = "csv";
+ public static final String CONTENT_TYPE = "text/csv";
+
+ public CSVResponse() {
+ super(ResponseFormat.CSV);
+ }
+
+ public CSVResponse(Compression compression) {
+ this();
+ setCompression(compression);
+ }
+
+ @Override
+ public void respond(ApplicationContext context, DAQQueries queries, Object response) throws Exception {
+ OutputStream out;
+ if (response instanceof HttpServletResponse) {
+ out = super.handleCompressionAndResponseHeaders((HttpServletResponse) response, CONTENT_TYPE);
+ } else {
+ String message =
+ String.format("'%s' does not support response Object of type '%s'", getFormat().getKey(), response
+ .getClass().getName());
+ LOGGER.error(message);
+ throw new IllegalArgumentException(message);
+ }
+
+ // do csv specific validations
+ validateQueries(queries);
+
+ try {
+ LOGGER.debug("Executing query '{}'", queries);
+
+ QueryManager queryManager = context.getBean(QueryManager.class);
+ CSVResponseStreamWriter streamWriter = context.getBean(CSVResponseStreamWriter.class);
+
+ // execute query
+ List<Entry<DAQQueryElement, Stream<Triple<BackendQuery, ChannelName, ?>>>> result =
+ queryManager.executeQueries(queries);
+ // write the response back to the client using java 8 streams
+ streamWriter.respond(result, out);
+ } catch (Exception e) {
+ LOGGER.error("Failed to execute query '{}'.", queries, e);
+ throw e;
+ }
+ }
+
+ protected void validateQueries(DAQQueries queries) {
+ for (DAQQueryElement query : queries) {
+ if (!(query.getAggregationType() == null || AggregationType.value.equals(query.getAggregationType()))) {
+ // We allow only no aggregation or value aggregation as
+ // extrema: nested structure and not clear how to map it to one line
+ // index: value is an array of Statistics whose size is not clear at initialization time
+ String message = "CSV export does not support '" + query.getAggregationType() + "'";
+ LOGGER.warn(message);
+ throw new IllegalArgumentException(message);
+ }
+
+
+ if (!ArrayUtils.contains(query.getColumns(), FieldNames.FIELD_GLOBAL_TIME)) {
+ query.addField(QueryField.globalMillis);
+ }
+ }
+ }
+}
diff --git a/src/main/java/ch/psi/daq/queryrest/response/csv/CSVResponseStreamWriter.java b/src/main/java/ch/psi/daq/queryrest/response/csv/CSVResponseStreamWriter.java
index bde3f73..7c17894 100644
--- a/src/main/java/ch/psi/daq/queryrest/response/csv/CSVResponseStreamWriter.java
+++ b/src/main/java/ch/psi/daq/queryrest/response/csv/CSVResponseStreamWriter.java
@@ -21,7 +21,6 @@ import java.util.stream.Stream;
import javax.annotation.Resource;
import javax.servlet.ServletResponse;
-import javax.servlet.http.HttpServletResponse;
import org.apache.commons.csv.CSVFormat;
import org.apache.commons.csv.CSVPrinter;
@@ -30,8 +29,6 @@ import org.apache.commons.lang3.tuple.Triple;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
-import com.fasterxml.jackson.core.JsonEncoding;
-
import ch.psi.daq.common.stream.StreamIterable;
import ch.psi.daq.common.stream.StreamMatcher;
import ch.psi.daq.domain.DataEvent;
@@ -39,18 +36,18 @@ import ch.psi.daq.domain.json.ChannelName;
import ch.psi.daq.domain.query.DAQQueryElement;
import ch.psi.daq.domain.query.operation.Aggregation;
import ch.psi.daq.domain.query.operation.QueryField;
-import ch.psi.daq.domain.query.operation.ResponseOptions;
import ch.psi.daq.query.analyzer.QueryAnalyzer;
import ch.psi.daq.query.model.Query;
import ch.psi.daq.query.model.impl.BackendQuery;
-import ch.psi.daq.queryrest.response.AbstractResponseStreamWriter;
+import ch.psi.daq.queryrest.response.ResponseStreamWriter;
/**
* Takes a Java 8 stream and writes it to the output stream provided by the {@link ServletResponse}
* of the current request.
*/
-public class CSVResponseStreamWriter extends AbstractResponseStreamWriter {
-
+public class CSVResponseStreamWriter implements ResponseStreamWriter {
+ private static final Logger LOGGER = LoggerFactory.getLogger(CSVResponseStreamWriter.class);
+
public static final char DELIMITER_CVS = ';';
public static final String DELIMITER_ARRAY = ",";
public static final char DELIMITER_CHANNELNAME_FIELDNAME = '.';
@@ -61,27 +58,13 @@ public class CSVResponseStreamWriter extends AbstractResponseStreamWriter {
private static final ToLongFunction> MATCHER_PROVIDER = (pair) -> pair.getValue()
.getGlobalMillis() / 10L;
- private static final Logger LOGGER = LoggerFactory.getLogger(CSVResponseStreamWriter.class);
-
@Resource
private Function queryAnalizerFactory;
@Override
- public void respond(List>>> results,
- ResponseOptions options,
- HttpServletResponse response) throws Exception {
- response.setCharacterEncoding(JsonEncoding.UTF8.getJavaName());
- response.setContentType(CONTENT_TYPE_CSV);
-
- respondInternal(results, options, response);
- }
-
- private void respondInternal(List>>> results,
- ResponseOptions options, HttpServletResponse response) throws Exception {
+ public void respond(final List>>> results, final OutputStream out) throws Exception {
AtomicReference exception = new AtomicReference<>();
- final OutputStream out = handleCompressionAndResponseHeaders(options, response, CONTENT_TYPE_CSV);
-
final Map>> streams = new LinkedHashMap<>(results.size());
final List header = new ArrayList<>();
final Collection>> accessors = new ArrayList<>();
diff --git a/src/main/java/ch/psi/daq/queryrest/response/json/JSONResponse.java b/src/main/java/ch/psi/daq/queryrest/response/json/JSONResponse.java
new file mode 100644
index 0000000..5d7777c
--- /dev/null
+++ b/src/main/java/ch/psi/daq/queryrest/response/json/JSONResponse.java
@@ -0,0 +1,69 @@
+package ch.psi.daq.queryrest.response.json;
+
+import java.io.OutputStream;
+import java.util.List;
+import java.util.Map.Entry;
+import java.util.stream.Stream;
+
+import javax.servlet.http.HttpServletResponse;
+
+import org.apache.commons.lang3.tuple.Triple;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+import org.springframework.context.ApplicationContext;
+import org.springframework.http.MediaType;
+
+import ch.psi.daq.domain.json.ChannelName;
+import ch.psi.daq.domain.query.DAQQueries;
+import ch.psi.daq.domain.query.DAQQueryElement;
+import ch.psi.daq.domain.query.operation.Compression;
+import ch.psi.daq.domain.query.operation.ResponseFormat;
+import ch.psi.daq.query.model.impl.BackendQuery;
+import ch.psi.daq.queryrest.query.QueryManager;
+import ch.psi.daq.queryrest.response.AbstractResponse;
+
+public class JSONResponse extends AbstractResponse {
+ private static final Logger LOGGER = LoggerFactory.getLogger(JSONResponse.class);
+
+ public static final String FORMAT = "json";
+ public static final String CONTENT_TYPE = MediaType.APPLICATION_JSON_VALUE;
+
+ public JSONResponse() {
+ super(ResponseFormat.JSON);
+ }
+
+ public JSONResponse(Compression compression) {
+ this();
+ setCompression(compression);
+ }
+
+ @Override
+ public void respond(ApplicationContext context, DAQQueries queries, Object response) throws Exception {
+ OutputStream out;
+ if (response instanceof HttpServletResponse) {
+ out = super.handleCompressionAndResponseHeaders((HttpServletResponse) response, CONTENT_TYPE);
+ } else {
+ String message =
+ String.format("'%s' does not support response Object of type '%s'", getFormat().getKey(), response.getClass()
+ .getName());
+ LOGGER.error(message);
+ throw new IllegalArgumentException(message);
+ }
+
+ try {
+ LOGGER.debug("Executing query '{}'", queries);
+
+ QueryManager queryManager = context.getBean(QueryManager.class);
+ JSONResponseStreamWriter streamWriter = context.getBean(JSONResponseStreamWriter.class);
+
+ // execute query
+ List<Entry<DAQQueryElement, Stream<Triple<BackendQuery, ChannelName, ?>>>> result = queryManager.executeQueries(queries);
+ // write the response back to the client using java 8 streams
+ streamWriter.respond(result, out);
+ } catch (Exception e) {
+ LOGGER.error("Failed to execute query '{}'.", queries, e);
+ throw e;
+ }
+ }
+
+}
diff --git a/src/main/java/ch/psi/daq/queryrest/response/json/JSONResponseStreamWriter.java b/src/main/java/ch/psi/daq/queryrest/response/json/JSONResponseStreamWriter.java
index 6a22a45..bea2934 100644
--- a/src/main/java/ch/psi/daq/queryrest/response/json/JSONResponseStreamWriter.java
+++ b/src/main/java/ch/psi/daq/queryrest/response/json/JSONResponseStreamWriter.java
@@ -10,12 +10,10 @@ import java.util.stream.Stream;
import javax.annotation.Resource;
import javax.servlet.ServletResponse;
-import javax.servlet.http.HttpServletResponse;
import org.apache.commons.lang3.tuple.Triple;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
-import org.springframework.http.MediaType;
import com.fasterxml.jackson.core.JsonEncoding;
import com.fasterxml.jackson.core.JsonFactory;
@@ -29,15 +27,14 @@ import ch.psi.daq.domain.json.ChannelName;
import ch.psi.daq.domain.query.DAQQueryElement;
import ch.psi.daq.domain.query.operation.Aggregation;
import ch.psi.daq.domain.query.operation.QueryField;
-import ch.psi.daq.domain.query.operation.ResponseOptions;
import ch.psi.daq.query.model.impl.BackendQuery;
-import ch.psi.daq.queryrest.response.AbstractResponseStreamWriter;
+import ch.psi.daq.queryrest.response.ResponseStreamWriter;
/**
* Takes a Java 8 stream and writes it to the output stream provided by the {@link ServletResponse}
* of the current request.
*/
-public class JSONResponseStreamWriter extends AbstractResponseStreamWriter {
+public class JSONResponseStreamWriter implements ResponseStreamWriter {
private static final String DATA_RESP_FIELD = "data";
@@ -49,20 +46,9 @@ public class JSONResponseStreamWriter extends AbstractResponseStreamWriter {
@Resource
private ObjectMapper mapper;
-
@Override
- public void respond(List>>> results,
- ResponseOptions options, HttpServletResponse response) throws Exception {
- response.setCharacterEncoding(JsonEncoding.UTF8.getJavaName());
- response.setContentType(MediaType.APPLICATION_JSON_VALUE);
-
- respondInternal(results, options, response);
- }
-
- private void respondInternal(List>>> results,
- ResponseOptions options, HttpServletResponse response) throws Exception {
+ public void respond(List<Entry<DAQQueryElement, Stream<Triple<BackendQuery, ChannelName, ?>>>> results, OutputStream out) throws Exception {
AtomicReference exception = new AtomicReference<>();
- OutputStream out = handleCompressionAndResponseHeaders(options, response, CONTENT_TYPE_JSON);
JsonGenerator generator = jsonFactory.createGenerator(out, JsonEncoding.UTF8);
try {
diff --git a/src/test/java/ch/psi/daq/test/queryrest/controller/QueryRestControllerCsvTest.java b/src/test/java/ch/psi/daq/test/queryrest/controller/QueryRestControllerCsvTest.java
index 944638b..2ae3e88 100644
--- a/src/test/java/ch/psi/daq/test/queryrest/controller/QueryRestControllerCsvTest.java
+++ b/src/test/java/ch/psi/daq/test/queryrest/controller/QueryRestControllerCsvTest.java
@@ -32,12 +32,12 @@ import ch.psi.daq.domain.query.operation.Aggregation;
import ch.psi.daq.domain.query.operation.AggregationType;
import ch.psi.daq.domain.query.operation.Compression;
import ch.psi.daq.domain.query.operation.QueryField;
-import ch.psi.daq.domain.query.operation.ResponseFormat;
import ch.psi.daq.domain.request.range.RequestRangeDate;
import ch.psi.daq.domain.request.range.RequestRangePulseId;
import ch.psi.daq.domain.request.range.RequestRangeTime;
import ch.psi.daq.domain.test.TestTimeUtils;
import ch.psi.daq.queryrest.controller.QueryRestController;
+import ch.psi.daq.queryrest.response.csv.CSVResponse;
import ch.psi.daq.queryrest.response.csv.CSVResponseStreamWriter;
import ch.psi.daq.test.queryrest.AbstractDaqRestTest;
@@ -62,7 +62,7 @@ public class QueryRestControllerCsvTest extends AbstractDaqRestTest {
0,
1),
channels);
- request.setResponseFormat(ResponseFormat.CSV);
+ request.setResponse(new CSVResponse());
LinkedHashSet queryFields = new LinkedHashSet<>();
queryFields.add(QueryField.channel);
@@ -155,7 +155,7 @@ public class QueryRestControllerCsvTest extends AbstractDaqRestTest {
-1,
-1),
channels);
- request.setResponseFormat(ResponseFormat.CSV);
+ request.setResponse(new CSVResponse());
LinkedHashSet queryFields = new LinkedHashSet<>();
queryFields.add(QueryField.channel);
@@ -245,7 +245,7 @@ public class QueryRestControllerCsvTest extends AbstractDaqRestTest {
-1,
-1),
channels);
- request.setResponseFormat(ResponseFormat.CSV);
+ request.setResponse(new CSVResponse());
LinkedHashSet queryFields = new LinkedHashSet<>();
queryFields.add(QueryField.channel);
@@ -332,7 +332,7 @@ public class QueryRestControllerCsvTest extends AbstractDaqRestTest {
0,
1),
testChannel3));
- request.setResponseFormat(ResponseFormat.CSV);
+ request.setResponse(new CSVResponse());
channels = Arrays.asList(TEST_CHANNEL_01, TEST_CHANNEL_02, testChannel3);
LinkedHashSet queryFields = new LinkedHashSet<>();
@@ -418,7 +418,7 @@ public class QueryRestControllerCsvTest extends AbstractDaqRestTest {
0,
1),
channels);
- request.setResponseFormat(ResponseFormat.CSV);
+ request.setResponse(new CSVResponse());
LinkedHashSet queryFields = new LinkedHashSet<>();
queryFields.add(QueryField.channel);
@@ -502,7 +502,7 @@ public class QueryRestControllerCsvTest extends AbstractDaqRestTest {
TimeUtils.getTimeFromMillis(0, 0),
TimeUtils.getTimeFromMillis(10, 0)),
channels);
- request.setResponseFormat(ResponseFormat.CSV);
+ request.setResponse(new CSVResponse());
LinkedHashSet queryFields = new LinkedHashSet<>();
queryFields.add(QueryField.channel);
@@ -587,7 +587,7 @@ public class QueryRestControllerCsvTest extends AbstractDaqRestTest {
startDate,
endDate),
channels);
- request.setResponseFormat(ResponseFormat.CSV);
+ request.setResponse(new CSVResponse());
LinkedHashSet queryFields = new LinkedHashSet<>();
queryFields.add(QueryField.channel);
@@ -675,7 +675,7 @@ public class QueryRestControllerCsvTest extends AbstractDaqRestTest {
Ordering.asc,
AggregationType.extrema,
TEST_CHANNEL_NAMES[0]);
- request.setResponseFormat(ResponseFormat.CSV);
+ request.setResponse(new CSVResponse());
String content = mapper.writeValueAsString(request);
@@ -705,7 +705,7 @@ public class QueryRestControllerCsvTest extends AbstractDaqRestTest {
Ordering.asc,
AggregationType.index,
TEST_CHANNEL_NAMES[0]);
- request.setResponseFormat(ResponseFormat.CSV);
+ request.setResponse(new CSVResponse());
String content = mapper.writeValueAsString(request);
@@ -739,7 +739,7 @@ public class QueryRestControllerCsvTest extends AbstractDaqRestTest {
endDate),
channels);
request.setNrOfBins(2);
- request.setResponseFormat(ResponseFormat.CSV);
+ request.setResponse(new CSVResponse());
LinkedHashSet queryFields = new LinkedHashSet<>();
queryFields.add(QueryField.channel);
@@ -839,7 +839,7 @@ public class QueryRestControllerCsvTest extends AbstractDaqRestTest {
endDate),
channels);
request.setBinSize(100);
- request.setResponseFormat(ResponseFormat.CSV);
+ request.setResponse(new CSVResponse());
LinkedHashSet queryFields = new LinkedHashSet<>();
queryFields.add(QueryField.channel);
@@ -934,7 +934,7 @@ public class QueryRestControllerCsvTest extends AbstractDaqRestTest {
0,
1),
channels);
- request.setResponseFormat(ResponseFormat.CSV);
+ request.setResponse(new CSVResponse());
LinkedHashSet queryFields = new LinkedHashSet<>();
queryFields.add(QueryField.channel);
@@ -995,8 +995,7 @@ public class QueryRestControllerCsvTest extends AbstractDaqRestTest {
10,
11),
TEST_CHANNEL_NAMES);
- request.setResponseFormat(ResponseFormat.CSV);
- request.setCompression(Compression.GZIP);
+ request.setResponse(new CSVResponse(Compression.GZIP));
String content = mapper.writeValueAsString(request);
@@ -1018,7 +1017,7 @@ public class QueryRestControllerCsvTest extends AbstractDaqRestTest {
10,
11),
TEST_CHANNEL_NAMES);
- request.setResponseFormat(ResponseFormat.CSV);
+ request.setResponse(new CSVResponse());
String content = mapper.writeValueAsString(request);
diff --git a/src/test/java/ch/psi/daq/test/queryrest/controller/QueryRestControllerJsonTest.java b/src/test/java/ch/psi/daq/test/queryrest/controller/QueryRestControllerJsonTest.java
index 83ac483..50ed75a 100644
--- a/src/test/java/ch/psi/daq/test/queryrest/controller/QueryRestControllerJsonTest.java
+++ b/src/test/java/ch/psi/daq/test/queryrest/controller/QueryRestControllerJsonTest.java
@@ -24,6 +24,7 @@ import ch.psi.daq.domain.request.range.RequestRangePulseId;
import ch.psi.daq.domain.request.range.RequestRangeTime;
import ch.psi.daq.domain.test.TestTimeUtils;
import ch.psi.daq.queryrest.controller.QueryRestController;
+import ch.psi.daq.queryrest.response.json.JSONResponse;
import ch.psi.daq.test.queryrest.AbstractDaqRestTest;
/**
@@ -664,7 +665,7 @@ public class QueryRestControllerJsonTest extends AbstractDaqRestTest {
10,
11),
TEST_CHANNEL_NAMES);
- request.setCompression(Compression.GZIP);
+ request.setResponse(new JSONResponse(Compression.GZIP));
String content = mapper.writeValueAsString(request);
System.out.println(content);
diff --git a/src/test/java/ch/psi/daq/test/queryrest/response/ResponseQueryTest.java b/src/test/java/ch/psi/daq/test/queryrest/response/ResponseQueryTest.java
new file mode 100644
index 0000000..7a2f395
--- /dev/null
+++ b/src/test/java/ch/psi/daq/test/queryrest/response/ResponseQueryTest.java
@@ -0,0 +1,106 @@
+package ch.psi.daq.test.queryrest.response;
+
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertNotNull;
+import static org.junit.Assert.assertNull;
+import static org.junit.Assert.assertTrue;
+
+import java.io.IOException;
+
+import javax.annotation.Resource;
+
+import org.junit.Test;
+
+import com.fasterxml.jackson.core.JsonParseException;
+import com.fasterxml.jackson.databind.JsonMappingException;
+import com.fasterxml.jackson.databind.ObjectMapper;
+
+import ch.psi.daq.domain.query.DAQQuery;
+import ch.psi.daq.domain.query.operation.Compression;
+import ch.psi.daq.domain.query.operation.Response;
+import ch.psi.daq.domain.request.range.RequestRangePulseId;
+import ch.psi.daq.queryrest.response.csv.CSVResponse;
+import ch.psi.daq.queryrest.response.json.JSONResponse;
+import ch.psi.daq.test.queryrest.AbstractDaqRestTest;
+
+public class ResponseQueryTest extends AbstractDaqRestTest{
+
+ @Resource
+ private ObjectMapper mapper;
+
+ @Test
+ public void test_JSON_01() throws JsonParseException, JsonMappingException, IOException {
+ Response respose = new CSVResponse();
+
+ String value = mapper.writeValueAsString(respose);
+
+ Response deserial = mapper.readValue(value, Response.class);
+
+ assertEquals(respose.getClass(), deserial.getClass());
+ assertEquals(respose.getFormat(), deserial.getFormat());
+ assertEquals(respose.getCompression(), deserial.getCompression());
+ }
+
+ @Test
+ public void test_JSON_02() throws JsonParseException, JsonMappingException, IOException {
+ DAQQuery query = new DAQQuery(
+ new RequestRangePulseId(
+ 0,
+ 100),
+ "TestChannel_01");
+ query.setResponse(new CSVResponse(Compression.GZIP));
+
+ String value = mapper.writeValueAsString(query);
+
+ DAQQuery deserial = mapper.readValue(value, DAQQuery.class);
+
+ assertNotNull(deserial.getResponse());
+ assertEquals(query.getResponse().getClass(), deserial.getResponse().getClass());
+ assertEquals(query.getResponse().getFormat(), deserial.getResponse().getFormat());
+ assertEquals(query.getResponse().getCompression(), deserial.getResponse().getCompression());
+
+ assertEquals(query.getResponse().getCompression().getFileSuffix(), deserial.getResponse().getFileSuffix());
+ }
+
+ @Test
+ public void test_JSON_03() throws JsonParseException, JsonMappingException, IOException {
+ DAQQuery query = new DAQQuery(
+ new RequestRangePulseId(
+ 0,
+ 100),
+ "TestChannel_01");
+ query.setResponse(new JSONResponse(Compression.NONE));
+
+ String value = mapper.writeValueAsString(query);
+
+ int index = value.indexOf("format");
+ assertTrue(index >= 0);
+ index = value.indexOf("format", index + 1);
+ // ensure string contains identifier only once
+ assertEquals(-1, index);
+
+ DAQQuery deserial = mapper.readValue(value, DAQQuery.class);
+
+ assertNotNull(deserial.getResponse());
+ assertEquals(query.getResponse().getClass(), deserial.getResponse().getClass());
+ assertEquals(query.getResponse().getFormat(), deserial.getResponse().getFormat());
+ assertEquals(query.getResponse().getCompression(), deserial.getResponse().getCompression());
+
+ assertEquals(query.getResponse().getFormat().getFileSuffix(), deserial.getResponse().getFileSuffix());
+ }
+
+ @Test
+ public void test_JSON_04() throws JsonParseException, JsonMappingException, IOException {
+ DAQQuery query = new DAQQuery(
+ new RequestRangePulseId(
+ 0,
+ 100),
+ "TestChannel_01");
+
+ String value = mapper.writeValueAsString(query);
+
+ DAQQuery deserial = mapper.readValue(value, DAQQuery.class);
+
+ assertNull(deserial.getResponse());
+ }
+}