ATEST-371

This commit is contained in:
Fabian Märki
2016-06-08 15:43:40 +02:00
parent b27b43a6f6
commit 24daa85e0b
8 changed files with 264 additions and 85 deletions

View File

@ -1,5 +1,5 @@
#
#Wed May 04 13:27:42 CEST 2016
#Wed Jun 08 12:47:13 CEST 2016
org.eclipse.jdt.core.compiler.debug.localVariable=generate
org.eclipse.jdt.core.compiler.compliance=1.8
org.eclipse.jdt.core.compiler.codegen.unusedLocal=preserve

181
Readme.md
View File

@ -61,7 +61,14 @@ POST http://<host>:<port>/channels
#### Data
```json
{"regex": "TRFCA|TRFCB","backends": ["sf-databuffer"],"ordering":"asc","reload":true}
{
"regex":"TRFCA|TRFCB",
"backends":[
"sf-databuffer"
],
"ordering":"asc",
"reload":true
}
```
##### Explanation
@ -116,16 +123,46 @@ GET http://<host>:<port>/query
A request is performed by sending a valid JSON object in the HTTP request body. The JSON query defines the channels to be queried, the range, and how the data should be aggregated (this is optional but highly recommended).
The following attributes can be specified:
#### Data
```json
{
"channels":[
"Channel_01"
],
"range":{
"startPulseId":0,
"endPulseId":3
},
"ordering":"asc",
"fields":[
"pulseId",
"globalDate",
"value"
],
"aggregation":{
"aggregationType":"value",
"aggregations":[
"min",
"mean",
"max"
],
"nrOfBins":2
},
"response":{
"format":"json",
"compression":"none"
}
}
```
##### Explanation
- **channels**: Array of channels to be queried (see [here](Readme.md#query_channel_names) and [here](Readme.md#define_channel_names)).
- **range**: The range of the query (see [here](Readme.md#query_range)).
- **ordering**: The ordering of the data (see [here](https://github.psi.ch/sf_daq/ch.psi.daq.common/blob/master/src/main/java/ch/psi/daq/common/ordering/Ordering.java) for possible values).
- **fields**: The requested fields (see [here](https://github.psi.ch/sf_daq/ch.psi.daq.domain/blob/master/src/main/java/ch/psi/daq/domain/query/operation/QueryField.java) for possible values).
- **nrOfBins**: Activates data binning. Specifies the number of bins the pulse/time range should be divided into.
- **binSize**: Activates data binning. Specifies the number of pulses per bin for pulse-range queries or the number of milliseconds per bin for time-range queries (using number of pulses and number of milliseconds makes this binning strategy consistent between channel with different update frequencies).
- **aggregations**: Activates data aggregation. Array of requested aggregations (see [here](https://github.psi.ch/sf_daq/ch.psi.daq.domain/blob/master/src/main/java/ch/psi/daq/domain/query/operation/Aggregation.java) for possible values). These values will be added to the *data* array response.
- **aggregationType**: Specifies the type of aggregation (see [here](https://github.psi.ch/sf_daq/ch.psi.daq.domain/blob/master/src/main/java/ch/psi/daq/domain/query/operation/AggregationType.java)). The default type is *value* aggregation (e.g., sum([1,2,3])=6). Alternatively, it is possible to define *index* aggregation for multiple arrays in combination with binning (e.g., sum([1,2,3], [3,2,1]) = [4,4,4]).
- **ordering**: The ordering of the data (see [here](Readme.md#data_ordering)).
- **fields**: Array of requested fields (see [here](Readme.md#requested_fields)).
- **aggregation**: Setting this attribute activates data aggregation (see [here](Readme.md#data_aggregation) for its specification).
- **response**: Specifies the format of the response of the requested data (see [here](Readme.md#response_format)). If this value is not set it defaults to JSON.
<a name="define_channel_names"/>
@ -203,6 +240,59 @@ Queries are applied to a range. The following types of ranges are supported.
- **endSeconds**: The end time of the range in seconds.
<a name="data_ordering"/>
### Data Ordering
```json
"ordering":"asc"
```
- **ordering**: Defines the ordering of the requested data (values: **asc**|desc|none). Use *none* in case ordering does not matter (allows for server side optimizations).
<a name="requested_fields"/>
### Requested Fields
```json
"fields":[
"pulseId",
"globalDate",
"value"
]
```
- **fields**: Array of requested fields (see [here](https://github.psi.ch/sf_daq/ch.psi.daq.domain/blob/master/src/main/java/ch/psi/daq/domain/query/operation/QueryField.java) for possible values).
It is possible to request the time in seconds (since January 1, 1970 (the UNIX epoch) as a decimal value including fractional seconds - using fields *globalSeconds* and *iocSeconds*), in milliseconds (since January 1, 1970 (the Java epoch) - using fields *globalMillis* and *iocMillis*) or as an ISO8601 formatted String - using fields *globalDate* and *iocDate* (such as 1997-07-16T19:20:30.123456789+02:00).
<a name="data_aggregation"/>
### Data Aggregation
It is possible (and recommended) to aggregate queried data.
```json
"aggregation":{
"aggregationType":"value",
"aggregations":[
"min",
"mean",
"max"
],
"nrOfBins":2
}
```
- **aggregationType**: Specifies the type of aggregation (see [here](https://github.psi.ch/sf_daq/ch.psi.daq.domain/blob/master/src/main/java/ch/psi/daq/domain/query/operation/AggregationType.java)). The default type is *value* aggregation (e.g., sum([1,2,3])=6). Alternatively, it is possible to define *index* aggregation for multiple arrays in combination with binning (e.g., sum([1,2,3], [3,2,1]) = [4,4,4]).
- **aggregations**: Array of requested aggregations (see [here](https://github.psi.ch/sf_daq/ch.psi.daq.domain/blob/master/src/main/java/ch/psi/daq/domain/query/operation/Aggregation.java) for possible values). These values will be added to the *data* array response.
- **nrOfBins**: Activates data binning. Specifies the number of bins the pulse/time range should be divided into.
- **msPerBin**: Activates data binning. Specifies the number of milliseconds per bin for time-range queries (using the number of milliseconds makes this binning strategy consistent between channels with different update frequencies).
- **pulsesPerBin**: Activates data binning. Specifies the number of pulses per bin for pulse-range queries (using the number of pulses makes this binning strategy consistent between channels with different update frequencies).
<a name="response_format"/>
### Response Format
@ -393,22 +483,9 @@ See JSON representation of the data above.
```json
{
"compression":"gzip",
"range":{
"startPulseId":0,
"endPulseId":3
},
"channels":[
"Channel_01"
]
}
```
or `deflate` can be used too:
```json
{
"compression":"deflate",
"response":{
"compression":"gzip"
},
"range":{
"startPulseId":0,
"endPulseId":3
@ -424,7 +501,7 @@ or `deflate` can be used too:
The `curl` command has a `--compressed` option to decompress data automatically.
```bash
curl --compressed -H "Content-Type: application/json" -X POST -d '{"compression":"gzip","range":{"startPulseId":0,"endPulseId":3},"channels":["Channel_01"]}' http://data-api.psi.ch/sf/query | python -m json.tool
curl --compressed -H "Content-Type: application/json" -X POST -d '{"response":{"compression":"gzip"},"range":{"startPulseId":0,"endPulseId":3},"channels":["Channel_01"]}' http://data-api.psi.ch/sf/query | python -m json.tool
```
#### Querying for Specific Fields
@ -505,8 +582,6 @@ curl -H "Content-Type: application/json" -X POST -d '{"fields":["pulseId","valu
}
```
It is possible to request the time in seconds (since January 1, 1970 (the UNIX epoch) as a decimal value including fractional seconds - using fields *globalSeconds* and *iocSeconds*), in milliseconds (since January 1, 1970 (the JAVA epoch) - using fields *globalMillis* and *iocMillis*) or as a ISO8601 formatted String - using fields *globalDate* and *iocDate* (such as 1997-07-16T19:20:30.123456789+02:00).
##### Command
```bash
@ -590,8 +665,10 @@ curl -H "Content-Type: application/json" -X POST -d '{"ordering":"desc","fields
```json
{
"aggregationType":"value",
"aggregations":["min","max","mean"],
"aggregation":{
"aggregationType":"value",
"aggregations":["min","mean","max"]
},
"fields":["pulseId","value"],
"range":{
"startPulseId":0,
@ -606,7 +683,7 @@ curl -H "Content-Type: application/json" -X POST -d '{"ordering":"desc","fields
##### Command
```bash
curl -H "Content-Type: application/json" -X POST -d '{"aggregationType":"value","aggregations":["min","max","mean"],"fields":["pulseId","value"],"range":{"startPulseId":0,"endPulseId":3},"channels":["Channel_01"]}' http://data-api.psi.ch/sf/query | python -m json.tool
curl -H "Content-Type: application/json" -X POST -d '{"aggregation":{"aggregationType":"value","aggregations":["min","mean","max"]},"fields":["pulseId","value"],"range":{"startPulseId":0,"endPulseId":3},"channels":["Channel_01"]}' http://data-api.psi.ch/sf/query | python -m json.tool
```
##### Response
@ -663,9 +740,11 @@ Illustration of array value aggregation:
```json
{
"nrOfBins":2,
"aggregationType":"value",
"aggregations":["min","max","mean"],
"aggregation":{
"nrOfBins":2,
"aggregationType":"value",
"aggregations":["min","mean","max"]
},
"fields":["pulseId","value"],
"range":{
"startPulseId":0,
@ -680,7 +759,7 @@ Illustration of array value aggregation:
##### Command
```bash
curl -H "Content-Type: application/json" -X POST -d '{"nrOfBins":2,"aggregationType":"value","aggregations":["min","max","mean"],"fields":["pulseId","value"],"range":{"startPulseId":0,"endPulseId":3},"channels":["Channel_01"]}' http://data-api.psi.ch/sf/query | python -m json.tool
curl -H "Content-Type: application/json" -X POST -d '{"aggregation":{"nrOfBins":2,"aggregationType":"value","aggregations":["min","mean","max"]},"fields":["pulseId","value"],"range":{"startPulseId":0,"endPulseId":3},"channels":["Channel_01"]}' http://data-api.psi.ch/sf/query | python -m json.tool
```
##### Response
@ -716,15 +795,19 @@ Illustration of array value aggregation with additional binning:
![Value Aggregation with Binning](doc/images/Value_Binning_NrOfBins.png)
#### Value Aggregation with Binning (binSize)
#### Value Aggregation with Binning (msPerBin/pulsesPerBin)
**binSize** specifies the number of pulses per bin for pulse-range queries or the number of milliseconds per bin for time-range queries (using number of pulses and number of milliseconds makes this binning strategy consistent between channel with different update frequencies).
**msPerBin** specifies the number of milliseconds per bin for time-range queries (using the number of milliseconds makes this binning strategy consistent between channels with different update frequencies).
**pulsesPerBin** specifies the number of pulses per bin for pulse-range queries (using the number of pulses makes this binning strategy consistent between channels with different update frequencies).
```json
{
"binSize":10,
"aggregationType":"value",
"aggregations":["min","max","mean"],
"aggregation":{
"pulsesPerBin":2,
"aggregationType":"value",
"aggregations":["min","mean","max"]
},
"fields":["globalMillis","value"],
"range":{
"startSeconds":"0.0",
@ -739,7 +822,7 @@ Illustration of array value aggregation with additional binning:
##### Command
```bash
curl -H "Content-Type: application/json" -X POST -d '{"binSize":10,"aggregationType":"value","aggregations":["min","max","mean"],"fields":["globalMillis","value"],"range":{"startSeconds":"0.0","endSeconds":"0.030000000"},"channels":["Channel_01"]}' http://data-api.psi.ch/sf/query | python -m json.tool
curl -H "Content-Type: application/json" -X POST -d '{"aggregation":{"pulsesPerBin":2,"aggregationType":"value","aggregations":["min","mean","max"]},"fields":["globalMillis","value"],"range":{"startSeconds":"0.0","endSeconds":"0.030000000"},"channels":["Channel_01"]}' http://data-api.psi.ch/sf/query | python -m json.tool
```
##### Response
@ -779,9 +862,11 @@ Illustration of array value aggregation with additional binning:
```json
{
"nrOfBins":1,
"aggregationType":"index",
"aggregations":["min","max","mean","sum"],
"aggregation":{
"pulsesPerBin":1,
"aggregationType":"index",
"aggregations":["min","mean","max","sum"]
},
"fields":["pulseId","value"],
"range":{
"startPulseId":0,
@ -796,7 +881,7 @@ Illustration of array value aggregation with additional binning:
##### Command
```bash
curl -H "Content-Type: application/json" -X POST -d '{"nrOfBins":1,"aggregationType":"index","aggregations":["min","max","mean","sum"],"fields":["pulseId","value"],"range":{"startPulseId":0,"endPulseId":3},"channels":["Channel_01"]}' http://data-api.psi.ch/sf/query | python -m json.tool
curl -H "Content-Type: application/json" -X POST -d '{"aggregation":{"nrOfBins":1,"aggregationType":"index","aggregations":["min","max","mean","sum"]},"fields":["pulseId","value"],"range":{"startPulseId":0,"endPulseId":3},"channels":["Channel_01"]}' http://data-api.psi.ch/sf/query | python -m json.tool
```
##### Response
@ -849,8 +934,10 @@ Illustration of array index aggregation with additional with binning (several nr
```json
{
"aggregationType":"extrema",
"aggregations":["min","max","sum"],
"aggregation":{
"aggregationType":"extrema",
"aggregations":["min","max","sum"]
},
"fields":["pulseId","value"],
"range":{
"startPulseId":0,
@ -865,7 +952,7 @@ Illustration of array index aggregation with additional with binning (several nr
##### Command
```bash
curl -H "Content-Type: application/json" -X POST -d '{"aggregationType":"extrema","aggregations":["min","max","sum"],"fields":["pulseId","value"],"range":{"startPulseId":0,"endPulseId":3},"channels":["Channel_01"]}' http://data-api.psi.ch/sf/query | python -m json.tool
curl -H "Content-Type: application/json" -X POST -d '{"aggregation":{"aggregationType":"extrema","aggregations":["min","max","sum"]},"fields":["pulseId","value"],"range":{"startPulseId":0,"endPulseId":3},"channels":["Channel_01"]}' http://data-api.psi.ch/sf/query | python -m json.tool
```
##### Response

View File

@ -14,6 +14,7 @@ import ch.psi.daq.domain.query.DAQQuery;
import ch.psi.daq.domain.query.DAQQueryElement;
import ch.psi.daq.domain.query.operation.Aggregation;
import ch.psi.daq.domain.query.operation.QueryField;
import ch.psi.daq.domain.request.Request;
import ch.psi.daq.queryrest.config.QueryRestConfig;
public class QueryValidator implements Validator {
@ -38,23 +39,46 @@ public class QueryValidator implements Validator {
@Override
public void validate(Object target, Errors errors) {
if (target instanceof DAQQuery) {
this.checkElement((DAQQuery) target);
}else if(target instanceof DAQQueries){
this.checkElement((DAQQuery) target, errors);
} else if (target instanceof DAQQueries) {
DAQQueries queries = (DAQQueries) target;
for (DAQQueryElement daqQueryElement : queries) {
this.checkElement(daqQueryElement);
this.checkElement(daqQueryElement, errors);
}
}
}
private void checkElement(DAQQueryElement query) {
private void checkElement(DAQQueryElement query, Errors errors) {
// set default values (if not set)
if (query.getFields() == null || query.getFields().isEmpty()) {
query.setFields(new LinkedHashSet<>(defaultResponseFields));
}
if (query.getAggregations() == null || query.getAggregations().isEmpty()) {
query.setAggregations(new ArrayList<>(defaultResponseAggregations));
if (query.getAggregation() != null) {
// check if only one binning element is defined
long msPerBin = query.getAggregation().getMsPerBin();
long pulsesPerBin = query.getAggregation().getPulsesPerBin();
int nrOfBins = query.getAggregation().getNrOfBins();
if ((msPerBin != Request.NOT_SET && (pulsesPerBin != Request.NOT_SET || nrOfBins != Request.NOT_SET))
|| (pulsesPerBin != Request.NOT_SET && (msPerBin != Request.NOT_SET || nrOfBins != Request.NOT_SET))
|| (nrOfBins != Request.NOT_SET && (msPerBin != Request.NOT_SET || pulsesPerBin != Request.NOT_SET))) {
errors.reject("msPerBin", "Only one binning element must be defined.");
errors.reject("pulsesPerBin", "Only one binning element must be defined.");
errors.reject("nrOfBins", "Only one binning element must be defined.");
}
if (query.getRange().isPulseIdRangeDefined() && msPerBin != Request.NOT_SET) {
errors.reject("msPerBin", "Pulse range queries only support pulse based binning.");
}
if (query.getRange().isTimeRangeDefined() && pulsesPerBin != Request.NOT_SET) {
errors.reject("pulsesPerBin", "Time range queries only support time based binning.");
}
// set default values (if not set)
if (query.getAggregation().getAggregations() == null || query.getAggregation().getAggregations().isEmpty()) {
query.getAggregation().setAggregations(new ArrayList<>(defaultResponseAggregations));
}
}
}
}

View File

@ -67,11 +67,11 @@ public class CSVHTTPResponse extends AbstractHTTPResponse {
protected void validateQueries(DAQQueries queries) {
for (DAQQueryElement query : queries) {
if (!(query.getAggregationType() == null || AggregationType.value.equals(query.getAggregationType()))) {
if (!(query.getAggregation() == null || AggregationType.value.equals(query.getAggregation().getAggregationType()))) {
// We allow only no aggregation or value aggregation as
// extrema: nested structure and not clear how to map it to one line
// index: value is an array of Statistics whose size is not clear at initialization time
String message = "CSV export does not support '" + query.getAggregationType() + "'";
String message = "CSV export does not support '" + query.getAggregation().getAggregationType() + "'";
LOGGER.warn(message);
throw new IllegalArgumentException(message);
}

View File

@ -151,7 +151,7 @@ public class CSVResponseStreamWriter implements ResponseStreamWriter {
private void setupChannelColumns(DAQQueryElement daqQuery, BackendQuery backendQuery, ChannelName channelName,
Collection<String> header, Collection<Pair<ChannelName, Function<DataEvent, String>>> accessors) {
Set<QueryField> queryFields = daqQuery.getFields();
List<Aggregation> aggregations = daqQuery.getAggregations();
List<Aggregation> aggregations = daqQuery.getAggregation() != null ? daqQuery.getAggregation().getAggregations() : null;
QueryAnalyzer queryAnalyzer = queryAnalizerFactory.apply(backendQuery);
@ -165,7 +165,7 @@ public class CSVResponseStreamWriter implements ResponseStreamWriter {
}
if (aggregations != null && queryAnalyzer.isAggregationEnabled()) {
for (Aggregation aggregation : daqQuery.getAggregations()) {
for (Aggregation aggregation : aggregations) {
header.add(channelName.getName() + DELIMITER_CHANNELNAME_FIELDNAME + QueryField.value.name()
+ DELIMITER_CHANNELNAME_FIELDNAME + aggregation.name());
accessors.add(Pair.of(channelName, new AggregationStringifyer(aggregation.getAccessor(), EMPTY_VALUE)));

View File

@ -105,7 +105,7 @@ public class JSONResponseStreamWriter implements ResponseStreamWriter {
protected Set<String> getFields(DAQQueryElement query) {
Set<QueryField> queryFields = query.getFields();
List<Aggregation> aggregations = query.getAggregations();
List<Aggregation> aggregations = query.getAggregation() != null ? query.getAggregation().getAggregations() : null;
Set<String> includedFields =
new LinkedHashSet<String>(queryFields.size() + (aggregations != null ? aggregations.size() : 0));
@ -114,7 +114,7 @@ public class JSONResponseStreamWriter implements ResponseStreamWriter {
includedFields.add(field.name());
}
if (aggregations != null) {
for (Aggregation aggregation : query.getAggregations()) {
for (Aggregation aggregation : aggregations) {
includedFields.add(aggregation.name());
}
}

View File

@ -29,6 +29,7 @@ import ch.psi.daq.domain.query.DAQQueries;
import ch.psi.daq.domain.query.DAQQuery;
import ch.psi.daq.domain.query.DAQQueryElement;
import ch.psi.daq.domain.query.operation.Aggregation;
import ch.psi.daq.domain.query.operation.AggregationDescriptor;
import ch.psi.daq.domain.query.operation.AggregationType;
import ch.psi.daq.domain.query.operation.Compression;
import ch.psi.daq.domain.query.operation.QueryField;
@ -673,7 +674,7 @@ public class QueryRestControllerCsvTest extends AbstractDaqRestTest {
0,
1),
Ordering.asc,
AggregationType.extrema,
new AggregationDescriptor(AggregationType.extrema),
TEST_CHANNEL_NAMES[0]);
request.setResponse(new CSVHTTPResponse());
@ -703,7 +704,7 @@ public class QueryRestControllerCsvTest extends AbstractDaqRestTest {
0,
1),
Ordering.asc,
AggregationType.index,
new AggregationDescriptor(AggregationType.index),
TEST_CHANNEL_NAMES[0]);
request.setResponse(new CSVHTTPResponse());
@ -733,12 +734,17 @@ public class QueryRestControllerCsvTest extends AbstractDaqRestTest {
long endTime = 99;
String startDate = TimeUtils.format(startTime);
String endDate = TimeUtils.format(endTime);
List<Aggregation> aggregations = new ArrayList<>();
aggregations.add(Aggregation.min);
aggregations.add(Aggregation.mean);
aggregations.add(Aggregation.max);
DAQQuery request = new DAQQuery(
new RequestRangeDate(
startDate,
endDate),
channels);
request.setNrOfBins(2);
request.setAggregation(new AggregationDescriptor().setNrOfBins(2).setAggregations(aggregations));
request.setResponse(new CSVHTTPResponse());
LinkedHashSet<QueryField> queryFields = new LinkedHashSet<>();
@ -752,12 +758,6 @@ public class QueryRestControllerCsvTest extends AbstractDaqRestTest {
queryFields.add(QueryField.eventCount);
request.setFields(queryFields);
List<Aggregation> aggregations = new ArrayList<>();
aggregations.add(Aggregation.min);
aggregations.add(Aggregation.mean);
aggregations.add(Aggregation.max);
request.setAggregations(aggregations);
String content = mapper.writeValueAsString(request);
System.out.println(content);
@ -833,12 +833,17 @@ public class QueryRestControllerCsvTest extends AbstractDaqRestTest {
long endTime = 999;
String startDate = TimeUtils.format(startTime);
String endDate = TimeUtils.format(endTime);
List<Aggregation> aggregations = new ArrayList<>();
aggregations.add(Aggregation.min);
aggregations.add(Aggregation.mean);
aggregations.add(Aggregation.max);
DAQQuery request = new DAQQuery(
new RequestRangeDate(
startDate,
endDate),
channels);
request.setBinSize(100);
request.setAggregation(new AggregationDescriptor().setMsPerBin(100).setAggregations(aggregations));
request.setResponse(new CSVHTTPResponse());
LinkedHashSet<QueryField> queryFields = new LinkedHashSet<>();
@ -852,12 +857,6 @@ public class QueryRestControllerCsvTest extends AbstractDaqRestTest {
queryFields.add(QueryField.eventCount);
request.setFields(queryFields);
List<Aggregation> aggregations = new ArrayList<>();
aggregations.add(Aggregation.min);
aggregations.add(Aggregation.mean);
aggregations.add(Aggregation.max);
request.setAggregations(aggregations);
String content = mapper.writeValueAsString(request);
System.out.println(content);

View File

@ -15,6 +15,7 @@ import ch.psi.daq.domain.query.DAQQueries;
import ch.psi.daq.domain.query.DAQQuery;
import ch.psi.daq.domain.query.DAQQueryElement;
import ch.psi.daq.domain.query.channels.ChannelsRequest;
import ch.psi.daq.domain.query.operation.AggregationDescriptor;
import ch.psi.daq.domain.query.operation.AggregationType;
import ch.psi.daq.domain.query.operation.Compression;
import ch.psi.daq.domain.query.operation.QueryField;
@ -199,7 +200,8 @@ public class QueryRestControllerJsonTest extends AbstractDaqRestTest {
// all headers are set
this.mockMvc = MockMvcBuilders.webAppContextSetup(webApplicationContext).build();
// curl -H "Origin: *" -H "Access-Control-Request-Method: POST" -X OPTIONS -v http://localhost:8080/channels
// curl -H "Origin: *" -H "Access-Control-Request-Method: POST" -X OPTIONS -v
// http://localhost:8080/channels
this.mockMvc.perform(
MockMvcRequestBuilders
.options(QueryRestController.PATH_CHANNELS)
@ -210,7 +212,8 @@ public class QueryRestControllerJsonTest extends AbstractDaqRestTest {
.andExpect(MockMvcResultMatchers.status().isOk())
.andExpect(MockMvcResultMatchers.header().string("Access-Control-Allow-Origin", "*"));
// curl -H "Origin: http://localhost:8080" -H "Access-Control-Request-Method: POST" -X OPTIONS -v http://localhost:8080/channels
// curl -H "Origin: http://localhost:8080" -H "Access-Control-Request-Method: POST" -X OPTIONS
// -v http://localhost:8080/channels
this.mockMvc.perform(
MockMvcRequestBuilders
.options(QueryRestController.PATH_CHANNELS)
@ -288,10 +291,10 @@ public class QueryRestControllerJsonTest extends AbstractDaqRestTest {
new RequestRangePulseId(
100,
199),
new AggregationDescriptor().setNrOfBins(2),
TEST_CHANNEL_NAMES);
request.addField(QueryField.pulseId);
request.addField(QueryField.eventCount);
request.setNrOfBins(2);
String content = mapper.writeValueAsString(request);
System.out.println(content);
@ -511,7 +514,7 @@ public class QueryRestControllerJsonTest extends AbstractDaqRestTest {
100,
101),
Ordering.asc,
AggregationType.extrema,
new AggregationDescriptor(AggregationType.extrema),
TEST_CHANNEL_NAMES[0]);
String content = mapper.writeValueAsString(request);
@ -554,8 +557,8 @@ public class QueryRestControllerJsonTest extends AbstractDaqRestTest {
new RequestRangeDate(
startDate,
endDate),
new AggregationDescriptor().setNrOfBins(2),
TEST_CHANNEL_01);
request.setNrOfBins(2);
String content = mapper.writeValueAsString(request);
System.out.println(content);
@ -595,8 +598,8 @@ public class QueryRestControllerJsonTest extends AbstractDaqRestTest {
new RequestRangeDate(
startDate,
endDate),
new AggregationDescriptor().setMsPerBin(100),
TEST_CHANNEL_01);
request.setBinSize(100);
String content = mapper.writeValueAsString(request);
System.out.println(content);
@ -701,4 +704,70 @@ public class QueryRestControllerJsonTest extends AbstractDaqRestTest {
.andExpect(MockMvcResultMatchers.status().isOk())
.andExpect(MockMvcResultMatchers.header().string("Content-Disposition", "attachment; filename=data.json"));
}
@Test
public void testBadAggregation_01() throws Exception {
DAQQuery request = new DAQQuery(
new RequestRangePulseId(
10,
11),
new AggregationDescriptor().setMsPerBin(1000),
TEST_CHANNEL_NAMES);
String content = mapper.writeValueAsString(request);
System.out.println(content);
this.mockMvc
.perform(MockMvcRequestBuilders
.post(QueryRestController.PATH_QUERY)
.contentType(MediaType.APPLICATION_JSON)
.content(content))
.andDo(MockMvcResultHandlers.print())
.andExpect(MockMvcResultMatchers.status().isBadRequest());
}
@Test
public void testBadAggregation_02() throws Exception {
DAQQuery request = new DAQQuery(
new RequestRangeTime(
TimeUtils.getTimeFromMillis(0, 0),
TimeUtils.getTimeFromMillis(10, 0)),
new AggregationDescriptor().setPulsesPerBin(100),
TEST_CHANNEL_NAMES);
String content = mapper.writeValueAsString(request);
System.out.println(content);
this.mockMvc
.perform(MockMvcRequestBuilders
.post(QueryRestController.PATH_QUERY)
.contentType(MediaType.APPLICATION_JSON)
.content(content))
.andDo(MockMvcResultHandlers.print())
.andExpect(MockMvcResultMatchers.status().isBadRequest());
}
@Test
public void testBadAggregation_03() throws Exception {
DAQQuery request = new DAQQuery(
new RequestRangePulseId(
10,
11),
new AggregationDescriptor().setMsPerBin(1000).setNrOfBins(100),
TEST_CHANNEL_NAMES);
String content = mapper.writeValueAsString(request);
System.out.println(content);
this.mockMvc
.perform(MockMvcRequestBuilders
.post(QueryRestController.PATH_QUERY)
.contentType(MediaType.APPLICATION_JSON)
.content(content))
.andDo(MockMvcResultHandlers.print())
.andExpect(MockMvcResultMatchers.status().isBadRequest());
}
}