Introduce limit and inclusive queries.

Fabian Märki
2017-11-07 14:45:37 +01:00
parent 714c9b2e3d
commit 7b71a2a394
5 changed files with 728 additions and 7 deletions


@ -326,6 +326,7 @@ A request is performed by sending a valid JSON object in the HTTP request body.
- **channels**: Array of channels to be queried (see [here](Readme.md#query_channel_names) and [here](Readme.md#define_channel_names)).
- **range**: The range of the query (see [here](Readme.md#query_range)).
- **limit**: An optional limit on the number of elements to retrieve. When a limit is set, queries with ascending ordering require the range start to be defined and queries with descending ordering the range end. A limit together with aggregation is not meaningful and is thus not supported (see the example request after this list).
- **ordering**: The ordering of the data (see [here](Readme.md#data_ordering)).
- **fields**: Array of requested fields (see [here](Readme.md#requested_fields)).
- **aggregation**: Setting this attribute activates data aggregation (see [here](Readme.md#data_aggregation) for its specification).
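For illustration, a minimal request body that combines **limit** with the attributes above could look as follows (a sketch: the channel name is a placeholder and the field spellings follow the other examples in this Readme):

```json
{
   "channels":["Channel_01"],
   "range":{
      "startSeconds":"0.0",
      "endSeconds":"1.000999999"
   },
   "ordering":"asc",
   "limit":2
}
```

Such a request returns at most the first 2 events per channel within the range.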
@ -374,12 +375,16 @@ Queries are applied to a range. The following types of ranges are supported.
```json
"range":{
"startPulseId":0,
"endPulseId":100
"startInclusive":true,
"endPulseId":100,
"endInclusive":true
}
```
- **startPulseId**: The start pulse-id of the range request.
- **startInclusive**: Defines if the start should be considered inclusive (values: **true**|false).
- **endPulseId**: The end pulse-id of the range request.
- **endInclusive**: Defines if the end should be considered inclusive (values: **true**|false).
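To exclude one or both boundary pulses from the result, the corresponding flags are simply set to **false**, e.g.:

```json
"range":{
   "startPulseId":0,
   "startInclusive":false,
   "endPulseId":100,
   "endInclusive":false
}
```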
#### By Date
@ -387,12 +392,16 @@ Queries are applied to a range. The following types of ranges are supported.
```json
"range":{
"startDate":"2015-08-06T18:00:00.000",
"endDate":"2015-08-06T18:59:59.999"
"startInclusive":true,
"endDate":"2015-08-06T18:59:59.999",
"endInclusive":true
}
```
- **startDate**: The start date of the time range in ISO 8601 format, e.g. 1997-07-16T19:20:30.123+02:00 or 1997-07-16T19:20:30.123456789+02:00 (omitting the time-zone offset such as +02:00 falls back to the server's time zone).
- **startInclusive**: Defines if the start should be considered inclusive (values: **true**|false).
- **endDate**: The end date of the time range.
- **endInclusive**: Defines if the end should be considered inclusive (values: **true**|false).
#### By Time
@ -400,12 +409,16 @@ Queries are applied to a range. The following types of ranges are supported.
```json
"range":{
"startSeconds":"0.0",
"endSeconds":"1.000999999"
"startInclusive":true,
"endSeconds":"1.000999999",
"endInclusive":true
}
```
- **startSeconds**: The start time of the range in seconds since midnight, January 1, 1970 UTC (the UNIX epoch) as a decimal value including fractional seconds.
- **startInclusive**: Defines if the start should be considered inclusive (values: **true**|false).
- **endSeconds**: The end time of the range in seconds.
- **endInclusive**: Defines if the end should be considered inclusive (values: **true**|false).
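Together with **limit**, a range may also be open-ended, i.e. only one side is given (a sketch, assuming the server accepts a start-only or end-only range as exercised by the tests added for this feature): with ascending ordering and a start, the first *limit* events from the start onwards are returned; with descending ordering and an end, the last *limit* events up to the end.

```json
{
   "channels":["Channel_01"],
   "range":{
      "startSeconds":"2.0",
      "startInclusive":true
   },
   "ordering":"asc",
   "limit":2
}
```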
<a name="data_ordering"/>


@ -1,5 +1,6 @@
package ch.psi.daq.queryrest.controller.validator;
import java.math.BigDecimal;
import java.util.LinkedHashSet;
import java.util.Set;
@ -9,15 +10,20 @@ import org.springframework.context.ApplicationContextAware;
import org.springframework.validation.Errors;
import org.springframework.validation.Validator;
import ch.psi.daq.common.ordering.Ordering;
import ch.psi.daq.domain.backend.Backend;
import ch.psi.daq.domain.config.DomainConfig;
import ch.psi.daq.domain.query.DAQConfigQuery;
import ch.psi.daq.domain.query.operation.ConfigField;
import ch.psi.daq.domain.request.range.RequestRange;
import ch.psi.daq.queryrest.config.QueryRestConfig;
public class ConfigQueryValidator implements Validator, ApplicationContextAware {
private Set<ConfigField> queryResponseFields;
private long maxPulseRange;
private BigDecimal maxTimeRange;
@SuppressWarnings("unchecked")
@Override
public void setApplicationContext(ApplicationContext context) throws BeansException {
@ -25,6 +31,9 @@ public class ConfigQueryValidator implements Validator, ApplicationContextAware
context = backend.getApplicationContext();
queryResponseFields = context.getBean(QueryRestConfig.BEAN_NAME_CONFIG_RESPONSE_FIELDS_QUERY, Set.class);
maxPulseRange = context.getBean(DomainConfig.BEAN_NAME_QUERY_MAX_PULSE_RANGE, Long.class);
maxTimeRange = context.getBean(DomainConfig.BEAN_NAME_QUERY_MAX_TIME_RANGE, BigDecimal.class);
}
@Override
@ -44,5 +53,30 @@ public class ConfigQueryValidator implements Validator, ApplicationContextAware
if (query.getFields() == null || query.getFields().isEmpty()) {
query.setFields(new LinkedHashSet<>(queryResponseFields));
}
RequestRange range = query.getRange();
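// A query with a limit needs the side the ordering starts from to be defined:
// ascending order requires a range start, descending order a range end.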
if (query.hasLimit()) {
if (Ordering.asc.equals(query.getOrdering())) {
if (range.isPulseIdRangeDefined() && range.getStartPulseId() < 0) {
errors.reject("limit",
"Pulse based query with limit and asc order requires start pulse-id to be defined.");
}
if (range.isTimeRangeDefined() && range.getStartTime() == null) {
errors.reject("limit",
"Time based query with limit and asc order requires start time to be defined.");
}
} else if (Ordering.desc.equals(query.getOrdering())) {
if (range.isPulseIdRangeDefined() && range.getEndPulseId() <= 0) {
errors.reject("limit",
"Pulse based query with limit and desc order requires end pulse-id to be defined.");
}
if (range.isTimeRangeDefined() && range.getEndTime() == null) {
errors.reject("limit", "Time based query with limit and desc order requires end time to be defined.");
}
}
}
range.validate(maxPulseRange, maxTimeRange, query.getOrdering(), query.getLimit());
query.setRange(range);
}
}


@ -1,5 +1,6 @@
package ch.psi.daq.queryrest.controller.validator;
import java.math.BigDecimal;
import java.util.ArrayList;
import java.util.LinkedHashSet;
import java.util.Set;
@ -10,6 +11,7 @@ import org.springframework.context.ApplicationContextAware;
import org.springframework.validation.Errors;
import org.springframework.validation.Validator;
import ch.psi.daq.common.ordering.Ordering;
import ch.psi.daq.domain.backend.Backend;
import ch.psi.daq.domain.config.DomainConfig;
import ch.psi.daq.domain.query.DAQQueries;
@ -20,11 +22,14 @@ import ch.psi.daq.domain.query.operation.EventField;
import ch.psi.daq.domain.query.transform.ExecutionEnvironment;
import ch.psi.daq.domain.query.transform.ValueTransformationSequence;
import ch.psi.daq.domain.request.Request;
import ch.psi.daq.domain.request.range.RequestRange;
import ch.psi.daq.queryrest.config.QueryRestConfig;
public class EventQueryValidator implements Validator, ApplicationContextAware {
private Set<EventField> defaultResponseFields;
private Set<Aggregation> defaultResponseAggregations;
private long maxPulseRange;
private BigDecimal maxTimeRange;
@SuppressWarnings("unchecked")
@Override
@ -33,7 +38,11 @@ public class EventQueryValidator implements Validator, ApplicationContextAware {
context = backend.getApplicationContext();
defaultResponseFields = context.getBean(QueryRestConfig.BEAN_NAME_DEFAULT_EVENT_RESPONSE_FIELDS, Set.class);
defaultResponseAggregations =
        context.getBean(QueryRestConfig.BEAN_NAME_DEFAULT_EVENT_RESPONSE_AGGREGATIONS, Set.class);
maxPulseRange = context.getBean(DomainConfig.BEAN_NAME_QUERY_MAX_PULSE_RANGE, Long.class);
maxTimeRange = context.getBean(DomainConfig.BEAN_NAME_QUERY_MAX_TIME_RANGE, BigDecimal.class);
}
@Override
@ -59,6 +68,31 @@ public class EventQueryValidator implements Validator, ApplicationContextAware {
query.setFields(new LinkedHashSet<>(defaultResponseFields));
}
RequestRange range = query.getRange();
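// A query with a limit needs the side the ordering starts from to be defined:
// ascending order requires a range start, descending order a range end.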
if (query.hasLimit()) {
if (Ordering.asc.equals(query.getOrdering())) {
if (range.isPulseIdRangeDefined() && range.getStartPulseId() < 0) {
errors.reject("limit",
"Pulse based query with limit and asc order requires start pulse-id to be defined.");
}
if (range.isTimeRangeDefined() && range.getStartTime() == null) {
errors.reject("limit",
"Time based query with limit and asc order requires start time to be defined.");
}
} else if (Ordering.desc.equals(query.getOrdering())) {
if (range.isPulseIdRangeDefined() && range.getEndPulseId() <= 0) {
errors.reject("limit",
"Pulse based query with limit and desc order requires end pulse-id to be defined.");
}
if (range.isTimeRangeDefined() && range.getEndTime() == null) {
errors.reject("limit", "Time based query with limit and desc order requires end time to be defined.");
}
}
}
range.validate(maxPulseRange, maxTimeRange, query.getOrdering(), query.getLimit());
query.setRange(range);
if (query.getAggregation() != null) {
// check if only one binning element is defined
final long durationPerBin = query.getAggregation().getDurationPerBin();
@ -81,12 +115,15 @@ public class EventQueryValidator implements Validator, ApplicationContextAware {
errors.reject("pulsesPerBin", "Time range queries only support time based binning.");
}
// set default values (if not set)
if (query.getAggregation().getAggregations() == null || query.getAggregation().getAggregations().isEmpty()) {
query.getAggregation().setAggregations(new ArrayList<>(defaultResponseAggregations));
}
if (query.hasLimit()) {
errors.reject("limit", "Aggregation with limit does not make sense.");
}
// without adding this field, user need to explicitly ask for value field when querying
// aggregations.
// if (!query.getFields().contains(QueryField.value)) {


@ -47,7 +47,13 @@ import ch.psi.daq.domain.query.transform.image.color.TypedColorModel;
import ch.psi.daq.domain.query.transform.image.encoding.Base64ImageEncoder;
import ch.psi.daq.domain.query.transform.sampling.ValueSamplerValueTransformation;
import ch.psi.daq.domain.request.range.RequestRangeDate;
import ch.psi.daq.domain.request.range.RequestRangeEndDate;
import ch.psi.daq.domain.request.range.RequestRangeEndPulseId;
import ch.psi.daq.domain.request.range.RequestRangeEndTime;
import ch.psi.daq.domain.request.range.RequestRangePulseId;
import ch.psi.daq.domain.request.range.RequestRangeStartDate;
import ch.psi.daq.domain.request.range.RequestRangeStartPulseId;
import ch.psi.daq.domain.request.range.RequestRangeStartTime;
import ch.psi.daq.domain.request.range.RequestRangeTime;
import ch.psi.daq.domain.test.TestTimeUtils;
import ch.psi.daq.queryrest.response.json.JSONHTTPResponse;
@ -609,6 +615,639 @@ public class JsonQueryRestControllerTest extends AbstractDaqRestTest implements
.andExpect(MockMvcResultMatchers.jsonPath("$[1].data[1].globalSeconds").value(
TestTimeUtils.getTimeStr(1, 10000000)));
}
@Test
public void testOpenTimeRangeQueryStart_01() throws Exception {
DAQQuery request = new DAQQuery(
new RequestRangeStartTime(
TimeUtils.getTimeFromMillis(2000, 0)),
TEST_CHANNEL_NAMES);
request.setLimit(2);
String content = mapper.writeValueAsString(request);
this.mockMvc.perform(MockMvcRequestBuilders
.post(DomainConfig.PATH_QUERY)
.contentType(MediaType.APPLICATION_JSON)
.content(content))
.andDo(MockMvcResultHandlers.print())
.andExpect(MockMvcResultMatchers.status().isOk())
.andExpect(MockMvcResultMatchers.jsonPath("$").isArray())
.andExpect(MockMvcResultMatchers.jsonPath("$[0]").exists())
.andExpect(MockMvcResultMatchers.jsonPath("$[0].channel").isMap())
.andExpect(MockMvcResultMatchers.jsonPath("$[0].channel.name").value(TEST_CHANNEL_01))
.andExpect(MockMvcResultMatchers.jsonPath("$[0].data").isArray())
.andExpect(MockMvcResultMatchers.jsonPath("$[0].data[0].pulseId").value(200))
.andExpect(MockMvcResultMatchers.jsonPath("$[0].data[0].globalSeconds").value(
TestTimeUtils.getTimeStr(2, 0)))
.andExpect(MockMvcResultMatchers.jsonPath("$[0].data[1].pulseId").value(201))
.andExpect(MockMvcResultMatchers.jsonPath("$[0].data[1].globalSeconds").value(
TestTimeUtils.getTimeStr(2, 10000000)))
.andExpect(MockMvcResultMatchers.jsonPath("$[0].data[2]").doesNotExist())
.andExpect(MockMvcResultMatchers.jsonPath("$[1]").exists())
.andExpect(MockMvcResultMatchers.jsonPath("$[1].channel").isMap())
.andExpect(MockMvcResultMatchers.jsonPath("$[1].channel.name").value(TEST_CHANNEL_02))
.andExpect(MockMvcResultMatchers.jsonPath("$[1].data").isArray())
.andExpect(MockMvcResultMatchers.jsonPath("$[1].data[0].pulseId").value(200))
.andExpect(MockMvcResultMatchers.jsonPath("$[1].data[0].globalSeconds").value(
TestTimeUtils.getTimeStr(2, 0)))
.andExpect(MockMvcResultMatchers.jsonPath("$[1].data[1].pulseId").value(201))
.andExpect(MockMvcResultMatchers.jsonPath("$[1].data[1].globalSeconds").value(
TestTimeUtils.getTimeStr(2, 10000000)))
.andExpect(MockMvcResultMatchers.jsonPath("$[1].data[2]").doesNotExist());
}
@Test
public void testOpenTimeRangeQueryStart_01_Exclusive() throws Exception {
DAQQuery request = new DAQQuery(
new RequestRangeStartTime(
TimeUtils.getTimeFromMillis(2000, 0),
false),
TEST_CHANNEL_NAMES);
request.setLimit(2);
String content = mapper.writeValueAsString(request);
this.mockMvc.perform(MockMvcRequestBuilders
.post(DomainConfig.PATH_QUERY)
.contentType(MediaType.APPLICATION_JSON)
.content(content))
.andDo(MockMvcResultHandlers.print())
.andExpect(MockMvcResultMatchers.status().isOk())
.andExpect(MockMvcResultMatchers.jsonPath("$").isArray())
.andExpect(MockMvcResultMatchers.jsonPath("$[0]").exists())
.andExpect(MockMvcResultMatchers.jsonPath("$[0].channel").isMap())
.andExpect(MockMvcResultMatchers.jsonPath("$[0].channel.name").value(TEST_CHANNEL_01))
.andExpect(MockMvcResultMatchers.jsonPath("$[0].data").isArray())
.andExpect(MockMvcResultMatchers.jsonPath("$[0].data[0].pulseId").value(201))
.andExpect(MockMvcResultMatchers.jsonPath("$[0].data[0].globalSeconds").value(
TestTimeUtils.getTimeStr(2, 10000000)))
.andExpect(MockMvcResultMatchers.jsonPath("$[0].data[1].pulseId").value(202))
.andExpect(MockMvcResultMatchers.jsonPath("$[0].data[1].globalSeconds").value(
TestTimeUtils.getTimeStr(2, 20000000)))
.andExpect(MockMvcResultMatchers.jsonPath("$[0].data[2]").doesNotExist())
.andExpect(MockMvcResultMatchers.jsonPath("$[1]").exists())
.andExpect(MockMvcResultMatchers.jsonPath("$[1].channel").isMap())
.andExpect(MockMvcResultMatchers.jsonPath("$[1].channel.name").value(TEST_CHANNEL_02))
.andExpect(MockMvcResultMatchers.jsonPath("$[1].data").isArray())
.andExpect(MockMvcResultMatchers.jsonPath("$[1].data[0].pulseId").value(201))
.andExpect(MockMvcResultMatchers.jsonPath("$[1].data[0].globalSeconds").value(
TestTimeUtils.getTimeStr(2, 10000000)))
.andExpect(MockMvcResultMatchers.jsonPath("$[1].data[1].pulseId").value(202))
.andExpect(MockMvcResultMatchers.jsonPath("$[1].data[1].globalSeconds").value(
TestTimeUtils.getTimeStr(2, 20000000)))
.andExpect(MockMvcResultMatchers.jsonPath("$[1].data[2]").doesNotExist());
}
@Test
public void testOpenTimeRangeQueryStartDate_01_Exclusive() throws Exception {
DAQQuery request = new DAQQuery(
new RequestRangeStartDate(
TimeUtils.format(TimeUtils.getTimeFromMillis(2000, 0)),
false),
TEST_CHANNEL_NAMES);
request.setLimit(2);
String content = mapper.writeValueAsString(request);
this.mockMvc.perform(MockMvcRequestBuilders
.post(DomainConfig.PATH_QUERY)
.contentType(MediaType.APPLICATION_JSON)
.content(content))
.andDo(MockMvcResultHandlers.print())
.andExpect(MockMvcResultMatchers.status().isOk())
.andExpect(MockMvcResultMatchers.jsonPath("$").isArray())
.andExpect(MockMvcResultMatchers.jsonPath("$[0]").exists())
.andExpect(MockMvcResultMatchers.jsonPath("$[0].channel").isMap())
.andExpect(MockMvcResultMatchers.jsonPath("$[0].channel.name").value(TEST_CHANNEL_01))
.andExpect(MockMvcResultMatchers.jsonPath("$[0].data").isArray())
.andExpect(MockMvcResultMatchers.jsonPath("$[0].data[0].pulseId").value(201))
.andExpect(MockMvcResultMatchers.jsonPath("$[0].data[0].globalSeconds").value(
TestTimeUtils.getTimeStr(2, 10000000)))
.andExpect(MockMvcResultMatchers.jsonPath("$[0].data[1].pulseId").value(202))
.andExpect(MockMvcResultMatchers.jsonPath("$[0].data[1].globalSeconds").value(
TestTimeUtils.getTimeStr(2, 20000000)))
.andExpect(MockMvcResultMatchers.jsonPath("$[0].data[2]").doesNotExist())
.andExpect(MockMvcResultMatchers.jsonPath("$[1]").exists())
.andExpect(MockMvcResultMatchers.jsonPath("$[1].channel").isMap())
.andExpect(MockMvcResultMatchers.jsonPath("$[1].channel.name").value(TEST_CHANNEL_02))
.andExpect(MockMvcResultMatchers.jsonPath("$[1].data").isArray())
.andExpect(MockMvcResultMatchers.jsonPath("$[1].data[0].pulseId").value(201))
.andExpect(MockMvcResultMatchers.jsonPath("$[1].data[0].globalSeconds").value(
TestTimeUtils.getTimeStr(2, 10000000)))
.andExpect(MockMvcResultMatchers.jsonPath("$[1].data[1].pulseId").value(202))
.andExpect(MockMvcResultMatchers.jsonPath("$[1].data[1].globalSeconds").value(
TestTimeUtils.getTimeStr(2, 20000000)))
.andExpect(MockMvcResultMatchers.jsonPath("$[1].data[2]").doesNotExist());
}
@Test
public void testOpenTimeRangeQueryStart_02() throws Exception {
DAQQuery request = new DAQQuery(
new RequestRangeStartTime(
TimeUtils.getTimeFromMillis(2000, 0)),
TEST_CHANNEL_NAMES);
String content = mapper.writeValueAsString(request);
try {
// limit needs to be defined
this.mockMvc.perform(MockMvcRequestBuilders
.post(DomainConfig.PATH_QUERY)
.contentType(MediaType.APPLICATION_JSON)
.content(content))
.andDo(MockMvcResultHandlers.print())
.andExpect(MockMvcResultMatchers.status().isOk());
assertTrue(false);
} catch (Exception e) {
assertTrue(true);
}
}
@Test
public void testOpenTimeRangeQueryStart_03() throws Exception {
DAQQuery request = new DAQQuery(
new RequestRangeStartTime(
TimeUtils.getTimeFromMillis(2000, 0)),
TEST_CHANNEL_NAMES);
request.setLimit(2);
request.setOrdering(Ordering.desc);
String content = mapper.writeValueAsString(request);
try {
// with a start-only range, desc ordering must be rejected (asc ordering would be required)
this.mockMvc.perform(MockMvcRequestBuilders
.post(DomainConfig.PATH_QUERY)
.contentType(MediaType.APPLICATION_JSON)
.content(content))
.andDo(MockMvcResultHandlers.print())
.andExpect(MockMvcResultMatchers.status().isOk());
assertTrue(false);
} catch (Exception e) {
assertTrue(true);
}
}
@Test
public void testOpenTimeRangeQueryEnd_01() throws Exception {
DAQQuery request = new DAQQuery(
new RequestRangeEndTime(
TimeUtils.getTimeFromMillis(2010, 0)),
TEST_CHANNEL_NAMES);
request.setLimit(2);
request.setOrdering(Ordering.desc);
String content = mapper.writeValueAsString(request);
this.mockMvc.perform(MockMvcRequestBuilders
.post(DomainConfig.PATH_QUERY)
.contentType(MediaType.APPLICATION_JSON)
.content(content))
.andDo(MockMvcResultHandlers.print())
.andExpect(MockMvcResultMatchers.status().isOk())
.andExpect(MockMvcResultMatchers.jsonPath("$").isArray())
.andExpect(MockMvcResultMatchers.jsonPath("$[0]").exists())
.andExpect(MockMvcResultMatchers.jsonPath("$[0].channel").isMap())
.andExpect(MockMvcResultMatchers.jsonPath("$[0].channel.name").value(TEST_CHANNEL_01))
.andExpect(MockMvcResultMatchers.jsonPath("$[0].data").isArray())
.andExpect(MockMvcResultMatchers.jsonPath("$[0].data[0].pulseId").value(201))
.andExpect(MockMvcResultMatchers.jsonPath("$[0].data[0].globalSeconds").value(
TestTimeUtils.getTimeStr(2, 10000000)))
.andExpect(MockMvcResultMatchers.jsonPath("$[0].data[1].pulseId").value(200))
.andExpect(MockMvcResultMatchers.jsonPath("$[0].data[1].globalSeconds").value(
TestTimeUtils.getTimeStr(2, 0)))
.andExpect(MockMvcResultMatchers.jsonPath("$[0].data[2]").doesNotExist())
.andExpect(MockMvcResultMatchers.jsonPath("$[1]").exists())
.andExpect(MockMvcResultMatchers.jsonPath("$[1].channel").isMap())
.andExpect(MockMvcResultMatchers.jsonPath("$[1].channel.name").value(TEST_CHANNEL_02))
.andExpect(MockMvcResultMatchers.jsonPath("$[1].data").isArray())
.andExpect(MockMvcResultMatchers.jsonPath("$[1].data[0].pulseId").value(201))
.andExpect(MockMvcResultMatchers.jsonPath("$[1].data[0].globalSeconds").value(
TestTimeUtils.getTimeStr(2, 10000000)))
.andExpect(MockMvcResultMatchers.jsonPath("$[1].data[1].pulseId").value(200))
.andExpect(MockMvcResultMatchers.jsonPath("$[1].data[1].globalSeconds").value(
TestTimeUtils.getTimeStr(2, 0)))
.andExpect(MockMvcResultMatchers.jsonPath("$[1].data[2]").doesNotExist());
}
@Test
public void testOpenTimeRangeQueryEnd_01_Exclusive() throws Exception {
DAQQuery request = new DAQQuery(
new RequestRangeEndTime(
TimeUtils.getTimeFromMillis(2020, 0),
false),
TEST_CHANNEL_NAMES);
request.setLimit(2);
request.setOrdering(Ordering.desc);
String content = mapper.writeValueAsString(request);
this.mockMvc.perform(MockMvcRequestBuilders
.post(DomainConfig.PATH_QUERY)
.contentType(MediaType.APPLICATION_JSON)
.content(content))
.andDo(MockMvcResultHandlers.print())
.andExpect(MockMvcResultMatchers.status().isOk())
.andExpect(MockMvcResultMatchers.jsonPath("$").isArray())
.andExpect(MockMvcResultMatchers.jsonPath("$[0]").exists())
.andExpect(MockMvcResultMatchers.jsonPath("$[0].channel").isMap())
.andExpect(MockMvcResultMatchers.jsonPath("$[0].channel.name").value(TEST_CHANNEL_01))
.andExpect(MockMvcResultMatchers.jsonPath("$[0].data").isArray())
.andExpect(MockMvcResultMatchers.jsonPath("$[0].data[0].pulseId").value(201))
.andExpect(MockMvcResultMatchers.jsonPath("$[0].data[0].globalSeconds").value(
TestTimeUtils.getTimeStr(2, 10000000)))
.andExpect(MockMvcResultMatchers.jsonPath("$[0].data[1].pulseId").value(200))
.andExpect(MockMvcResultMatchers.jsonPath("$[0].data[1].globalSeconds").value(
TestTimeUtils.getTimeStr(2, 0)))
.andExpect(MockMvcResultMatchers.jsonPath("$[0].data[2]").doesNotExist())
.andExpect(MockMvcResultMatchers.jsonPath("$[1]").exists())
.andExpect(MockMvcResultMatchers.jsonPath("$[1].channel").isMap())
.andExpect(MockMvcResultMatchers.jsonPath("$[1].channel.name").value(TEST_CHANNEL_02))
.andExpect(MockMvcResultMatchers.jsonPath("$[1].data").isArray())
.andExpect(MockMvcResultMatchers.jsonPath("$[1].data[0].pulseId").value(201))
.andExpect(MockMvcResultMatchers.jsonPath("$[1].data[0].globalSeconds").value(
TestTimeUtils.getTimeStr(2, 10000000)))
.andExpect(MockMvcResultMatchers.jsonPath("$[1].data[1].pulseId").value(200))
.andExpect(MockMvcResultMatchers.jsonPath("$[1].data[1].globalSeconds").value(
TestTimeUtils.getTimeStr(2, 0)))
.andExpect(MockMvcResultMatchers.jsonPath("$[1].data[2]").doesNotExist());
}
@Test
public void testOpenTimeRangeQueryEndDate_01_Exclusive() throws Exception {
DAQQuery request = new DAQQuery(
new RequestRangeEndDate(
TimeUtils.format(TimeUtils.getTimeFromMillis(2020, 0)),
false),
TEST_CHANNEL_NAMES);
request.setLimit(2);
request.setOrdering(Ordering.desc);
String content = mapper.writeValueAsString(request);
this.mockMvc.perform(MockMvcRequestBuilders
.post(DomainConfig.PATH_QUERY)
.contentType(MediaType.APPLICATION_JSON)
.content(content))
.andDo(MockMvcResultHandlers.print())
.andExpect(MockMvcResultMatchers.status().isOk())
.andExpect(MockMvcResultMatchers.jsonPath("$").isArray())
.andExpect(MockMvcResultMatchers.jsonPath("$[0]").exists())
.andExpect(MockMvcResultMatchers.jsonPath("$[0].channel").isMap())
.andExpect(MockMvcResultMatchers.jsonPath("$[0].channel.name").value(TEST_CHANNEL_01))
.andExpect(MockMvcResultMatchers.jsonPath("$[0].data").isArray())
.andExpect(MockMvcResultMatchers.jsonPath("$[0].data[0].pulseId").value(201))
.andExpect(MockMvcResultMatchers.jsonPath("$[0].data[0].globalSeconds").value(
TestTimeUtils.getTimeStr(2, 10000000)))
.andExpect(MockMvcResultMatchers.jsonPath("$[0].data[1].pulseId").value(200))
.andExpect(MockMvcResultMatchers.jsonPath("$[0].data[1].globalSeconds").value(
TestTimeUtils.getTimeStr(2, 0)))
.andExpect(MockMvcResultMatchers.jsonPath("$[0].data[2]").doesNotExist())
.andExpect(MockMvcResultMatchers.jsonPath("$[1]").exists())
.andExpect(MockMvcResultMatchers.jsonPath("$[1].channel").isMap())
.andExpect(MockMvcResultMatchers.jsonPath("$[1].channel.name").value(TEST_CHANNEL_02))
.andExpect(MockMvcResultMatchers.jsonPath("$[1].data").isArray())
.andExpect(MockMvcResultMatchers.jsonPath("$[1].data[0].pulseId").value(201))
.andExpect(MockMvcResultMatchers.jsonPath("$[1].data[0].globalSeconds").value(
TestTimeUtils.getTimeStr(2, 10000000)))
.andExpect(MockMvcResultMatchers.jsonPath("$[1].data[1].pulseId").value(200))
.andExpect(MockMvcResultMatchers.jsonPath("$[1].data[1].globalSeconds").value(
TestTimeUtils.getTimeStr(2, 0)))
.andExpect(MockMvcResultMatchers.jsonPath("$[1].data[2]").doesNotExist());
}
@Test
public void testOpenTimeRangeQueryEnd_02() throws Exception {
DAQQuery request = new DAQQuery(
new RequestRangeEndTime(
TimeUtils.getTimeFromMillis(2010, 0)),
TEST_CHANNEL_NAMES);
request.setOrdering(Ordering.desc);
String content = mapper.writeValueAsString(request);
try {
// limit needs to be defined
this.mockMvc.perform(MockMvcRequestBuilders
.post(DomainConfig.PATH_QUERY)
.contentType(MediaType.APPLICATION_JSON)
.content(content))
.andDo(MockMvcResultHandlers.print())
.andExpect(MockMvcResultMatchers.status().isOk());
assertTrue(false);
} catch (Exception e) {
assertTrue(true);
}
}
@Test
public void testOpenTimeRangeQueryEnd_03() throws Exception {
DAQQuery request = new DAQQuery(
new RequestRangeEndTime(
TimeUtils.getTimeFromMillis(2010, 0)),
TEST_CHANNEL_NAMES);
request.setLimit(2);
request.setOrdering(Ordering.asc);
String content = mapper.writeValueAsString(request);
try {
// with an end-only range, asc ordering must be rejected (desc ordering would be required)
this.mockMvc.perform(MockMvcRequestBuilders
.post(DomainConfig.PATH_QUERY)
.contentType(MediaType.APPLICATION_JSON)
.content(content))
.andDo(MockMvcResultHandlers.print())
.andExpect(MockMvcResultMatchers.status().isOk());
assertTrue(false);
} catch (Exception e) {
assertTrue(true);
}
}
@Test
public void testOpenPulseRangeQueryStart_01() throws Exception {
DAQQuery request = new DAQQuery(
new RequestRangeStartPulseId(
200),
TEST_CHANNEL_NAMES);
request.setLimit(2);
String content = mapper.writeValueAsString(request);
this.mockMvc.perform(MockMvcRequestBuilders
.post(DomainConfig.PATH_QUERY)
.contentType(MediaType.APPLICATION_JSON)
.content(content))
.andDo(MockMvcResultHandlers.print())
.andExpect(MockMvcResultMatchers.status().isOk())
.andExpect(MockMvcResultMatchers.jsonPath("$").isArray())
.andExpect(MockMvcResultMatchers.jsonPath("$[0]").exists())
.andExpect(MockMvcResultMatchers.jsonPath("$[0].channel").isMap())
.andExpect(MockMvcResultMatchers.jsonPath("$[0].channel.name").value(TEST_CHANNEL_01))
.andExpect(MockMvcResultMatchers.jsonPath("$[0].data").isArray())
.andExpect(MockMvcResultMatchers.jsonPath("$[0].data[0].pulseId").value(200))
.andExpect(MockMvcResultMatchers.jsonPath("$[0].data[0].globalSeconds").value(
TestTimeUtils.getTimeStr(2, 0)))
.andExpect(MockMvcResultMatchers.jsonPath("$[0].data[1].pulseId").value(201))
.andExpect(MockMvcResultMatchers.jsonPath("$[0].data[1].globalSeconds").value(
TestTimeUtils.getTimeStr(2, 10000000)))
.andExpect(MockMvcResultMatchers.jsonPath("$[0].data[2]").doesNotExist())
.andExpect(MockMvcResultMatchers.jsonPath("$[1]").exists())
.andExpect(MockMvcResultMatchers.jsonPath("$[1].channel").isMap())
.andExpect(MockMvcResultMatchers.jsonPath("$[1].channel.name").value(TEST_CHANNEL_02))
.andExpect(MockMvcResultMatchers.jsonPath("$[1].data").isArray())
.andExpect(MockMvcResultMatchers.jsonPath("$[1].data[0].pulseId").value(200))
.andExpect(MockMvcResultMatchers.jsonPath("$[1].data[0].globalSeconds").value(
TestTimeUtils.getTimeStr(2, 0)))
.andExpect(MockMvcResultMatchers.jsonPath("$[1].data[1].pulseId").value(201))
.andExpect(MockMvcResultMatchers.jsonPath("$[1].data[1].globalSeconds").value(
TestTimeUtils.getTimeStr(2, 10000000)))
.andExpect(MockMvcResultMatchers.jsonPath("$[1].data[2]").doesNotExist());
}
@Test
public void testOpenPulseRangeQueryStart_01_Exclusive() throws Exception {
DAQQuery request = new DAQQuery(
new RequestRangeStartPulseId(
200,
false),
TEST_CHANNEL_NAMES);
request.setLimit(2);
String content = mapper.writeValueAsString(request);
this.mockMvc.perform(MockMvcRequestBuilders
.post(DomainConfig.PATH_QUERY)
.contentType(MediaType.APPLICATION_JSON)
.content(content))
.andDo(MockMvcResultHandlers.print())
.andExpect(MockMvcResultMatchers.status().isOk())
.andExpect(MockMvcResultMatchers.jsonPath("$").isArray())
.andExpect(MockMvcResultMatchers.jsonPath("$[0]").exists())
.andExpect(MockMvcResultMatchers.jsonPath("$[0].channel").isMap())
.andExpect(MockMvcResultMatchers.jsonPath("$[0].channel.name").value(TEST_CHANNEL_01))
.andExpect(MockMvcResultMatchers.jsonPath("$[0].data").isArray())
.andExpect(MockMvcResultMatchers.jsonPath("$[0].data[0].pulseId").value(201))
.andExpect(MockMvcResultMatchers.jsonPath("$[0].data[0].globalSeconds").value(
TestTimeUtils.getTimeStr(2, 10000000)))
.andExpect(MockMvcResultMatchers.jsonPath("$[0].data[1].pulseId").value(202))
.andExpect(MockMvcResultMatchers.jsonPath("$[0].data[1].globalSeconds").value(
TestTimeUtils.getTimeStr(2, 20000000)))
.andExpect(MockMvcResultMatchers.jsonPath("$[0].data[2]").doesNotExist())
.andExpect(MockMvcResultMatchers.jsonPath("$[1]").exists())
.andExpect(MockMvcResultMatchers.jsonPath("$[1].channel").isMap())
.andExpect(MockMvcResultMatchers.jsonPath("$[1].channel.name").value(TEST_CHANNEL_02))
.andExpect(MockMvcResultMatchers.jsonPath("$[1].data").isArray())
.andExpect(MockMvcResultMatchers.jsonPath("$[1].data[0].pulseId").value(201))
.andExpect(MockMvcResultMatchers.jsonPath("$[1].data[0].globalSeconds").value(
TestTimeUtils.getTimeStr(2, 10000000)))
.andExpect(MockMvcResultMatchers.jsonPath("$[1].data[1].pulseId").value(202))
.andExpect(MockMvcResultMatchers.jsonPath("$[1].data[1].globalSeconds").value(
TestTimeUtils.getTimeStr(2, 20000000)))
.andExpect(MockMvcResultMatchers.jsonPath("$[1].data[2]").doesNotExist());
}
@Test
public void testOpenPulseRangeQueryStart_02() throws Exception {
DAQQuery request = new DAQQuery(
new RequestRangeStartPulseId(
200),
TEST_CHANNEL_NAMES);
String content = mapper.writeValueAsString(request);
try {
// limit needs to be defined
this.mockMvc.perform(MockMvcRequestBuilders
.post(DomainConfig.PATH_QUERY)
.contentType(MediaType.APPLICATION_JSON)
.content(content))
.andDo(MockMvcResultHandlers.print())
.andExpect(MockMvcResultMatchers.status().isOk());
assertTrue(false);
} catch (Exception e) {
assertTrue(true);
}
}
@Test
public void testOpenPulseRangeQueryStart_03() throws Exception {
DAQQuery request = new DAQQuery(
new RequestRangeStartPulseId(
200),
TEST_CHANNEL_NAMES);
request.setLimit(2);
request.setOrdering(Ordering.desc);
String content = mapper.writeValueAsString(request);
try {
// with a start-only range, desc ordering must be rejected (asc ordering would be required)
this.mockMvc.perform(MockMvcRequestBuilders
.post(DomainConfig.PATH_QUERY)
.contentType(MediaType.APPLICATION_JSON)
.content(content))
.andDo(MockMvcResultHandlers.print())
.andExpect(MockMvcResultMatchers.status().isOk());
assertTrue(false);
} catch (Exception e) {
assertTrue(true);
}
}
@Test
public void testOpenPulseRangeQueryEnd_01() throws Exception {
DAQQuery request = new DAQQuery(
new RequestRangeEndPulseId(
201),
TEST_CHANNEL_NAMES);
request.setLimit(2);
request.setOrdering(Ordering.desc);
String content = mapper.writeValueAsString(request);
this.mockMvc.perform(MockMvcRequestBuilders
.post(DomainConfig.PATH_QUERY)
.contentType(MediaType.APPLICATION_JSON)
.content(content))
.andDo(MockMvcResultHandlers.print())
.andExpect(MockMvcResultMatchers.status().isOk())
.andExpect(MockMvcResultMatchers.jsonPath("$").isArray())
.andExpect(MockMvcResultMatchers.jsonPath("$[0]").exists())
.andExpect(MockMvcResultMatchers.jsonPath("$[0].channel").isMap())
.andExpect(MockMvcResultMatchers.jsonPath("$[0].channel.name").value(TEST_CHANNEL_01))
.andExpect(MockMvcResultMatchers.jsonPath("$[0].data").isArray())
.andExpect(MockMvcResultMatchers.jsonPath("$[0].data[0].pulseId").value(201))
.andExpect(MockMvcResultMatchers.jsonPath("$[0].data[0].globalSeconds").value(
TestTimeUtils.getTimeStr(2, 10000000)))
.andExpect(MockMvcResultMatchers.jsonPath("$[0].data[1].pulseId").value(200))
.andExpect(MockMvcResultMatchers.jsonPath("$[0].data[1].globalSeconds").value(
TestTimeUtils.getTimeStr(2, 0)))
.andExpect(MockMvcResultMatchers.jsonPath("$[0].data[2]").doesNotExist())
.andExpect(MockMvcResultMatchers.jsonPath("$[1]").exists())
.andExpect(MockMvcResultMatchers.jsonPath("$[1].channel").isMap())
.andExpect(MockMvcResultMatchers.jsonPath("$[1].channel.name").value(TEST_CHANNEL_02))
.andExpect(MockMvcResultMatchers.jsonPath("$[1].data").isArray())
.andExpect(MockMvcResultMatchers.jsonPath("$[1].data[0].pulseId").value(201))
.andExpect(MockMvcResultMatchers.jsonPath("$[1].data[0].globalSeconds").value(
TestTimeUtils.getTimeStr(2, 10000000)))
.andExpect(MockMvcResultMatchers.jsonPath("$[1].data[1].pulseId").value(200))
.andExpect(MockMvcResultMatchers.jsonPath("$[1].data[1].globalSeconds").value(
TestTimeUtils.getTimeStr(2, 0)))
.andExpect(MockMvcResultMatchers.jsonPath("$[1].data[2]").doesNotExist());
}
@Test
public void testOpenPulseRangeQueryEnd_01_Exclusive() throws Exception {
DAQQuery request = new DAQQuery(
new RequestRangeEndPulseId(
202,
false),
TEST_CHANNEL_NAMES);
request.setLimit(2);
request.setOrdering(Ordering.desc);
String content = mapper.writeValueAsString(request);
this.mockMvc.perform(MockMvcRequestBuilders
.post(DomainConfig.PATH_QUERY)
.contentType(MediaType.APPLICATION_JSON)
.content(content))
.andDo(MockMvcResultHandlers.print())
.andExpect(MockMvcResultMatchers.status().isOk())
.andExpect(MockMvcResultMatchers.jsonPath("$").isArray())
.andExpect(MockMvcResultMatchers.jsonPath("$[0]").exists())
.andExpect(MockMvcResultMatchers.jsonPath("$[0].channel").isMap())
.andExpect(MockMvcResultMatchers.jsonPath("$[0].channel.name").value(TEST_CHANNEL_01))
.andExpect(MockMvcResultMatchers.jsonPath("$[0].data").isArray())
.andExpect(MockMvcResultMatchers.jsonPath("$[0].data[0].pulseId").value(201))
.andExpect(MockMvcResultMatchers.jsonPath("$[0].data[0].globalSeconds").value(
TestTimeUtils.getTimeStr(2, 10000000)))
.andExpect(MockMvcResultMatchers.jsonPath("$[0].data[1].pulseId").value(200))
.andExpect(MockMvcResultMatchers.jsonPath("$[0].data[1].globalSeconds").value(
TestTimeUtils.getTimeStr(2, 0)))
.andExpect(MockMvcResultMatchers.jsonPath("$[0].data[2]").doesNotExist())
.andExpect(MockMvcResultMatchers.jsonPath("$[1]").exists())
.andExpect(MockMvcResultMatchers.jsonPath("$[1].channel").isMap())
.andExpect(MockMvcResultMatchers.jsonPath("$[1].channel.name").value(TEST_CHANNEL_02))
.andExpect(MockMvcResultMatchers.jsonPath("$[1].data").isArray())
.andExpect(MockMvcResultMatchers.jsonPath("$[1].data[0].pulseId").value(201))
.andExpect(MockMvcResultMatchers.jsonPath("$[1].data[0].globalSeconds").value(
TestTimeUtils.getTimeStr(2, 10000000)))
.andExpect(MockMvcResultMatchers.jsonPath("$[1].data[1].pulseId").value(200))
.andExpect(MockMvcResultMatchers.jsonPath("$[1].data[1].globalSeconds").value(
TestTimeUtils.getTimeStr(2, 0)))
.andExpect(MockMvcResultMatchers.jsonPath("$[1].data[2]").doesNotExist());
}
@Test
public void testOpenPulseRangeQueryEnd_02() throws Exception {
DAQQuery request = new DAQQuery(
new RequestRangeEndPulseId(
201),
TEST_CHANNEL_NAMES);
request.setOrdering(Ordering.desc);
String content = mapper.writeValueAsString(request);
try {
// limit needs to be defined
this.mockMvc.perform(MockMvcRequestBuilders
.post(DomainConfig.PATH_QUERY)
.contentType(MediaType.APPLICATION_JSON)
.content(content))
.andDo(MockMvcResultHandlers.print())
.andExpect(MockMvcResultMatchers.status().isOk());
assertTrue(false);
} catch (Exception e) {
assertTrue(true);
}
}
@Test
public void testOpenPulseRangeQueryEnd_03() throws Exception {
DAQQuery request = new DAQQuery(
new RequestRangeEndPulseId(
201),
TEST_CHANNEL_NAMES);
request.setOrdering(Ordering.asc);
request.setLimit(2);
String content = mapper.writeValueAsString(request);
try {
// with an end-only range, asc ordering must be rejected (desc ordering would be required)
this.mockMvc.perform(MockMvcRequestBuilders
.post(DomainConfig.PATH_QUERY)
.contentType(MediaType.APPLICATION_JSON)
.content(content))
.andDo(MockMvcResultHandlers.print())
.andExpect(MockMvcResultMatchers.status().isOk());
assertTrue(false);
} catch (Exception e) {
assertTrue(true);
}
}
@Test
public void testExtremaAggregation() throws Exception {


@ -112,6 +112,4 @@ public class ResponseQueryTest extends AbstractDaqRestTest {
assertNull(deserial.getResponse());
}
}