ATEST-311

Fabian Märki
2016-03-08 18:05:09 +01:00
parent 80cc0acc2d
commit d50425082a
9 changed files with 321 additions and 269 deletions

View File

@@ -5,11 +5,6 @@
 <projects>
 </projects>
 <buildSpec>
-<buildCommand>
-<name>org.eclipse.wst.common.project.facet.core.builder</name>
-<arguments>
-</arguments>
-</buildCommand>
 <buildCommand>
 <name>org.eclipse.jdt.core.javabuilder</name>
 <arguments>
@@ -25,6 +20,5 @@
 <nature>org.springframework.ide.eclipse.core.springnature</nature>
 <nature>org.springsource.ide.eclipse.gradle.core.nature</nature>
 <nature>org.eclipse.jdt.core.javanature</nature>
-<nature>org.eclipse.wst.common.project.facet.core.nature</nature>
 </natures>
 </projectDescription>

View File

@@ -1,5 +1,5 @@
 #
-#Wed Feb 17 10:04:43 CET 2016
+#Tue Mar 08 14:20:11 CET 2016
 org.eclipse.jdt.core.compiler.debug.localVariable=generate
 org.eclipse.jdt.core.compiler.compliance=1.8
 org.eclipse.jdt.core.compiler.codegen.unusedLocal=preserve

Readme.md
View File

@@ -125,33 +125,25 @@ Queries are applied to a range. The following types of ranges are supported
 ```json
 "range":{
 "startDate":"2015-08-06T18:00:00.000",
-"startNanos":0,
 "endDate":"2015-08-06T18:59:59.999",
-"endNanos":999999
 }
 ```
 - **startDate**: The start date of the time range (ISO8601 format (YYYY-MM-DDThh:mm:ss.sTZD e.g. 1997-07-16T19:20:30.475+02:00 (omitting +02:00 falls back to the local time zone)).
-- **[startNanos]**: The optional nanosecond offset to the milliseconds (range [0..999999]).
 - **endDate**: The end date of the time range.
-- **[endNanos]**: The optional nanosecond offset.

 ### By Time

 ```json
 "range":{
-"startMillis":0,
-"startNanos":0,
-"endMillis":100,
-"endNanos":999999
+"startSeconds":"0.0",
+"endSeconds":"1.000999999"
 }
 ```
-- **startMillis**: The start time of the range in milliseconds since January 1, 1970 (the UNIX/JAVA epoch).
-- **[startNanos]**: The optional nanosecond offset to the milliseconds (range [0..999999]).
-- **endMillis**: The end time of the range.
-- **[endNanos]**: The optional nanosecond offset.
+- **startSeconds**: The start time of the range in seconds since January 1, 1970 (the UNIX epoch) as a decimal value including fractional seconds.
+- **endSeconds**: The end time of the range in seconds since January 1, 1970 (the UNIX epoch) as a decimal value including fractional seconds.

 <a name="query_channel_names"/>
@@ -235,44 +227,36 @@ The following examples build on waveform data (see below). They also work for sc
 "channel":"Channel_01",
 "data":[
 {
-"iocMillis":0,
-"iocNanos":0,
+"iocSeconds":"0.000000000",
 "pulseId":0,
-"globalMillis":0,
-"globalNanos":0,
+"globalSeconds":"0.000000000",
 "shape":[
 4
 ],
 "value":[1,2,3,4]
 },
 {
-"iocMillis":10,
-"iocNanos":0,
+"iocSeconds":"0.010000000",
 "pulseId":1,
-"globalMillis":10,
-"globalNanos":0,
+"globalSeconds":"0.010000000",
 "shape":[
 4
 ],
 "value":[2,3,4,5]
 },
 {
-"iocMillis":20,
-"iocNanos":0,
+"iocSeconds":"0.020000000",
 "pulseId":2,
-"globalMillis":20,
-"globalNanos":0,
+"globalSeconds":"0.020000000",
 "shape":[
 4
 ],
 "value":[3,4,5,6]
 },
 {
-"iocMillis":30,
-"iocNanos":0,
+"iocSeconds":"0.030000000",
 "pulseId":3,
-"globalMillis":30,
-"globalNanos":0,
+"globalSeconds":"0.030000000",
 "shape":[
 4
 ],
@@ -316,10 +300,8 @@ See JSON representation of the data above.
 ```json
 {
 "range":{
-"startMillis":0,
-"startNanos":0,
-"endMillis":30,
-"endNanos":999999
+"startSeconds":"0.0",
+"endSeconds":"0.030999999"
 },
 "channels":[
 "Channel_01"
@@ -330,7 +312,7 @@ See JSON representation of the data above.
 ###### Command
 ```bash
-curl -H "Content-Type: application/json" -X POST -d '{"range":{"startMillis":0,"startNanos":0,"endMillis":30,"endNanos":999999},"channels":["Channel_01"]}' http://data-api.psi.ch/sf/query
+curl -H "Content-Type: application/json" -X POST -d '{"range":{"startSeconds":"0.0","endSeconds":"0.030999999"},"channels":["Channel_01"]}' http://data-api.psi.ch/sf/query
 ```
 ###### Response
@@ -345,9 +327,7 @@ See JSON representation of the data above.
 {
 "range":{
 "startDate":"1970-01-01T01:00:00.000",
-"startNanos":0,
-"endDate":"1970-01-01T01:00:00.030",
-"endNanos":999999
+"endDate":"1970-01-01T01:00:00.030"
 },
 "channels":[
 "Channel_01"
@@ -360,7 +340,7 @@ Supported format is ISO8601 *YYYY-MM-DDThh:mm:ss.sTZD* (e.g. *1997-07-16T19:20:3
 ###### Command
 ```bash
-curl -H "Content-Type: application/json" -X POST -d '{"range":{"startDate":"1970-01-01T01:00:00.000","startNanos":0,"endDate":"1970-01-01T01:00:00.030","endNanos":999999},"channels":["Channel_01"]}' http://data-api.psi.ch/sf/query
+curl -H "Content-Type: application/json" -X POST -d '{"range":{"startDate":"1970-01-01T01:00:00.000","endDate":"1970-01-01T01:00:00.030"},"channels":["Channel_01"]}' http://data-api.psi.ch/sf/query
 ```
 ###### Response
@@ -375,10 +355,8 @@ See JSON representation of the data above.
 {
 "backend":"archiverappliance",
 "range":{
-"startMillis":0,
-"startNanos":0,
-"endMillis":30,
-"endNanos":999999
+"startSeconds":"0.0",
+"endSeconds":"0.030999999"
 },
 "channels":[
 "Channel_01"
@@ -392,7 +370,7 @@ Archiver Appliance supports queries by *time range* and *date range* only (as it
 ###### Command
 ```bash
-curl -H "Content-Type: application/json" -X POST -d '{"dbmode":"archiverappliance","range":{"startMillis":0,"startNanos":0,"endMillis":30,"endNanos":999999},"channels":["Channel_01"]}' http://data-api.psi.ch/sf/query
+curl -H "Content-Type: application/json" -X POST -d '{"dbmode":"archiverappliance","range":{"startSeconds":"0.0","endSeconds":"0.030999999"},"channels":["Channel_01"]}' http://data-api.psi.ch/sf/query
 ```
 ###### Response
@@ -453,10 +431,8 @@ curl --compressed -H "Content-Type: application/json" -X POST -d '{"compression"
 "fields":[
 "channel",
 "pulseId",
-"iocMillis",
-"iocNanos",
-"globalMillis",
-"globalNanos",
+"iocSeconds",
+"globalSeconds",
 "shape",
 "eventCount",
 "value"
@@ -464,10 +440,12 @@ curl --compressed -H "Content-Type: application/json" -X POST -d '{"compression"
 }
 ```
+
+It is possible to request the time in seconds (since January 1, 1970 (the UNIX epoch) as a decimal value including fractional seconds - using fields *globalSeconds* and *iocSeconds*) or in milliseconds (since January 1, 1970 (the JAVA epoch) - using fields *globalMillis* and *iocMillis*)

 ###### Command
 ```bash
-curl -H "Content-Type: application/json" -X POST -d '{"responseFormat":"csv","range":{"startPulseId":0,"endPulseId":4},"channels":["channel1","channel2"],"fields":["channel","pulseId","iocMillis","iocNanos","globalMillis","globalNanos","shape","eventCount","value"]}' http://data-api.psi.ch/sf/query
+curl -H "Content-Type: application/json" -X POST -d '{"responseFormat":"csv","range":{"startPulseId":0,"endPulseId":4},"channels":["channel1","channel2"],"fields":["channel","pulseId","iocSeconds","globalSeconds","shape","eventCount","value"]}' http://data-api.psi.ch/sf/query
 ```
 ###### Response
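On the response side, the *globalSeconds*/*iocSeconds* strings split cleanly into epoch seconds and nanoseconds. A small sketch (our own helper, not part of the query REST API) that turns such a string into a java.time.Instant:

```java
import java.math.BigDecimal;
import java.time.Instant;

public final class SecondsParser {

    /** Parses a decimal seconds string such as "0.030999999" into an Instant. */
    public static Instant parseSeconds(String seconds) {
        BigDecimal value = new BigDecimal(seconds);
        long epochSeconds = value.longValue();                        // integral part
        long nanos = value.subtract(BigDecimal.valueOf(epochSeconds))
                .movePointRight(9)
                .longValue();                                         // fractional part as nanoseconds
        return Instant.ofEpochSecond(epochSeconds, nanos);
    }

    public static void main(String[] args) {
        System.out.println(parseSeconds("0.030999999")); // 1970-01-01T00:00:00.030999999Z
    }
}
```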
@@ -475,17 +453,17 @@ curl -H "Content-Type: application/json" -X POST -d '{"responseFormat":"csv","ra
 The response is in CSV.
 ```text
-channel;pulseId;iocMillis;iocNanos;globalMillis;globalNanos;shape;eventCount;value
-testChannel1;0;0;0;0;0;[1];1;0
-testChannel1;1;10;0;10;0;[1];1;1
-testChannel1;2;20;0;20;0;[1];1;2
-testChannel1;3;30;0;30;0;[1];1;3
-testChannel1;4;40;0;40;0;[1];1;4
-testChannel2;0;0;0;0;0;[1];1;0
-testChannel2;1;10;0;10;0;[1];1;1
-testChannel2;2;20;0;20;0;[1];1;2
-testChannel2;3;30;0;30;0;[1];1;3
-testChannel2;4;40;0;40;0;[1];1;4
+channel;pulseId;iocSeconds;globalSeconds;shape;eventCount;value
+testChannel1;0;0.000000000;0.000000000;[1];1;0
+testChannel1;1;0.010000000;0.010000000;[1];1;1
+testChannel1;2;0.020000000;0.020000000;[1];1;2
+testChannel1;3;0.030000000;0.030000000;[1];1;3
+testChannel1;4;0.040000000;0.040000000;[1];1;4
+testChannel2;0;0.000000000;0.000000000;[1];1;0
+testChannel2;1;0.010000000;0.010000000;[1];1;1
+testChannel2;2;0.020000000;0.020000000;[1];1;2
+testChannel2;3;0.030000000;0.030000000;[1];1;3
+testChannel2;4;0.040000000;0.040000000;[1];1;4
 ```
 ##### Querying for Specific Fields
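Because this change renames and removes CSV columns, a client that resolves columns from the header row rather than hard-coding positions keeps working across both layouts. A minimal sketch using the field names shown above (the row values are taken from the example response):

```java
import java.util.Arrays;
import java.util.List;

public final class CsvRowReader {

    public static void main(String[] args) {
        String header = "channel;pulseId;iocSeconds;globalSeconds;shape;eventCount;value";
        String row = "testChannel1;3;0.030000000;0.030000000;[1];1;3";

        // Resolve column indices by name instead of position.
        List<String> columns = Arrays.asList(header.split(";"));
        String[] values = row.split(";");

        String channel = values[columns.indexOf("channel")];
        long pulseId = Long.parseLong(values[columns.indexOf("pulseId")]);
        String globalSeconds = values[columns.indexOf("globalSeconds")];

        System.out.println(channel + " pulse " + pulseId + " at " + globalSeconds + " s");
    }
}
```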
@@ -735,8 +713,8 @@ Array value [aggregations](https://github.psi.ch/projects/ST/repos/ch.psi.daq.qu
 "aggregations":["min","max","mean"],
 "fields":["globalMillis","value"],
 "range":{
-"globalMillis":0,
-"globalMillis":3
+"startSeconds":"0.0",
+"endSeconds":"0.030000000"
 },
 "channels":[
 "Channel_01"
@@ -747,7 +725,7 @@ Array value [aggregations](https://github.psi.ch/projects/ST/repos/ch.psi.daq.qu
 ###### Command
 ```bash
-curl -H "Content-Type: application/json" -X POST -d '{"binSize":10,"aggregationType":"value","aggregations":["min","max","mean"],"fields":["globalMillis","value"],"range":{"globalMillis":0,"globalMillis":3},"channels":["Channel_01"]}' http://data-api.psi.ch/sf/query
+curl -H "Content-Type: application/json" -X POST -d '{"binSize":10,"aggregationType":"value","aggregations":["min","max","mean"],"fields":["globalMillis","value"],"range":{"startSeconds":"0.0","endSeconds":"0.030000000"},"channels":["Channel_01"]}' http://data-api.psi.ch/sf/query
 ```
 ###### Response
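The curl commands above translate directly to any HTTP client. As a rough illustration, a plain-JDK sketch that posts a query with the new *startSeconds*/*endSeconds* range to the same endpoint (error handling omitted; nothing beyond the request body shown in the examples is assumed about the server):

```java
import java.io.OutputStream;
import java.net.HttpURLConnection;
import java.net.URL;
import java.nio.charset.StandardCharsets;
import java.util.Scanner;

public final class QueryClient {

    public static void main(String[] args) throws Exception {
        String body = "{\"range\":{\"startSeconds\":\"0.0\",\"endSeconds\":\"0.030000000\"},"
                + "\"channels\":[\"Channel_01\"]}";

        HttpURLConnection connection =
                (HttpURLConnection) new URL("http://data-api.psi.ch/sf/query").openConnection();
        connection.setRequestMethod("POST");
        connection.setRequestProperty("Content-Type", "application/json");
        connection.setDoOutput(true);

        // Write the JSON query ...
        try (OutputStream out = connection.getOutputStream()) {
            out.write(body.getBytes(StandardCharsets.UTF_8));
        }
        // ... and read the JSON response.
        try (Scanner scanner = new Scanner(connection.getInputStream(), "UTF-8").useDelimiter("\\A")) {
            System.out.println(scanner.hasNext() ? scanner.next() : "");
        }
    }
}
```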

View File

@@ -1,6 +1,6 @@
 # defines the fields that are included in the response
 # if no fields have been specified by the user
-queryrest.default.response.fields=channel,pulseId,globalMillis,globalNanos,iocMillis,iocNanos,shape,eventCount,value
+queryrest.default.response.fields=channel,pulseId,globalSeconds,shape,eventCount,value
 # aggregation which are included in the response by default if aggregation is enabled for a given query
 queryrest.default.response.aggregations=min,mean,max

View File

@@ -12,13 +12,11 @@ import org.springframework.web.servlet.config.annotation.WebMvcConfigurationSupport
 import ch.psi.daq.cassandra.reader.CassandraReader;
 import ch.psi.daq.cassandra.util.test.CassandraDataGen;
-import ch.psi.daq.domain.reader.DataReader;
 import ch.psi.daq.query.processor.QueryProcessor;
 import ch.psi.daq.query.processor.QueryProcessorLocal;
 import ch.psi.daq.test.cassandra.admin.CassandraTestAdmin;
 import ch.psi.daq.test.cassandra.admin.CassandraTestAdminImpl;
 import ch.psi.daq.test.query.config.LocalQueryTestConfig;
-import ch.psi.daq.test.queryrest.query.DummyArchiverApplianceReader;
 import ch.psi.daq.test.queryrest.query.DummyCassandraReader;

 @Configuration

View File

@@ -29,6 +29,8 @@ import ch.psi.daq.cassandra.request.range.RequestRangePulseId;
 import ch.psi.daq.cassandra.request.range.RequestRangeTime;
 import ch.psi.daq.cassandra.util.test.CassandraDataGen;
 import ch.psi.daq.common.ordering.Ordering;
+import ch.psi.daq.common.time.TimeUtils;
+import ch.psi.daq.domain.test.TestTimeUtils;
 import ch.psi.daq.query.model.Aggregation;
 import ch.psi.daq.query.model.AggregationType;
 import ch.psi.daq.query.model.Compression;
@@ -79,14 +81,14 @@ public class QueryRestControllerCsvTest extends AbstractDaqRestTest {
 cellProcessors.add(new NotNull());
 queryFields.add(QueryField.pulseId);
 cellProcessors.add(new NotNull());
+queryFields.add(QueryField.iocSeconds);
+cellProcessors.add(new NotNull());
 queryFields.add(QueryField.iocMillis);
 cellProcessors.add(new NotNull());
-queryFields.add(QueryField.iocNanos);
+queryFields.add(QueryField.globalSeconds);
 cellProcessors.add(new NotNull());
 queryFields.add(QueryField.globalMillis);
 cellProcessors.add(new NotNull());
-queryFields.add(QueryField.globalNanos);
-cellProcessors.add(new NotNull());
 queryFields.add(QueryField.shape);
 cellProcessors.add(new NotNull());
 queryFields.add(QueryField.eventCount);
@@ -125,10 +127,14 @@ public class QueryRestControllerCsvTest extends AbstractDaqRestTest {
 while ((customerMap = mapReader.read(header, processors)) != null) {
 assertEquals(TEST_CHANNEL + channelCount, customerMap.get(QueryField.channel.name()));
 assertEquals("" + pulse, customerMap.get(QueryField.pulseId.name()));
-assertEquals("" + pulse * 10, customerMap.get(QueryField.iocMillis.name()));
-assertEquals("0", customerMap.get(QueryField.iocNanos.name()));
-assertEquals("" + pulse * 10, customerMap.get(QueryField.globalMillis.name()));
-assertEquals("0", customerMap.get(QueryField.globalNanos.name()));
+assertEquals("" + TimeUtils.getTimeStr(TestTimeUtils.getTimeFromPulseId(pulse)),
+customerMap.get(QueryField.iocSeconds.name()));
+assertEquals("" + TimeUtils.getMillis(TestTimeUtils.getTimeFromPulseId(pulse)),
+customerMap.get(QueryField.iocMillis.name()));
+assertEquals("" + TimeUtils.getTimeStr(TestTimeUtils.getTimeFromPulseId(pulse)),
+customerMap.get(QueryField.globalSeconds.name()));
+assertEquals("" + TimeUtils.getMillis(TestTimeUtils.getTimeFromPulseId(pulse)),
+customerMap.get(QueryField.globalMillis.name()));
 assertEquals("[1]", customerMap.get(QueryField.shape.name()));
 assertEquals("1", customerMap.get(QueryField.eventCount.name()));
 assertEquals("" + pulse, customerMap.get(QueryField.value.name()));
@@ -170,14 +176,14 @@ public class QueryRestControllerCsvTest extends AbstractDaqRestTest {
 cellProcessors.add(new NotNull());
 queryFields.add(QueryField.pulseId);
 cellProcessors.add(new NotNull());
+queryFields.add(QueryField.iocSeconds);
+cellProcessors.add(new NotNull());
 queryFields.add(QueryField.iocMillis);
 cellProcessors.add(new NotNull());
-queryFields.add(QueryField.iocNanos);
+queryFields.add(QueryField.globalSeconds);
 cellProcessors.add(new NotNull());
 queryFields.add(QueryField.globalMillis);
 cellProcessors.add(new NotNull());
-queryFields.add(QueryField.globalNanos);
-cellProcessors.add(new NotNull());
 queryFields.add(QueryField.shape);
 cellProcessors.add(new NotNull());
 queryFields.add(QueryField.eventCount);
@@ -218,10 +224,14 @@ public class QueryRestControllerCsvTest extends AbstractDaqRestTest {
 while ((customerMap = mapReader.read(header, processors)) != null) {
 assertEquals(TEST_CHANNEL + channelCount, customerMap.get(QueryField.channel.name()));
 assertEquals("" + pulse, customerMap.get(QueryField.pulseId.name()));
-assertEquals("" + pulse * 10, customerMap.get(QueryField.iocMillis.name()));
-assertEquals("0", customerMap.get(QueryField.iocNanos.name()));
-assertEquals("" + pulse * 10, customerMap.get(QueryField.globalMillis.name()));
-assertEquals("0", customerMap.get(QueryField.globalNanos.name()));
+assertEquals("" + TimeUtils.getTimeStr(TestTimeUtils.getTimeFromPulseId(pulse)),
+customerMap.get(QueryField.iocSeconds.name()));
+assertEquals("" + TimeUtils.getMillis(TestTimeUtils.getTimeFromPulseId(pulse)),
+customerMap.get(QueryField.iocMillis.name()));
+assertEquals("" + TimeUtils.getTimeStr(TestTimeUtils.getTimeFromPulseId(pulse)),
+customerMap.get(QueryField.globalSeconds.name()));
+assertEquals("" + TimeUtils.getMillis(TestTimeUtils.getTimeFromPulseId(pulse)),
+customerMap.get(QueryField.globalMillis.name()));
 assertEquals("[1]", customerMap.get(QueryField.shape.name()));
 assertEquals("1", customerMap.get(QueryField.eventCount.name()));
 assertEquals("" + pulse, customerMap.get(QueryField.value.name()));
@@ -257,14 +267,14 @@ public class QueryRestControllerCsvTest extends AbstractDaqRestTest {
 cellProcessors.add(new NotNull());
 queryFields.add(QueryField.pulseId);
 cellProcessors.add(new NotNull());
+queryFields.add(QueryField.iocSeconds);
+cellProcessors.add(new NotNull());
 queryFields.add(QueryField.iocMillis);
 cellProcessors.add(new NotNull());
-queryFields.add(QueryField.iocNanos);
+queryFields.add(QueryField.globalSeconds);
 cellProcessors.add(new NotNull());
 queryFields.add(QueryField.globalMillis);
 cellProcessors.add(new NotNull());
-queryFields.add(QueryField.globalNanos);
-cellProcessors.add(new NotNull());
 queryFields.add(QueryField.shape);
 cellProcessors.add(new NotNull());
 queryFields.add(QueryField.eventCount);
@@ -301,10 +311,14 @@ public class QueryRestControllerCsvTest extends AbstractDaqRestTest {
 while ((customerMap = mapReader.read(header, processors)) != null) {
 assertEquals(channelName, customerMap.get(QueryField.channel.name()));
 assertEquals("" + pulse, customerMap.get(QueryField.pulseId.name()));
-assertEquals("" + pulse * 10, customerMap.get(QueryField.iocMillis.name()));
-assertEquals("0", customerMap.get(QueryField.iocNanos.name()));
-assertEquals("" + pulse * 10, customerMap.get(QueryField.globalMillis.name()));
-assertEquals("0", customerMap.get(QueryField.globalNanos.name()));
+assertEquals("" + TimeUtils.getTimeStr(TestTimeUtils.getTimeFromPulseId(pulse)),
+customerMap.get(QueryField.iocSeconds.name()));
+assertEquals("" + TimeUtils.getMillis(TestTimeUtils.getTimeFromPulseId(pulse)),
+customerMap.get(QueryField.iocMillis.name()));
+assertEquals("" + TimeUtils.getTimeStr(TestTimeUtils.getTimeFromPulseId(pulse)),
+customerMap.get(QueryField.globalSeconds.name()));
+assertEquals("" + TimeUtils.getMillis(TestTimeUtils.getTimeFromPulseId(pulse)),
+customerMap.get(QueryField.globalMillis.name()));
 assertEquals("[2048]", customerMap.get(QueryField.shape.name()));
 assertEquals("1", customerMap.get(QueryField.eventCount.name()));
 assertTrue(customerMap.get(QueryField.value.name()).toString().startsWith("["));
@@ -321,8 +335,8 @@ public class QueryRestControllerCsvTest extends AbstractDaqRestTest {
 public void testTimeRangeQuery() throws Exception {
 DAQQuery request = new DAQQuery(
 new RequestRangeTime(
-0,
-10),
+TimeUtils.getTimeFromMillis(0, 0),
+TimeUtils.getTimeFromMillis(10, 0)),
 TEST_CHANNEL_NAMES);

 request.setResponseFormat(ResponseFormat.CSV);
@@ -332,14 +346,14 @@ public class QueryRestControllerCsvTest extends AbstractDaqRestTest {
 cellProcessors.add(new NotNull());
 queryFields.add(QueryField.pulseId);
 cellProcessors.add(new NotNull());
+queryFields.add(QueryField.iocSeconds);
+cellProcessors.add(new NotNull());
 queryFields.add(QueryField.iocMillis);
 cellProcessors.add(new NotNull());
-queryFields.add(QueryField.iocNanos);
+queryFields.add(QueryField.globalSeconds);
 cellProcessors.add(new NotNull());
 queryFields.add(QueryField.globalMillis);
 cellProcessors.add(new NotNull());
-queryFields.add(QueryField.globalNanos);
-cellProcessors.add(new NotNull());
 queryFields.add(QueryField.shape);
 cellProcessors.add(new NotNull());
 queryFields.add(QueryField.eventCount);
@@ -378,10 +392,14 @@ public class QueryRestControllerCsvTest extends AbstractDaqRestTest {
 while ((customerMap = mapReader.read(header, processors)) != null) {
 assertEquals(TEST_CHANNEL + channelCount, customerMap.get(QueryField.channel.name()));
 assertEquals("" + pulse, customerMap.get(QueryField.pulseId.name()));
-assertEquals("" + pulse * 10, customerMap.get(QueryField.iocMillis.name()));
-assertEquals("0", customerMap.get(QueryField.iocNanos.name()));
-assertEquals("" + pulse * 10, customerMap.get(QueryField.globalMillis.name()));
-assertEquals("0", customerMap.get(QueryField.globalNanos.name()));
+assertEquals("" + TimeUtils.getTimeStr(TestTimeUtils.getTimeFromPulseId(pulse)),
+customerMap.get(QueryField.iocSeconds.name()));
+assertEquals("" + TimeUtils.getMillis(TestTimeUtils.getTimeFromPulseId(pulse)),
+customerMap.get(QueryField.iocMillis.name()));
+assertEquals("" + TimeUtils.getTimeStr(TestTimeUtils.getTimeFromPulseId(pulse)),
+customerMap.get(QueryField.globalSeconds.name()));
+assertEquals("" + TimeUtils.getMillis(TestTimeUtils.getTimeFromPulseId(pulse)),
+customerMap.get(QueryField.globalMillis.name()));
 assertEquals("[1]", customerMap.get(QueryField.shape.name()));
 assertEquals("1", customerMap.get(QueryField.eventCount.name()));
 assertEquals("" + pulse, customerMap.get(QueryField.value.name()));
@@ -418,14 +436,14 @@ public class QueryRestControllerCsvTest extends AbstractDaqRestTest {
 cellProcessors.add(new NotNull());
 queryFields.add(QueryField.pulseId);
 cellProcessors.add(new NotNull());
+queryFields.add(QueryField.iocSeconds);
+cellProcessors.add(new NotNull());
 queryFields.add(QueryField.iocMillis);
 cellProcessors.add(new NotNull());
-queryFields.add(QueryField.iocNanos);
+queryFields.add(QueryField.globalSeconds);
 cellProcessors.add(new NotNull());
 queryFields.add(QueryField.globalMillis);
 cellProcessors.add(new NotNull());
-queryFields.add(QueryField.globalNanos);
-cellProcessors.add(new NotNull());
 queryFields.add(QueryField.shape);
 cellProcessors.add(new NotNull());
 queryFields.add(QueryField.eventCount);
@@ -464,10 +482,14 @@ public class QueryRestControllerCsvTest extends AbstractDaqRestTest {
 while ((customerMap = mapReader.read(header, processors)) != null) {
 assertEquals(TEST_CHANNEL + channelCount, customerMap.get(QueryField.channel.name()));
 assertEquals("" + pulse, customerMap.get(QueryField.pulseId.name()));
-assertEquals("" + pulse * 10, customerMap.get(QueryField.iocMillis.name()));
-assertEquals("0", customerMap.get(QueryField.iocNanos.name()));
-assertEquals("" + pulse * 10, customerMap.get(QueryField.globalMillis.name()));
-assertEquals("0", customerMap.get(QueryField.globalNanos.name()));
+assertEquals("" + TimeUtils.getTimeStr(TestTimeUtils.getTimeFromPulseId(pulse)),
+customerMap.get(QueryField.iocSeconds.name()));
+assertEquals("" + TimeUtils.getMillis(TestTimeUtils.getTimeFromPulseId(pulse)),
+customerMap.get(QueryField.iocMillis.name()));
+assertEquals("" + TimeUtils.getTimeStr(TestTimeUtils.getTimeFromPulseId(pulse)),
+customerMap.get(QueryField.globalSeconds.name()));
+assertEquals("" + TimeUtils.getMillis(TestTimeUtils.getTimeFromPulseId(pulse)),
+customerMap.get(QueryField.globalMillis.name()));
 assertEquals("[1]", customerMap.get(QueryField.shape.name()));
 assertEquals("1", customerMap.get(QueryField.eventCount.name()));
 assertEquals("" + pulse, customerMap.get(QueryField.value.name()));
@@ -567,14 +589,14 @@ public class QueryRestControllerCsvTest extends AbstractDaqRestTest {
 cellProcessors.add(new NotNull());
 queryFields.add(QueryField.pulseId);
 cellProcessors.add(new NotNull());
+queryFields.add(QueryField.iocSeconds);
+cellProcessors.add(new NotNull());
 queryFields.add(QueryField.iocMillis);
 cellProcessors.add(new NotNull());
-queryFields.add(QueryField.iocNanos);
+queryFields.add(QueryField.globalSeconds);
 cellProcessors.add(new NotNull());
 queryFields.add(QueryField.globalMillis);
 cellProcessors.add(new NotNull());
-queryFields.add(QueryField.globalNanos);
-cellProcessors.add(new NotNull());
 queryFields.add(QueryField.shape);
 cellProcessors.add(new NotNull());
 queryFields.add(QueryField.eventCount);
@@ -618,10 +640,14 @@ public class QueryRestControllerCsvTest extends AbstractDaqRestTest {
 while ((customerMap = mapReader.read(header, processors)) != null) {
 assertEquals(TEST_CHANNEL_01, customerMap.get(QueryField.channel.name()));
 assertEquals("" + pulse, customerMap.get(QueryField.pulseId.name()));
-assertEquals("" + pulse * 10, customerMap.get(QueryField.iocMillis.name()));
-assertEquals("0", customerMap.get(QueryField.iocNanos.name()));
-assertEquals("" + pulse * 10, customerMap.get(QueryField.globalMillis.name()));
-assertEquals("0", customerMap.get(QueryField.globalNanos.name()));
+assertEquals("" + TimeUtils.getTimeStr(TestTimeUtils.getTimeFromPulseId(pulse)),
+customerMap.get(QueryField.iocSeconds.name()));
+assertEquals("" + TimeUtils.getMillis(TestTimeUtils.getTimeFromPulseId(pulse)),
+customerMap.get(QueryField.iocMillis.name()));
+assertEquals("" + TimeUtils.getTimeStr(TestTimeUtils.getTimeFromPulseId(pulse)),
+customerMap.get(QueryField.globalSeconds.name()));
+assertEquals("" + TimeUtils.getMillis(TestTimeUtils.getTimeFromPulseId(pulse)),
+customerMap.get(QueryField.globalMillis.name()));
 assertEquals("[1]", customerMap.get(QueryField.shape.name()));
 assertEquals("5", customerMap.get(QueryField.eventCount.name()));
 assertEquals("" + pulse + ".0", customerMap.get(Aggregation.min.name()));
@@ -655,14 +681,14 @@ public class QueryRestControllerCsvTest extends AbstractDaqRestTest {
 cellProcessors.add(new NotNull());
 queryFields.add(QueryField.pulseId);
 cellProcessors.add(new NotNull());
+queryFields.add(QueryField.iocSeconds);
+cellProcessors.add(new NotNull());
 queryFields.add(QueryField.iocMillis);
 cellProcessors.add(new NotNull());
-queryFields.add(QueryField.iocNanos);
+queryFields.add(QueryField.globalSeconds);
 cellProcessors.add(new NotNull());
 queryFields.add(QueryField.globalMillis);
 cellProcessors.add(new NotNull());
-queryFields.add(QueryField.globalNanos);
-cellProcessors.add(new NotNull());
 queryFields.add(QueryField.shape);
 cellProcessors.add(new NotNull());
 queryFields.add(QueryField.eventCount);
@@ -707,10 +733,14 @@ public class QueryRestControllerCsvTest extends AbstractDaqRestTest {
 while ((customerMap = mapReader.read(header, processors)) != null) {
 assertEquals(TEST_CHANNEL_01, customerMap.get(QueryField.channel.name()));
 assertEquals("" + pulse, customerMap.get(QueryField.pulseId.name()));
-assertEquals("" + pulse * 10, customerMap.get(QueryField.iocMillis.name()));
-assertEquals("0", customerMap.get(QueryField.iocNanos.name()));
-assertEquals("" + pulse * 10, customerMap.get(QueryField.globalMillis.name()));
-assertEquals("0", customerMap.get(QueryField.globalNanos.name()));
+assertEquals("" + TimeUtils.getTimeStr(TestTimeUtils.getTimeFromPulseId(pulse)),
+customerMap.get(QueryField.iocSeconds.name()));
+assertEquals("" + TimeUtils.getMillis(TestTimeUtils.getTimeFromPulseId(pulse)),
+customerMap.get(QueryField.iocMillis.name()));
+assertEquals("" + TimeUtils.getTimeStr(TestTimeUtils.getTimeFromPulseId(pulse)),
+customerMap.get(QueryField.globalSeconds.name()));
+assertEquals("" + TimeUtils.getMillis(TestTimeUtils.getTimeFromPulseId(pulse)),
+customerMap.get(QueryField.globalMillis.name()));
 assertEquals("[1]", customerMap.get(QueryField.shape.name()));
 assertEquals("10", customerMap.get(QueryField.eventCount.name()));
 assertEquals("" + pulse + ".0", customerMap.get(Aggregation.min.name()));

View File

@@ -15,10 +15,13 @@ import ch.psi.daq.cassandra.request.range.RequestRangePulseId;
 import ch.psi.daq.cassandra.request.range.RequestRangeTime;
 import ch.psi.daq.cassandra.util.test.CassandraDataGen;
 import ch.psi.daq.common.ordering.Ordering;
+import ch.psi.daq.common.time.TimeUtils;
 import ch.psi.daq.domain.json.ChannelName;
 import ch.psi.daq.domain.reader.Backend;
+import ch.psi.daq.domain.test.TestTimeUtils;
 import ch.psi.daq.query.model.AggregationType;
 import ch.psi.daq.query.model.Compression;
+import ch.psi.daq.query.model.QueryField;
 import ch.psi.daq.query.model.impl.DAQQueries;
 import ch.psi.daq.query.model.impl.DAQQuery;
 import ch.psi.daq.query.model.impl.DAQQueryElement;
@@ -67,16 +70,16 @@ public class QueryRestControllerJsonTest extends AbstractDaqRestTest {
 .andExpect(MockMvcResultMatchers.jsonPath("$[0].channels[0]").value("BoolScalar"))
 .andExpect(MockMvcResultMatchers.jsonPath("$[0].channels[1]").exists())
 .andExpect(MockMvcResultMatchers.jsonPath("$[0].channels[1]").value("BoolWaveform"))
 // .andExpect(MockMvcResultMatchers.jsonPath("$[1]").exists())
 // .andExpect(MockMvcResultMatchers.jsonPath("$[1].backend").value(Backend.archiverappliance.name()))
 // .andExpect(MockMvcResultMatchers.jsonPath("$[1].channels").isArray())
 // .andExpect(MockMvcResultMatchers.jsonPath("$[1].channels[0]").exists())
 // .andExpect(
 // MockMvcResultMatchers.jsonPath("$[1].channels[0]").value(DummyArchiverApplianceReader.TEST_CHANNEL_1))
 // .andExpect(MockMvcResultMatchers.jsonPath("$[1].channels[1]").exists())
 // .andExpect(
 // MockMvcResultMatchers.jsonPath("$[1].channels[1]").value(DummyArchiverApplianceReader.TEST_CHANNEL_2))
 ;
 }
@@ -100,11 +103,11 @@ public class QueryRestControllerJsonTest extends AbstractDaqRestTest {
 .andExpect(MockMvcResultMatchers.jsonPath("$[0].channels[2]").value("UInt32Scalar"))
 .andExpect(MockMvcResultMatchers.jsonPath("$[0].channels[3]").exists())
 .andExpect(MockMvcResultMatchers.jsonPath("$[0].channels[3]").value("UInt32Waveform"))
 // .andExpect(MockMvcResultMatchers.jsonPath("$[1]").exists())
 // .andExpect(MockMvcResultMatchers.jsonPath("$[1].backend").value(Backend.archiverappliance.name()))
 // .andExpect(MockMvcResultMatchers.jsonPath("$[1].channels").isArray())
 // .andExpect(MockMvcResultMatchers.jsonPath("$[1].channels[0]").doesNotExist())
 ;
 }

 @Test
@@ -156,12 +159,12 @@ public class QueryRestControllerJsonTest extends AbstractDaqRestTest {
 .andExpect(MockMvcResultMatchers.jsonPath("$[0].channels").isArray())
 .andExpect(MockMvcResultMatchers.jsonPath("$[0].channels[23]").exists())
 .andExpect(MockMvcResultMatchers.jsonPath("$[0].channels[24]").doesNotExist())
 // .andExpect(MockMvcResultMatchers.jsonPath("$[1]").exists())
 // .andExpect(MockMvcResultMatchers.jsonPath("$[1].backend").value(Backend.archiverappliance.name()))
 // .andExpect(MockMvcResultMatchers.jsonPath("$[1].channels").isArray())
 // .andExpect(MockMvcResultMatchers.jsonPath("$[1].channels[2]").exists())
 // .andExpect(MockMvcResultMatchers.jsonPath("$[1].channels[3]").doesNotExist())
 ;

 // each reload add another channel
 request.setReload(true);
@@ -182,12 +185,12 @@ public class QueryRestControllerJsonTest extends AbstractDaqRestTest {
 .andExpect(MockMvcResultMatchers.jsonPath("$[0].channels").isArray())
 .andExpect(MockMvcResultMatchers.jsonPath("$[0].channels[24]").exists())
 .andExpect(MockMvcResultMatchers.jsonPath("$[0].channels[25]").doesNotExist())
 // .andExpect(MockMvcResultMatchers.jsonPath("$[1]").exists())
 // .andExpect(MockMvcResultMatchers.jsonPath("$[1].backend").value(Backend.archiverappliance.name()))
 // .andExpect(MockMvcResultMatchers.jsonPath("$[1].channels").isArray())
 // .andExpect(MockMvcResultMatchers.jsonPath("$[1].channels[3]").exists())
 // .andExpect(MockMvcResultMatchers.jsonPath("$[1].channels[4]").doesNotExist())
 ;
 }

 @Test
@@ -279,9 +282,14 @@ public class QueryRestControllerJsonTest extends AbstractDaqRestTest {
 public void testPulseRangeQuery() throws Exception {
 DAQQuery request = new DAQQuery(
 new RequestRangePulseId(
-10,
-11),
+100,
+101),
 TEST_CHANNEL_NAMES);
+request.addField(QueryField.pulseId);
+request.addField(QueryField.globalSeconds);
+request.addField(QueryField.globalMillis);
+request.addField(QueryField.iocSeconds);
+request.addField(QueryField.iocMillis);

 String content = mapper.writeValueAsString(request);
 System.out.println(content);
@@ -297,25 +305,45 @@ public class QueryRestControllerJsonTest extends AbstractDaqRestTest {
 .andExpect(MockMvcResultMatchers.jsonPath("$").isArray())
 .andExpect(MockMvcResultMatchers.jsonPath("$[0]").exists())
 .andExpect(MockMvcResultMatchers.jsonPath("$[0].channel.name").value(TEST_CHANNEL_01))
-.andExpect(MockMvcResultMatchers.jsonPath("$[0].data[0].pulseId").value(10))
-.andExpect(MockMvcResultMatchers.jsonPath("$[0].data[0].globalMillis").value(100))
-.andExpect(MockMvcResultMatchers.jsonPath("$[0].data[1].pulseId").value(11))
-.andExpect(MockMvcResultMatchers.jsonPath("$[0].data[1].globalMillis").value(110))
+.andExpect(MockMvcResultMatchers.jsonPath("$[0].data[0].pulseId").value(100))
+.andExpect(MockMvcResultMatchers.jsonPath("$[0].data[0].globalSeconds").value(
+TestTimeUtils.getTimeStr(1, 0)))
+.andExpect(MockMvcResultMatchers.jsonPath("$[0].data[0].globalMillis").value(1000))
+.andExpect(MockMvcResultMatchers.jsonPath("$[0].data[0].iocSeconds").value(
+TestTimeUtils.getTimeStr(1, 0)))
+.andExpect(MockMvcResultMatchers.jsonPath("$[0].data[0].iocMillis").value(1000))
+.andExpect(MockMvcResultMatchers.jsonPath("$[0].data[1].pulseId").value(101))
+.andExpect(MockMvcResultMatchers.jsonPath("$[0].data[1].globalSeconds").value(
+TestTimeUtils.getTimeStr(1, 10000000)))
+.andExpect(MockMvcResultMatchers.jsonPath("$[0].data[1].globalMillis").value(1010))
+.andExpect(MockMvcResultMatchers.jsonPath("$[0].data[1].iocSeconds").value(
+TestTimeUtils.getTimeStr(1, 10000000)))
+.andExpect(MockMvcResultMatchers.jsonPath("$[0].data[1].iocMillis").value(1010))
 .andExpect(MockMvcResultMatchers.jsonPath("$[1]").exists())
 .andExpect(MockMvcResultMatchers.jsonPath("$[1].channel.name").value(TEST_CHANNEL_02))
 .andExpect(MockMvcResultMatchers.jsonPath("$[1].data").isArray())
-.andExpect(MockMvcResultMatchers.jsonPath("$[1].data[0].pulseId").value(10))
-.andExpect(MockMvcResultMatchers.jsonPath("$[1].data[0].globalMillis").value(100))
-.andExpect(MockMvcResultMatchers.jsonPath("$[1].data[1].pulseId").value(11))
-.andExpect(MockMvcResultMatchers.jsonPath("$[1].data[1].globalMillis").value(110));
+.andExpect(MockMvcResultMatchers.jsonPath("$[1].data[0].pulseId").value(100))
+.andExpect(MockMvcResultMatchers.jsonPath("$[1].data[0].globalSeconds").value(
+TestTimeUtils.getTimeStr(1, 0)))
+.andExpect(MockMvcResultMatchers.jsonPath("$[1].data[0].globalMillis").value(1000))
+.andExpect(MockMvcResultMatchers.jsonPath("$[1].data[0].iocSeconds").value(
+TestTimeUtils.getTimeStr(1, 0)))
+.andExpect(MockMvcResultMatchers.jsonPath("$[1].data[0].iocMillis").value(1000))
+.andExpect(MockMvcResultMatchers.jsonPath("$[1].data[1].pulseId").value(101))
+.andExpect(MockMvcResultMatchers.jsonPath("$[1].data[1].globalSeconds").value(
+TestTimeUtils.getTimeStr(1, 10000000)))
+.andExpect(MockMvcResultMatchers.jsonPath("$[1].data[1].globalMillis").value(1010))
+.andExpect(MockMvcResultMatchers.jsonPath("$[1].data[1].iocSeconds").value(
+TestTimeUtils.getTimeStr(1, 10000000)))
+.andExpect(MockMvcResultMatchers.jsonPath("$[1].data[1].iocMillis").value(1010));
 }

 @Test
 public void testPulseRangeQueryBackends() throws Exception {
 DAQQuery request = new DAQQuery(
 new RequestRangePulseId(
-10,
-11),
+100,
+101),
 new ChannelName(TEST_CHANNEL_01, Backend.databuffer),
 new ChannelName(TEST_CHANNEL_02, Backend.archiverappliance));
@@ -334,19 +362,21 @@ public class QueryRestControllerJsonTest extends AbstractDaqRestTest {
 .andExpect(MockMvcResultMatchers.jsonPath("$[0]").exists())
 .andExpect(MockMvcResultMatchers.jsonPath("$[0].channel").isMap())
 .andExpect(MockMvcResultMatchers.jsonPath("$[0].channel.name").value(TEST_CHANNEL_01))
-.andExpect(MockMvcResultMatchers.jsonPath("$[0].data[0].pulseId").value(10))
-.andExpect(MockMvcResultMatchers.jsonPath("$[0].data[0].globalMillis").value(100))
-.andExpect(MockMvcResultMatchers.jsonPath("$[0].data[1].pulseId").value(11))
-.andExpect(MockMvcResultMatchers.jsonPath("$[0].data[1].globalMillis").value(110))
+.andExpect(MockMvcResultMatchers.jsonPath("$[0].data[0].pulseId").value(100))
+.andExpect(MockMvcResultMatchers.jsonPath("$[0].data[0].globalSeconds").value(
+TestTimeUtils.getTimeStr(1, 0)))
+.andExpect(MockMvcResultMatchers.jsonPath("$[0].data[1].pulseId").value(101))
+.andExpect(MockMvcResultMatchers.jsonPath("$[0].data[1].globalSeconds").value(
+TestTimeUtils.getTimeStr(1, 10000000)))
 // .andExpect(MockMvcResultMatchers.jsonPath("$[1]").exists())
 // .andExpect(MockMvcResultMatchers.jsonPath("$[1].channel").isMap())
 // .andExpect(MockMvcResultMatchers.jsonPath("$[1].channel.name").value(TEST_CHANNEL_02))
 // .andExpect(MockMvcResultMatchers.jsonPath("$[1].data").isArray())
 // .andExpect(MockMvcResultMatchers.jsonPath("$[1].data[0].pulseId").value(10))
 // .andExpect(MockMvcResultMatchers.jsonPath("$[1].data[0].globalMillis").value(100))
 // .andExpect(MockMvcResultMatchers.jsonPath("$[1].data[1].pulseId").value(11))
 // .andExpect(MockMvcResultMatchers.jsonPath("$[1].data[1].globalMillis").value(110))
 ;
 }

 @Test
@@ -355,13 +385,13 @@ public class QueryRestControllerJsonTest extends AbstractDaqRestTest {
 DAQQueries request = new DAQQueries(
 new DAQQueryElement(
 new RequestRangePulseId(
-10,
-11),
+100,
+101),
 TEST_CHANNEL_NAMES),
 new DAQQueryElement(
 new RequestRangePulseId(
-10,
-11),
+100,
+101),
 testChannel3));

 String content = mapper.writeValueAsString(request);
@@ -380,33 +410,39 @@ public class QueryRestControllerJsonTest extends AbstractDaqRestTest {
 .andExpect(MockMvcResultMatchers.jsonPath("$[0]").isArray())
 .andExpect(MockMvcResultMatchers.jsonPath("$[0][0]").exists())
 .andExpect(MockMvcResultMatchers.jsonPath("$[0][0].channel.name").value(TEST_CHANNEL_01))
-.andExpect(MockMvcResultMatchers.jsonPath("$[0][0].data[0].pulseId").value(10))
-.andExpect(MockMvcResultMatchers.jsonPath("$[0][0].data[0].globalMillis").value(100))
-.andExpect(MockMvcResultMatchers.jsonPath("$[0][0].data[1].pulseId").value(11))
-.andExpect(MockMvcResultMatchers.jsonPath("$[0][0].data[1].globalMillis").value(110))
+.andExpect(MockMvcResultMatchers.jsonPath("$[0][0].data[0].pulseId").value(100))
+.andExpect(MockMvcResultMatchers.jsonPath("$[0][0].data[0].globalSeconds").value(
+TestTimeUtils.getTimeStr(1, 0)))
+.andExpect(MockMvcResultMatchers.jsonPath("$[0][0].data[1].pulseId").value(101))
+.andExpect(MockMvcResultMatchers.jsonPath("$[0][0].data[1].globalSeconds").value(
+TestTimeUtils.getTimeStr(1, 10000000)))
 .andExpect(MockMvcResultMatchers.jsonPath("$[0][1]").exists())
 .andExpect(MockMvcResultMatchers.jsonPath("$[0][1].channel.name").value(TEST_CHANNEL_02))
 .andExpect(MockMvcResultMatchers.jsonPath("$[0][1].data").isArray())
-.andExpect(MockMvcResultMatchers.jsonPath("$[0][1].data[0].pulseId").value(10))
-.andExpect(MockMvcResultMatchers.jsonPath("$[0][1].data[0].globalMillis").value(100))
-.andExpect(MockMvcResultMatchers.jsonPath("$[0][1].data[1].pulseId").value(11))
-.andExpect(MockMvcResultMatchers.jsonPath("$[0][1].data[1].globalMillis").value(110))
+.andExpect(MockMvcResultMatchers.jsonPath("$[0][1].data[0].pulseId").value(100))
+.andExpect(MockMvcResultMatchers.jsonPath("$[0][1].data[0].globalSeconds").value(
+TestTimeUtils.getTimeStr(1, 0)))
+.andExpect(MockMvcResultMatchers.jsonPath("$[0][1].data[1].pulseId").value(101))
+.andExpect(MockMvcResultMatchers.jsonPath("$[0][1].data[1].globalSeconds").value(
+TestTimeUtils.getTimeStr(1, 10000000)))
 .andExpect(MockMvcResultMatchers.jsonPath("$[1]").exists())
 .andExpect(MockMvcResultMatchers.jsonPath("$[1]").isArray())
 .andExpect(MockMvcResultMatchers.jsonPath("$[1][0]").exists())
 .andExpect(MockMvcResultMatchers.jsonPath("$[1][0].channel.name").value(testChannel3))
-.andExpect(MockMvcResultMatchers.jsonPath("$[1][0].data[0].pulseId").value(10))
-.andExpect(MockMvcResultMatchers.jsonPath("$[1][0].data[0].globalMillis").value(100))
-.andExpect(MockMvcResultMatchers.jsonPath("$[1][0].data[1].pulseId").value(11))
-.andExpect(MockMvcResultMatchers.jsonPath("$[1][0].data[1].globalMillis").value(110));
+.andExpect(MockMvcResultMatchers.jsonPath("$[1][0].data[0].pulseId").value(100))
+.andExpect(MockMvcResultMatchers.jsonPath("$[1][0].data[0].globalSeconds").value(
+TestTimeUtils.getTimeStr(1, 0)))
+.andExpect(MockMvcResultMatchers.jsonPath("$[1][0].data[1].pulseId").value(101))
+.andExpect(MockMvcResultMatchers.jsonPath("$[1][0].data[1].globalSeconds").value(
+TestTimeUtils.getTimeStr(1, 10000000)));
 }

 @Test
 public void testTimeRangeQuery() throws Exception {
 DAQQuery request = new DAQQuery(
 new RequestRangeTime(
-100,
-110),
+TimeUtils.getTimeFromMillis(2000, 0),
+TimeUtils.getTimeFromMillis(2010, 0)),
 TEST_CHANNEL_NAMES);

 String content = mapper.writeValueAsString(request);
@@ -423,24 +459,28 @@ public class QueryRestControllerJsonTest extends AbstractDaqRestTest {
 .andExpect(MockMvcResultMatchers.jsonPath("$[0].channel").isMap())
 .andExpect(MockMvcResultMatchers.jsonPath("$[0].channel.name").value(TEST_CHANNEL_01))
 .andExpect(MockMvcResultMatchers.jsonPath("$[0].data").isArray())
-.andExpect(MockMvcResultMatchers.jsonPath("$[0].data[0].pulseId").value(10))
-.andExpect(MockMvcResultMatchers.jsonPath("$[0].data[0].globalMillis").value(100))
-.andExpect(MockMvcResultMatchers.jsonPath("$[0].data[1].pulseId").value(11))
-.andExpect(MockMvcResultMatchers.jsonPath("$[0].data[1].globalMillis").value(110))
+.andExpect(MockMvcResultMatchers.jsonPath("$[0].data[0].pulseId").value(200))
+.andExpect(MockMvcResultMatchers.jsonPath("$[0].data[0].globalSeconds").value(
+TestTimeUtils.getTimeStr(2, 0)))
+.andExpect(MockMvcResultMatchers.jsonPath("$[0].data[1].pulseId").value(201))
+.andExpect(MockMvcResultMatchers.jsonPath("$[0].data[1].globalSeconds").value(
+TestTimeUtils.getTimeStr(2, 10000000)))
 .andExpect(MockMvcResultMatchers.jsonPath("$[1]").exists())
 .andExpect(MockMvcResultMatchers.jsonPath("$[1].channel").isMap())
 .andExpect(MockMvcResultMatchers.jsonPath("$[1].channel.name").value(TEST_CHANNEL_02))
 .andExpect(MockMvcResultMatchers.jsonPath("$[1].data").isArray())
-.andExpect(MockMvcResultMatchers.jsonPath("$[1].data[0].pulseId").value(10))
-.andExpect(MockMvcResultMatchers.jsonPath("$[1].data[0].globalMillis").value(100))
-.andExpect(MockMvcResultMatchers.jsonPath("$[1].data[1].pulseId").value(11))
-.andExpect(MockMvcResultMatchers.jsonPath("$[1].data[1].globalMillis").value(110));
+.andExpect(MockMvcResultMatchers.jsonPath("$[1].data[0].pulseId").value(200))
+.andExpect(MockMvcResultMatchers.jsonPath("$[1].data[0].globalSeconds").value(
+TestTimeUtils.getTimeStr(2, 0)))
+.andExpect(MockMvcResultMatchers.jsonPath("$[1].data[1].pulseId").value(201))
+.andExpect(MockMvcResultMatchers.jsonPath("$[1].data[1].globalSeconds").value(
+TestTimeUtils.getTimeStr(2, 10000000)));
 }

 @Test
 public void testDateRangeQuery() throws Exception {
-String startDate = RequestRangeDate.format(100);
-String endDate = RequestRangeDate.format(110);
+String startDate = RequestRangeDate.format(1000);
+String endDate = RequestRangeDate.format(1010);
 DAQQuery request = new DAQQuery(
 new RequestRangeDate(
 startDate,
@@ -464,18 +504,22 @@ public class QueryRestControllerJsonTest extends AbstractDaqRestTest {
 .andExpect(MockMvcResultMatchers.jsonPath("$[0].channel.name").value(TEST_CHANNEL_01))
 .andExpect(MockMvcResultMatchers.jsonPath("$[0].channel.backend").value("databuffer"))
 .andExpect(MockMvcResultMatchers.jsonPath("$[0].data").isArray())
-.andExpect(MockMvcResultMatchers.jsonPath("$[0].data[0].pulseId").value(10))
-.andExpect(MockMvcResultMatchers.jsonPath("$[0].data[0].globalMillis").value(100))
-.andExpect(MockMvcResultMatchers.jsonPath("$[0].data[1].pulseId").value(11))
-.andExpect(MockMvcResultMatchers.jsonPath("$[0].data[1].globalMillis").value(110))
+.andExpect(MockMvcResultMatchers.jsonPath("$[0].data[0].pulseId").value(100))
+.andExpect(MockMvcResultMatchers.jsonPath("$[0].data[0].globalSeconds").value(
+TestTimeUtils.getTimeStr(1, 0)))
+.andExpect(MockMvcResultMatchers.jsonPath("$[0].data[1].pulseId").value(101))
+.andExpect(MockMvcResultMatchers.jsonPath("$[0].data[1].globalSeconds").value(
+TestTimeUtils.getTimeStr(1, 10000000)))
 .andExpect(MockMvcResultMatchers.jsonPath("$[1]").exists())
 .andExpect(MockMvcResultMatchers.jsonPath("$[1].channel.name").value(TEST_CHANNEL_02))
 .andExpect(MockMvcResultMatchers.jsonPath("$[1].channel.backend").value("databuffer"))
 .andExpect(MockMvcResultMatchers.jsonPath("$[1].data").isArray())
-.andExpect(MockMvcResultMatchers.jsonPath("$[1].data[0].pulseId").value(10))
-.andExpect(MockMvcResultMatchers.jsonPath("$[1].data[0].globalMillis").value(100))
-.andExpect(MockMvcResultMatchers.jsonPath("$[1].data[1].pulseId").value(11))
-.andExpect(MockMvcResultMatchers.jsonPath("$[1].data[1].globalMillis").value(110));
+.andExpect(MockMvcResultMatchers.jsonPath("$[1].data[0].pulseId").value(100))
+.andExpect(MockMvcResultMatchers.jsonPath("$[1].data[0].globalSeconds").value(
+TestTimeUtils.getTimeStr(1, 0)))
+.andExpect(MockMvcResultMatchers.jsonPath("$[1].data[1].pulseId").value(101))
+.andExpect(MockMvcResultMatchers.jsonPath("$[1].data[1].globalSeconds").value(
+TestTimeUtils.getTimeStr(1, 10000000)));
 }

 @Test
@@ -520,8 +564,8 @@ public class QueryRestControllerJsonTest extends AbstractDaqRestTest { @Test @Test
@Test @Test
public void testDateRangeQueryNrOfBinsAggregate() throws Exception { public void testDateRangeQueryNrOfBinsAggregate() throws Exception {
long startTime = 100; long startTime = 1000;
long endTime = 199; long endTime = 1099;
String startDate = RequestRangeDate.format(startTime); String startDate = RequestRangeDate.format(startTime);
String endDate = RequestRangeDate.format(endTime); String endDate = RequestRangeDate.format(endTime);
DAQQuery request = new DAQQuery( DAQQuery request = new DAQQuery(
@@ -549,18 +593,20 @@ public class QueryRestControllerJsonTest extends AbstractDaqRestTest { .andExpect(MockMvcResultMatchers.jsonPath("$[0].channel.name").value(TEST_CHANNEL_01)) .andExpect(MockMvcResultMatchers.jsonPath("$[0].channel.name").value(TEST_CHANNEL_01))
.andExpect(MockMvcResultMatchers.jsonPath("$[0].channel.name").value(TEST_CHANNEL_01)) .andExpect(MockMvcResultMatchers.jsonPath("$[0].channel.name").value(TEST_CHANNEL_01))
.andExpect(MockMvcResultMatchers.jsonPath("$[0].channel.backend").value(Backend.databuffer.name())) .andExpect(MockMvcResultMatchers.jsonPath("$[0].channel.backend").value(Backend.databuffer.name()))
.andExpect(MockMvcResultMatchers.jsonPath("$[0].data").isArray()) .andExpect(MockMvcResultMatchers.jsonPath("$[0].data").isArray())
.andExpect(MockMvcResultMatchers.jsonPath("$[0].data[0].pulseId").value(10)) .andExpect(MockMvcResultMatchers.jsonPath("$[0].data[0].pulseId").value(100))
.andExpect(MockMvcResultMatchers.jsonPath("$[0].data[0].globalMillis").value(100)) .andExpect(MockMvcResultMatchers.jsonPath("$[0].data[0].globalSeconds").value(
TestTimeUtils.getTimeStr(1, 0)))
.andExpect(MockMvcResultMatchers.jsonPath("$[0].data[0].eventCount").value(5)) .andExpect(MockMvcResultMatchers.jsonPath("$[0].data[0].eventCount").value(5))
.andExpect(MockMvcResultMatchers.jsonPath("$[0].data[1].pulseId").value(15)) .andExpect(MockMvcResultMatchers.jsonPath("$[0].data[1].pulseId").value(105))
.andExpect(MockMvcResultMatchers.jsonPath("$[0].data[1].globalMillis").value(150)) .andExpect(MockMvcResultMatchers.jsonPath("$[0].data[1].globalSeconds").value(
TestTimeUtils.getTimeStr(1, 50000000)))
.andExpect(MockMvcResultMatchers.jsonPath("$[0].data[1].eventCount").value(5)); .andExpect(MockMvcResultMatchers.jsonPath("$[0].data[1].eventCount").value(5));
} }
@Test @Test
public void testDateRangeQueryBinSizeAggregate() throws Exception { public void testDateRangeQueryBinSizeAggregate() throws Exception {
long startTime = 1000; long startTime = 10000;
long endTime = 1999; long endTime = 10999;
String startDate = RequestRangeDate.format(startTime); String startDate = RequestRangeDate.format(startTime);
String endDate = RequestRangeDate.format(endTime); String endDate = RequestRangeDate.format(endTime);
DAQQuery request = new DAQQuery( DAQQuery request = new DAQQuery(
@@ -588,35 +634,45 @@ public class QueryRestControllerJsonTest extends AbstractDaqRestTest { .andExpect(MockMvcResultMatchers.jsonPath("$[0].channel.name").value(TEST_CHANNEL_01)) .andExpect(MockMvcResultMatchers.jsonPath("$[0].channel.name").value(TEST_CHANNEL_01))
.andExpect(MockMvcResultMatchers.jsonPath("$[0].channel.name").value(TEST_CHANNEL_01)) .andExpect(MockMvcResultMatchers.jsonPath("$[0].channel.name").value(TEST_CHANNEL_01))
.andExpect(MockMvcResultMatchers.jsonPath("$[0].channel.backend").value(Backend.databuffer.name())) .andExpect(MockMvcResultMatchers.jsonPath("$[0].channel.backend").value(Backend.databuffer.name()))
.andExpect(MockMvcResultMatchers.jsonPath("$[0].data").isArray()) .andExpect(MockMvcResultMatchers.jsonPath("$[0].data").isArray())
.andExpect(MockMvcResultMatchers.jsonPath("$[0].data[0].pulseId").value(100)) .andExpect(MockMvcResultMatchers.jsonPath("$[0].data[0].pulseId").value(1000))
.andExpect(MockMvcResultMatchers.jsonPath("$[0].data[0].globalMillis").value(1000)) .andExpect(MockMvcResultMatchers.jsonPath("$[0].data[0].globalSeconds").value(
TestTimeUtils.getTimeStr(10, 0)))
.andExpect(MockMvcResultMatchers.jsonPath("$[0].data[0].eventCount").value(10)) .andExpect(MockMvcResultMatchers.jsonPath("$[0].data[0].eventCount").value(10))
.andExpect(MockMvcResultMatchers.jsonPath("$[0].data[1].pulseId").value(110)) .andExpect(MockMvcResultMatchers.jsonPath("$[0].data[1].pulseId").value(1010))
.andExpect(MockMvcResultMatchers.jsonPath("$[0].data[1].globalMillis").value(1100)) .andExpect(MockMvcResultMatchers.jsonPath("$[0].data[1].globalSeconds").value(
TestTimeUtils.getTimeStr(10, 100000000)))
.andExpect(MockMvcResultMatchers.jsonPath("$[0].data[1].eventCount").value(10)) .andExpect(MockMvcResultMatchers.jsonPath("$[0].data[1].eventCount").value(10))
.andExpect(MockMvcResultMatchers.jsonPath("$[0].data[2].pulseId").value(120)) .andExpect(MockMvcResultMatchers.jsonPath("$[0].data[2].pulseId").value(1020))
.andExpect(MockMvcResultMatchers.jsonPath("$[0].data[2].globalMillis").value(1200)) .andExpect(MockMvcResultMatchers.jsonPath("$[0].data[2].globalSeconds").value(
TestTimeUtils.getTimeStr(10, 200000000)))
.andExpect(MockMvcResultMatchers.jsonPath("$[0].data[2].eventCount").value(10)) .andExpect(MockMvcResultMatchers.jsonPath("$[0].data[2].eventCount").value(10))
.andExpect(MockMvcResultMatchers.jsonPath("$[0].data[3].pulseId").value(130)) .andExpect(MockMvcResultMatchers.jsonPath("$[0].data[3].pulseId").value(1030))
.andExpect(MockMvcResultMatchers.jsonPath("$[0].data[3].globalMillis").value(1300)) .andExpect(MockMvcResultMatchers.jsonPath("$[0].data[3].globalSeconds").value(
TestTimeUtils.getTimeStr(10, 300000000)))
.andExpect(MockMvcResultMatchers.jsonPath("$[0].data[3].eventCount").value(10)) .andExpect(MockMvcResultMatchers.jsonPath("$[0].data[3].eventCount").value(10))
.andExpect(MockMvcResultMatchers.jsonPath("$[0].data[4].pulseId").value(140)) .andExpect(MockMvcResultMatchers.jsonPath("$[0].data[4].pulseId").value(1040))
.andExpect(MockMvcResultMatchers.jsonPath("$[0].data[4].globalMillis").value(1400)) .andExpect(MockMvcResultMatchers.jsonPath("$[0].data[4].globalSeconds").value(
TestTimeUtils.getTimeStr(10, 400000000)))
.andExpect(MockMvcResultMatchers.jsonPath("$[0].data[4].eventCount").value(10)) .andExpect(MockMvcResultMatchers.jsonPath("$[0].data[4].eventCount").value(10))
.andExpect(MockMvcResultMatchers.jsonPath("$[0].data[5].pulseId").value(150)) .andExpect(MockMvcResultMatchers.jsonPath("$[0].data[5].pulseId").value(1050))
.andExpect(MockMvcResultMatchers.jsonPath("$[0].data[5].globalMillis").value(1500)) .andExpect(MockMvcResultMatchers.jsonPath("$[0].data[5].globalSeconds").value(
TestTimeUtils.getTimeStr(10, 500000000)))
.andExpect(MockMvcResultMatchers.jsonPath("$[0].data[5].eventCount").value(10)) .andExpect(MockMvcResultMatchers.jsonPath("$[0].data[5].eventCount").value(10))
.andExpect(MockMvcResultMatchers.jsonPath("$[0].data[6].pulseId").value(160)) .andExpect(MockMvcResultMatchers.jsonPath("$[0].data[6].pulseId").value(1060))
.andExpect(MockMvcResultMatchers.jsonPath("$[0].data[6].globalMillis").value(1600)) .andExpect(MockMvcResultMatchers.jsonPath("$[0].data[6].globalSeconds").value(
TestTimeUtils.getTimeStr(10, 600000000)))
.andExpect(MockMvcResultMatchers.jsonPath("$[0].data[6].eventCount").value(10)) .andExpect(MockMvcResultMatchers.jsonPath("$[0].data[6].eventCount").value(10))
.andExpect(MockMvcResultMatchers.jsonPath("$[0].data[7].pulseId").value(170)) .andExpect(MockMvcResultMatchers.jsonPath("$[0].data[7].pulseId").value(1070))
.andExpect(MockMvcResultMatchers.jsonPath("$[0].data[7].globalMillis").value(1700)) .andExpect(MockMvcResultMatchers.jsonPath("$[0].data[7].globalSeconds").value(
TestTimeUtils.getTimeStr(10, 700000000)))
.andExpect(MockMvcResultMatchers.jsonPath("$[0].data[7].eventCount").value(10)) .andExpect(MockMvcResultMatchers.jsonPath("$[0].data[7].eventCount").value(10))
.andExpect(MockMvcResultMatchers.jsonPath("$[0].data[8].pulseId").value(180)) .andExpect(MockMvcResultMatchers.jsonPath("$[0].data[8].pulseId").value(1080))
.andExpect(MockMvcResultMatchers.jsonPath("$[0].data[8].globalMillis").value(1800)) .andExpect(MockMvcResultMatchers.jsonPath("$[0].data[8].globalSeconds").value(
TestTimeUtils.getTimeStr(10, 800000000)))
.andExpect(MockMvcResultMatchers.jsonPath("$[0].data[8].eventCount").value(10)) .andExpect(MockMvcResultMatchers.jsonPath("$[0].data[8].eventCount").value(10))
.andExpect(MockMvcResultMatchers.jsonPath("$[0].data[9].pulseId").value(190)) .andExpect(MockMvcResultMatchers.jsonPath("$[0].data[9].pulseId").value(1090))
.andExpect(MockMvcResultMatchers.jsonPath("$[0].data[9].globalMillis").value(1900)) .andExpect(MockMvcResultMatchers.jsonPath("$[0].data[9].globalSeconds").value(
TestTimeUtils.getTimeStr(10, 900000000)))
.andExpect(MockMvcResultMatchers.jsonPath("$[0].data[9].eventCount").value(10)); .andExpect(MockMvcResultMatchers.jsonPath("$[0].data[9].eventCount").value(10));
} }
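The expected `globalSeconds` values above assume that `TestTimeUtils.getTimeStr(seconds, nanos)` renders an epoch timestamp as a decimal-seconds string with a nine-digit fractional part (e.g. `getTimeStr(1, 10000000)` yields `"1.010000000"`), matching the `globalSeconds` format introduced by this commit. A minimal sketch of such a helper, under that assumption (the project's actual `TestTimeUtils` may be implemented differently):

```java
// Hypothetical sketch only; not the actual ch.psi.daq TestTimeUtils implementation.
public final class TestTimeUtils {

    private TestTimeUtils() {
    }

    /**
     * Formats an epoch timestamp as seconds with nanosecond precision,
     * e.g. getTimeStr(2, 10000000) -> "2.010000000".
     */
    public static String getTimeStr(long seconds, long nanos) {
        return String.format("%d.%09d", seconds, nanos);
    }
}
```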
View File
@@ -1,5 +1,6 @@
package ch.psi.daq.test.queryrest.query; package ch.psi.daq.test.queryrest.query;
import java.math.BigDecimal;
import java.util.List; import java.util.List;
import java.util.Random; import java.util.Random;
import java.util.concurrent.atomic.AtomicLong; import java.util.concurrent.atomic.AtomicLong;
@@ -10,6 +11,7 @@ import java.util.stream.Stream; import java.util.stream.Stream;
import com.google.common.collect.Lists; import com.google.common.collect.Lists;
import ch.psi.daq.common.ordering.Ordering; import ch.psi.daq.common.ordering.Ordering;
import ch.psi.daq.common.time.TimeUtils;
import ch.psi.daq.domain.DataEvent; import ch.psi.daq.domain.DataEvent;
import ch.psi.daq.domain.cassandra.ChannelEvent; import ch.psi.daq.domain.cassandra.ChannelEvent;
import ch.psi.daq.domain.reader.Backend; import ch.psi.daq.domain.reader.Backend;
@@ -51,25 +53,25 @@ public class DummyArchiverApplianceReader implements DataReader { } }
} }
@Override @Override
public Stream<? extends DataEvent> getEventStream(String channel, long startMillis, long startNanos, long endMillis, public Stream<? extends DataEvent> getEventStream(String channel, BigDecimal startTime, BigDecimal endTime,
long endNanos, Ordering ordering, boolean aggregateValues, String... columns) { Ordering ordering, boolean aggregateValues, String... columns) {
return getElements(channel, startMillis / 10, endMillis / 10); return getElements(channel, TimeUtils.getMillis(startTime) / 10, TimeUtils.getMillis(endTime) / 10);
} }
protected Stream<? extends DataEvent> getElements(String channel, long start, long end) { protected Stream<? extends DataEvent> getElements(String channel, long start, long end) {
String channelLower = channel.toLowerCase(); String channelLower = channel.toLowerCase();
Stream<? extends DataEvent> eventStream = LongStream.rangeClosed(start, end).mapToObj(i -> { Stream<? extends DataEvent> eventStream = LongStream.rangeClosed(start, end).mapToObj(i -> {
BigDecimal time = TimeUtils.getTimeFromMillis(i * 10, 0);
if (channelLower.contains("waveform")) { if (channelLower.contains("waveform")) {
long[] value = random.longs(2048).toArray(); long[] value = random.longs(2048).toArray();
value[0] = i; value[0] = i;
return new ChannelEvent( return new ChannelEvent(
channel, channel,
i * 10, time,
0,
i, i,
i * 10, time,
0,
1, 1,
value value
); );
@@ -82,11 +84,9 @@ public class DummyArchiverApplianceReader implements DataReader { value[0] = i; value[0] = i;
value[0] = i; value[0] = i;
return new ChannelEvent( return new ChannelEvent(
channel, channel,
i * 10, time,
0,
i, i,
i * 10, time,
0,
1, 1,
value, value,
shape shape
@@ -94,11 +94,9 @@ public class DummyArchiverApplianceReader implements DataReader { } else { } else {
} else { } else {
return new ChannelEvent( return new ChannelEvent(
channel, channel,
i * 10, time,
0,
i, i,
i * 10, time,
0,
1, 1,
i i
); );
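The dummy readers now translate between their millisecond-based event indices and the `BigDecimal` seconds used by the new reader API via `TimeUtils.getTimeFromMillis(millis, nanos)` and `TimeUtils.getMillis(time)`. A rough, self-contained sketch of the conversions these calls are assumed to perform (the real `ch.psi.daq.common.time.TimeUtils` may differ):

```java
import java.math.BigDecimal;
import java.math.RoundingMode;

// Hypothetical stand-in for illustration; not the actual TimeUtils implementation.
final class TimeConversionSketch {

    private TimeConversionSketch() {
    }

    /** millis plus a nanosecond offset -> epoch seconds, e.g. (1010, 0) -> 1.010000000 */
    static BigDecimal getTimeFromMillis(long millis, long nanos) {
        return BigDecimal.valueOf(millis)
                .movePointLeft(3)
                .add(BigDecimal.valueOf(nanos).movePointLeft(9))
                .setScale(9, RoundingMode.UNNECESSARY);
    }

    /** epoch seconds -> millis, truncating sub-millisecond precision */
    static long getMillis(BigDecimal seconds) {
        return seconds.movePointRight(3).longValue();
    }
}
```

With these conversions, `getEventStream(channel, startTime, endTime, ...)` keeps producing one dummy event per 10 ms, just as the previous millis-based signature did.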
View File
@@ -3,6 +3,7 @@
*/ */
package ch.psi.daq.test.queryrest.query; package ch.psi.daq.test.queryrest.query;
import java.math.BigDecimal;
import java.util.List; import java.util.List;
import java.util.Random; import java.util.Random;
import java.util.concurrent.CompletableFuture; import java.util.concurrent.CompletableFuture;
@@ -21,6 +22,7 @@ import com.google.common.collect.Lists; import com.google.common.collect.Lists;
import ch.psi.daq.cassandra.reader.CassandraReader; import ch.psi.daq.cassandra.reader.CassandraReader;
import ch.psi.daq.cassandra.util.test.CassandraDataGen; import ch.psi.daq.cassandra.util.test.CassandraDataGen;
import ch.psi.daq.common.ordering.Ordering; import ch.psi.daq.common.ordering.Ordering;
import ch.psi.daq.common.time.TimeUtils;
import ch.psi.daq.domain.DataEvent; import ch.psi.daq.domain.DataEvent;
import ch.psi.daq.domain.cassandra.ChannelConfiguration; import ch.psi.daq.domain.cassandra.ChannelConfiguration;
import ch.psi.daq.domain.cassandra.ChannelEvent; import ch.psi.daq.domain.cassandra.ChannelEvent;
@@ -107,9 +109,9 @@ public class DummyCassandraReader implements CassandraReader { * @{inheritDoc * @{inheritDoc
* @{inheritDoc * @{inheritDoc
*/ */
@Override @Override
public Stream<? extends DataEvent> getEventStream(String channel, long startMillis, long startNanos, long endMillis, public Stream<? extends DataEvent> getEventStream(String channel, BigDecimal startTime, BigDecimal endTime,
long endNanos, Ordering ordering, boolean aggregateValues, String... columns) { Ordering ordering, boolean aggregateValues, String... columns) {
return getDummyEventStream(channel, startMillis / 10, endMillis / 10); return getDummyEventStream(channel, TimeUtils.getMillis(startTime) / 10, TimeUtils.getMillis(endTime) / 10);
} }
/** /**
@@ -160,16 +162,16 @@ public class DummyCassandraReader implements CassandraReader { String channelLower = channel.toLowerCase(); String channelLower = channel.toLowerCase();
String channelLower = channel.toLowerCase(); String channelLower = channel.toLowerCase();
Stream<? extends DataEvent> eventStream = LongStream.rangeClosed(startIndex, endIndex).mapToObj(i -> { Stream<? extends DataEvent> eventStream = LongStream.rangeClosed(startIndex, endIndex).mapToObj(i -> {
BigDecimal time = TimeUtils.getTimeFromMillis(i * 10, 0);
if (channelLower.contains("waveform")) { if (channelLower.contains("waveform")) {
long[] value = random.longs(2048).toArray(); long[] value = random.longs(2048).toArray();
value[0] = i; value[0] = i;
return new ChannelEvent( return new ChannelEvent(
channel, channel,
i * 10, time,
0,
i, i,
i * 10, time,
0,
KEYSPACE, KEYSPACE,
value value
); );
@@ -182,11 +184,9 @@ public class DummyCassandraReader implements CassandraReader { value[0] = i; value[0] = i;
value[0] = i; value[0] = i;
return new ChannelEvent( return new ChannelEvent(
channel, channel,
i * 10, time,
0,
i, i,
i * 10, time,
0,
KEYSPACE, KEYSPACE,
value, value,
shape shape
@@ -194,11 +194,9 @@ public class DummyCassandraReader implements CassandraReader { } else { } else {
} else { } else {
return new ChannelEvent( return new ChannelEvent(
channel, channel,
i * 10, time,
0,
i, i,
i * 10, time,
0,
KEYSPACE, KEYSPACE,
i i
); );