diff --git a/.project b/.project
index 08328ec..56f8001 100644
--- a/.project
+++ b/.project
@@ -5,11 +5,6 @@
-
- org.eclipse.wst.common.project.facet.core.builder
-
-
-
org.eclipse.jdt.core.javabuilder
@@ -25,6 +20,5 @@
org.springframework.ide.eclipse.core.springnature
org.springsource.ide.eclipse.gradle.core.nature
org.eclipse.jdt.core.javanature
- org.eclipse.wst.common.project.facet.core.nature
diff --git a/.settings/org.eclipse.jdt.core.prefs b/.settings/org.eclipse.jdt.core.prefs
index ffdb1ff..4d86e9f 100644
--- a/.settings/org.eclipse.jdt.core.prefs
+++ b/.settings/org.eclipse.jdt.core.prefs
@@ -1,5 +1,5 @@
#
-#Wed Feb 17 10:04:43 CET 2016
+#Tue Mar 08 14:20:11 CET 2016
org.eclipse.jdt.core.compiler.debug.localVariable=generate
org.eclipse.jdt.core.compiler.compliance=1.8
org.eclipse.jdt.core.compiler.codegen.unusedLocal=preserve
diff --git a/Readme.md b/Readme.md
index bb4a15c..6f61a38 100644
--- a/Readme.md
+++ b/Readme.md
@@ -125,33 +125,25 @@ Queries are applied to a range. The following types of ranges are supported
```json
"range":{
"startDate":"2015-08-06T18:00:00.000",
- "startNanos":0,
"endDate":"2015-08-06T18:59:59.999",
- "endNanos":999999
}
```
- **startDate**: The start date of the time range in ISO8601 format *YYYY-MM-DDThh:mm:ss.sTZD* (e.g. *1997-07-16T19:20:30.475+02:00*; omitting *+02:00* falls back to the local time zone).
-- **[startNanos]**: The optional nanosecond offset to the milliseconds (range [0..999999]).
- **endDate**: The end date of the time range.
-- **[endNanos]**: The optional nanosecond offset.
### By Time
```json
"range":{
- "startMillis":0,
- "startNanos":0,
- "endMillis":100,
- "endNanos":999999
+ "startSeconds":"0.0",
+   "endSeconds":"0.100999999"
}
```
-- **startMillis**: The start time of the range in milliseconds since January 1, 1970 (the UNIX/JAVA epoch).
-- **[startNanos]**: The optional nanosecond offset to the milliseconds (range [0..999999]).
-- **endMillis**: The end time of the range.
-- **[endNanos]**: The optional nanosecond offset.
+- **startSeconds**: The start time of the range in seconds since January 1, 1970 (the UNIX epoch) as a decimal value including fractional seconds.
+- **endSeconds**: The end time of the range in seconds since January 1, 1970 (the UNIX epoch) as a decimal value including fractional seconds.
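+For example, *"0.030999999"* (used in the query examples below) corresponds to 30 milliseconds plus 999999 nanoseconds after the epoch.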
@@ -235,44 +227,36 @@ The following examples build on waveform data (see below). They also work for sc
"channel":"Channel_01",
"data":[
{
- "iocMillis":0,
- "iocNanos":0,
+ "iocSeconds":"0.000000000",
"pulseId":0,
- "globalMillis":0,
- "globalNanos":0,
+ "globalSeconds":"0.000000000",
"shape":[
4
],
"value":[1,2,3,4]
},
{
- "iocMillis":10,
- "iocNanos":0,
+ "iocSeconds":"0.010000000",
"pulseId":1,
- "globalMillis":10,
- "globalNanos":0,
+ "globalSeconds":"0.010000000",
"shape":[
4
],
"value":[2,3,4,5]
},
{
- "iocMillis":20,
- "iocNanos":0,
+ "iocSeconds":"0.020000000",
"pulseId":2,
- "globalMillis":20,
- "globalNanos":0,
+ "globalSeconds":"0.020000000",
"shape":[
4
],
"value":[3,4,5,6]
},
{
- "iocMillis":30,
- "iocNanos":0,
+ "iocSeconds":"0.030000000",
"pulseId":3,
- "globalMillis":30,
- "globalNanos":0,
+ "globalSeconds":"0.030000000",
"shape":[
4
],
@@ -316,10 +300,8 @@ See JSON representation of the data above.
```json
{
"range":{
- "startMillis":0,
- "startNanos":0,
- "endMillis":30,
- "endNanos":999999
+ "startSeconds":"0.0",
+ "endSeconds":"0.030999999"
},
"channels":[
"Channel_01"
@@ -330,7 +312,7 @@ See JSON representation of the data above.
###### Command
```bash
-curl -H "Content-Type: application/json" -X POST -d '{"range":{"startMillis":0,"startNanos":0,"endMillis":30,"endNanos":999999},"channels":["Channel_01"]}' http://data-api.psi.ch/sf/query
+curl -H "Content-Type: application/json" -X POST -d '{"range":{"startSeconds":"0.0","endSeconds":"0.030999999"},"channels":["Channel_01"]}' http://data-api.psi.ch/sf/query
```
###### Response
@@ -345,9 +327,7 @@ See JSON representation of the data above.
{
"range":{
"startDate":"1970-01-01T01:00:00.000",
- "startNanos":0,
- "endDate":"1970-01-01T01:00:00.030",
- "endNanos":999999
+ "endDate":"1970-01-01T01:00:00.030"
},
"channels":[
"Channel_01"
@@ -360,7 +340,7 @@ Supported format is ISO8601 *YYYY-MM-DDThh:mm:ss.sTZD* (e.g. *1997-07-16T19:20:3
###### Command
```bash
-curl -H "Content-Type: application/json" -X POST -d '{"range":{"startDate":"1970-01-01T01:00:00.000","startNanos":0,"endDate":"1970-01-01T01:00:00.030","endNanos":999999},"channels":["Channel_01"]}' http://data-api.psi.ch/sf/query
+curl -H "Content-Type: application/json" -X POST -d '{"range":{"startDate":"1970-01-01T01:00:00.000","endDate":"1970-01-01T01:00:00.030"},"channels":["Channel_01"]}' http://data-api.psi.ch/sf/query
```
###### Response
@@ -375,10 +355,8 @@ See JSON representation of the data above.
{
"backend":"archiverappliance",
"range":{
- "startMillis":0,
- "startNanos":0,
- "endMillis":30,
- "endNanos":999999
+ "startSeconds":"0.0",
+ "endSeconds":"0.030999999"
},
"channels":[
"Channel_01"
@@ -392,7 +370,7 @@ Archiver Appliance supports queries by *time range* and *date range* only (as it
###### Command
```bash
-curl -H "Content-Type: application/json" -X POST -d '{"dbmode":"archiverappliance","range":{"startMillis":0,"startNanos":0,"endMillis":30,"endNanos":999999},"channels":["Channel_01"]}' http://data-api.psi.ch/sf/query
+curl -H "Content-Type: application/json" -X POST -d '{"dbmode":"archiverappliance","range":{"startSeconds":"0.0","endSeconds":"0.030999999"},"channels":["Channel_01"]}' http://data-api.psi.ch/sf/query
```
###### Response
@@ -453,10 +431,8 @@ curl --compressed -H "Content-Type: application/json" -X POST -d '{"compression"
"fields":[
"channel",
"pulseId",
- "iocMillis",
- "iocNanos",
- "globalMillis",
- "globalNanos",
+ "iocSeconds",
+ "globalSeconds",
"shape",
"eventCount",
"value"
@@ -464,10 +440,12 @@ curl --compressed -H "Content-Type: application/json" -X POST -d '{"compression"
}
```
+It is possible to request the time in seconds since January 1, 1970 (the UNIX epoch) as a decimal value including fractional seconds (using the fields *globalSeconds* and *iocSeconds*), or in milliseconds since the epoch (using the fields *globalMillis* and *iocMillis*).
+
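+For illustration, a variation of the command below that requests the millisecond fields instead could look like this (illustrative sketch, not one of the recorded examples):
+
+```bash
+curl -H "Content-Type: application/json" -X POST -d '{"responseFormat":"csv","range":{"startPulseId":0,"endPulseId":4},"channels":["channel1","channel2"],"fields":["channel","pulseId","iocMillis","globalMillis","shape","eventCount","value"]}' http://data-api.psi.ch/sf/query
+```
+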
###### Command
```bash
-curl -H "Content-Type: application/json" -X POST -d '{"responseFormat":"csv","range":{"startPulseId":0,"endPulseId":4},"channels":["channel1","channel2"],"fields":["channel","pulseId","iocMillis","iocNanos","globalMillis","globalNanos","shape","eventCount","value"]}' http://data-api.psi.ch/sf/query
+curl -H "Content-Type: application/json" -X POST -d '{"responseFormat":"csv","range":{"startPulseId":0,"endPulseId":4},"channels":["channel1","channel2"],"fields":["channel","pulseId","iocSeconds","globalSeconds","shape","eventCount","value"]}' http://data-api.psi.ch/sf/query
```
###### Response
@@ -475,17 +453,17 @@ curl -H "Content-Type: application/json" -X POST -d '{"responseFormat":"csv","ra
The response is in CSV.
```text
-channel;pulseId;iocMillis;iocNanos;globalMillis;globalNanos;shape;eventCount;value
-testChannel1;0;0;0;0;0;[1];1;0
-testChannel1;1;10;0;10;0;[1];1;1
-testChannel1;2;20;0;20;0;[1];1;2
-testChannel1;3;30;0;30;0;[1];1;3
-testChannel1;4;40;0;40;0;[1];1;4
-testChannel2;0;0;0;0;0;[1];1;0
-testChannel2;1;10;0;10;0;[1];1;1
-testChannel2;2;20;0;20;0;[1];1;2
-testChannel2;3;30;0;30;0;[1];1;3
-testChannel2;4;40;0;40;0;[1];1;4
+channel;pulseId;iocSeconds;globalSeconds;shape;eventCount;value
+testChannel1;0;0.000000000;0.000000000;[1];1;0
+testChannel1;1;0.010000000;0.010000000;[1];1;1
+testChannel1;2;0.020000000;0.020000000;[1];1;2
+testChannel1;3;0.030000000;0.030000000;[1];1;3
+testChannel1;4;0.040000000;0.040000000;[1];1;4
+testChannel2;0;0.000000000;0.000000000;[1];1;0
+testChannel2;1;0.010000000;0.010000000;[1];1;1
+testChannel2;2;0.020000000;0.020000000;[1];1;2
+testChannel2;3;0.030000000;0.030000000;[1];1;3
+testChannel2;4;0.040000000;0.040000000;[1];1;4
```
##### Querying for Specific Fields
@@ -735,8 +713,8 @@ Array value [aggregations](https://github.psi.ch/projects/ST/repos/ch.psi.daq.qu
"aggregations":["min","max","mean"],
"fields":["globalMillis","value"],
"range":{
- "globalMillis":0,
- "globalMillis":3
+ "startSeconds":"0.0",
+ "endSeconds":"0.030000000"
},
"channels":[
"Channel_01"
@@ -747,7 +725,7 @@ Array value [aggregations](https://github.psi.ch/projects/ST/repos/ch.psi.daq.qu
###### Command
```bash
-curl -H "Content-Type: application/json" -X POST -d '{"binSize":10,"aggregationType":"value","aggregations":["min","max","mean"],"fields":["globalMillis","value"],"range":{"globalMillis":0,"globalMillis":3},"channels":["Channel_01"]}' http://data-api.psi.ch/sf/query
+curl -H "Content-Type: application/json" -X POST -d '{"binSize":10,"aggregationType":"value","aggregations":["min","max","mean"],"fields":["globalMillis","value"],"range":{"startSeconds":"0.0","endSeconds":"0.030000000"},"channels":["Channel_01"]}' http://data-api.psi.ch/sf/query
```
###### Response
diff --git a/src/main/resources/queryrest.properties b/src/main/resources/queryrest.properties
index a21e89a..e26d646 100644
--- a/src/main/resources/queryrest.properties
+++ b/src/main/resources/queryrest.properties
@@ -1,6 +1,6 @@
# defines the fields that are included in the response
# if no fields have been specified by the user
-queryrest.default.response.fields=channel,pulseId,globalMillis,globalNanos,iocMillis,iocNanos,shape,eventCount,value
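+# additional time fields (globalMillis, iocSeconds, iocMillis) can still be requested explicitly via the query's "fields" parameter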
+queryrest.default.response.fields=channel,pulseId,globalSeconds,shape,eventCount,value
# aggregations which are included in the response by default if aggregation is enabled for a given query
queryrest.default.response.aggregations=min,mean,max
diff --git a/src/test/java/ch/psi/daq/test/queryrest/DaqWebMvcConfig.java b/src/test/java/ch/psi/daq/test/queryrest/DaqWebMvcConfig.java
index f92b2ae..3ffcded 100644
--- a/src/test/java/ch/psi/daq/test/queryrest/DaqWebMvcConfig.java
+++ b/src/test/java/ch/psi/daq/test/queryrest/DaqWebMvcConfig.java
@@ -12,13 +12,11 @@ import org.springframework.web.servlet.config.annotation.WebMvcConfigurationSupp
import ch.psi.daq.cassandra.reader.CassandraReader;
import ch.psi.daq.cassandra.util.test.CassandraDataGen;
-import ch.psi.daq.domain.reader.DataReader;
import ch.psi.daq.query.processor.QueryProcessor;
import ch.psi.daq.query.processor.QueryProcessorLocal;
import ch.psi.daq.test.cassandra.admin.CassandraTestAdmin;
import ch.psi.daq.test.cassandra.admin.CassandraTestAdminImpl;
import ch.psi.daq.test.query.config.LocalQueryTestConfig;
-import ch.psi.daq.test.queryrest.query.DummyArchiverApplianceReader;
import ch.psi.daq.test.queryrest.query.DummyCassandraReader;
@Configuration
diff --git a/src/test/java/ch/psi/daq/test/queryrest/controller/QueryRestControllerCsvTest.java b/src/test/java/ch/psi/daq/test/queryrest/controller/QueryRestControllerCsvTest.java
index 6e3fcec..e1cba45 100644
--- a/src/test/java/ch/psi/daq/test/queryrest/controller/QueryRestControllerCsvTest.java
+++ b/src/test/java/ch/psi/daq/test/queryrest/controller/QueryRestControllerCsvTest.java
@@ -29,6 +29,8 @@ import ch.psi.daq.cassandra.request.range.RequestRangePulseId;
import ch.psi.daq.cassandra.request.range.RequestRangeTime;
import ch.psi.daq.cassandra.util.test.CassandraDataGen;
import ch.psi.daq.common.ordering.Ordering;
+import ch.psi.daq.common.time.TimeUtils;
+import ch.psi.daq.domain.test.TestTimeUtils;
import ch.psi.daq.query.model.Aggregation;
import ch.psi.daq.query.model.AggregationType;
import ch.psi.daq.query.model.Compression;
@@ -79,14 +81,14 @@ public class QueryRestControllerCsvTest extends AbstractDaqRestTest {
cellProcessors.add(new NotNull());
queryFields.add(QueryField.pulseId);
cellProcessors.add(new NotNull());
+ queryFields.add(QueryField.iocSeconds);
+ cellProcessors.add(new NotNull());
queryFields.add(QueryField.iocMillis);
cellProcessors.add(new NotNull());
- queryFields.add(QueryField.iocNanos);
+ queryFields.add(QueryField.globalSeconds);
cellProcessors.add(new NotNull());
queryFields.add(QueryField.globalMillis);
cellProcessors.add(new NotNull());
- queryFields.add(QueryField.globalNanos);
- cellProcessors.add(new NotNull());
queryFields.add(QueryField.shape);
cellProcessors.add(new NotNull());
queryFields.add(QueryField.eventCount);
@@ -125,10 +127,14 @@ public class QueryRestControllerCsvTest extends AbstractDaqRestTest {
while ((customerMap = mapReader.read(header, processors)) != null) {
assertEquals(TEST_CHANNEL + channelCount, customerMap.get(QueryField.channel.name()));
assertEquals("" + pulse, customerMap.get(QueryField.pulseId.name()));
- assertEquals("" + pulse * 10, customerMap.get(QueryField.iocMillis.name()));
- assertEquals("0", customerMap.get(QueryField.iocNanos.name()));
- assertEquals("" + pulse * 10, customerMap.get(QueryField.globalMillis.name()));
- assertEquals("0", customerMap.get(QueryField.globalNanos.name()));
+ assertEquals("" + TimeUtils.getTimeStr(TestTimeUtils.getTimeFromPulseId(pulse)),
+ customerMap.get(QueryField.iocSeconds.name()));
+ assertEquals("" + TimeUtils.getMillis(TestTimeUtils.getTimeFromPulseId(pulse)),
+ customerMap.get(QueryField.iocMillis.name()));
+ assertEquals("" + TimeUtils.getTimeStr(TestTimeUtils.getTimeFromPulseId(pulse)),
+ customerMap.get(QueryField.globalSeconds.name()));
+ assertEquals("" + TimeUtils.getMillis(TestTimeUtils.getTimeFromPulseId(pulse)),
+ customerMap.get(QueryField.globalMillis.name()));
assertEquals("[1]", customerMap.get(QueryField.shape.name()));
assertEquals("1", customerMap.get(QueryField.eventCount.name()));
assertEquals("" + pulse, customerMap.get(QueryField.value.name()));
@@ -170,14 +176,14 @@ public class QueryRestControllerCsvTest extends AbstractDaqRestTest {
cellProcessors.add(new NotNull());
queryFields.add(QueryField.pulseId);
cellProcessors.add(new NotNull());
+ queryFields.add(QueryField.iocSeconds);
+ cellProcessors.add(new NotNull());
queryFields.add(QueryField.iocMillis);
cellProcessors.add(new NotNull());
- queryFields.add(QueryField.iocNanos);
+ queryFields.add(QueryField.globalSeconds);
cellProcessors.add(new NotNull());
queryFields.add(QueryField.globalMillis);
cellProcessors.add(new NotNull());
- queryFields.add(QueryField.globalNanos);
- cellProcessors.add(new NotNull());
queryFields.add(QueryField.shape);
cellProcessors.add(new NotNull());
queryFields.add(QueryField.eventCount);
@@ -218,10 +224,14 @@ public class QueryRestControllerCsvTest extends AbstractDaqRestTest {
while ((customerMap = mapReader.read(header, processors)) != null) {
assertEquals(TEST_CHANNEL + channelCount, customerMap.get(QueryField.channel.name()));
assertEquals("" + pulse, customerMap.get(QueryField.pulseId.name()));
- assertEquals("" + pulse * 10, customerMap.get(QueryField.iocMillis.name()));
- assertEquals("0", customerMap.get(QueryField.iocNanos.name()));
- assertEquals("" + pulse * 10, customerMap.get(QueryField.globalMillis.name()));
- assertEquals("0", customerMap.get(QueryField.globalNanos.name()));
+ assertEquals("" + TimeUtils.getTimeStr(TestTimeUtils.getTimeFromPulseId(pulse)),
+ customerMap.get(QueryField.iocSeconds.name()));
+ assertEquals("" + TimeUtils.getMillis(TestTimeUtils.getTimeFromPulseId(pulse)),
+ customerMap.get(QueryField.iocMillis.name()));
+ assertEquals("" + TimeUtils.getTimeStr(TestTimeUtils.getTimeFromPulseId(pulse)),
+ customerMap.get(QueryField.globalSeconds.name()));
+ assertEquals("" + TimeUtils.getMillis(TestTimeUtils.getTimeFromPulseId(pulse)),
+ customerMap.get(QueryField.globalMillis.name()));
assertEquals("[1]", customerMap.get(QueryField.shape.name()));
assertEquals("1", customerMap.get(QueryField.eventCount.name()));
assertEquals("" + pulse, customerMap.get(QueryField.value.name()));
@@ -257,14 +267,14 @@ public class QueryRestControllerCsvTest extends AbstractDaqRestTest {
cellProcessors.add(new NotNull());
queryFields.add(QueryField.pulseId);
cellProcessors.add(new NotNull());
+ queryFields.add(QueryField.iocSeconds);
+ cellProcessors.add(new NotNull());
queryFields.add(QueryField.iocMillis);
cellProcessors.add(new NotNull());
- queryFields.add(QueryField.iocNanos);
+ queryFields.add(QueryField.globalSeconds);
cellProcessors.add(new NotNull());
queryFields.add(QueryField.globalMillis);
cellProcessors.add(new NotNull());
- queryFields.add(QueryField.globalNanos);
- cellProcessors.add(new NotNull());
queryFields.add(QueryField.shape);
cellProcessors.add(new NotNull());
queryFields.add(QueryField.eventCount);
@@ -301,10 +311,14 @@ public class QueryRestControllerCsvTest extends AbstractDaqRestTest {
while ((customerMap = mapReader.read(header, processors)) != null) {
assertEquals(channelName, customerMap.get(QueryField.channel.name()));
assertEquals("" + pulse, customerMap.get(QueryField.pulseId.name()));
- assertEquals("" + pulse * 10, customerMap.get(QueryField.iocMillis.name()));
- assertEquals("0", customerMap.get(QueryField.iocNanos.name()));
- assertEquals("" + pulse * 10, customerMap.get(QueryField.globalMillis.name()));
- assertEquals("0", customerMap.get(QueryField.globalNanos.name()));
+ assertEquals("" + TimeUtils.getTimeStr(TestTimeUtils.getTimeFromPulseId(pulse)),
+ customerMap.get(QueryField.iocSeconds.name()));
+ assertEquals("" + TimeUtils.getMillis(TestTimeUtils.getTimeFromPulseId(pulse)),
+ customerMap.get(QueryField.iocMillis.name()));
+ assertEquals("" + TimeUtils.getTimeStr(TestTimeUtils.getTimeFromPulseId(pulse)),
+ customerMap.get(QueryField.globalSeconds.name()));
+ assertEquals("" + TimeUtils.getMillis(TestTimeUtils.getTimeFromPulseId(pulse)),
+ customerMap.get(QueryField.globalMillis.name()));
assertEquals("[2048]", customerMap.get(QueryField.shape.name()));
assertEquals("1", customerMap.get(QueryField.eventCount.name()));
assertTrue(customerMap.get(QueryField.value.name()).toString().startsWith("["));
@@ -321,8 +335,8 @@ public class QueryRestControllerCsvTest extends AbstractDaqRestTest {
public void testTimeRangeQuery() throws Exception {
DAQQuery request = new DAQQuery(
new RequestRangeTime(
- 0,
- 10),
+ TimeUtils.getTimeFromMillis(0, 0),
+ TimeUtils.getTimeFromMillis(10, 0)),
TEST_CHANNEL_NAMES);
request.setResponseFormat(ResponseFormat.CSV);
@@ -332,14 +346,14 @@ public class QueryRestControllerCsvTest extends AbstractDaqRestTest {
cellProcessors.add(new NotNull());
queryFields.add(QueryField.pulseId);
cellProcessors.add(new NotNull());
+ queryFields.add(QueryField.iocSeconds);
+ cellProcessors.add(new NotNull());
queryFields.add(QueryField.iocMillis);
cellProcessors.add(new NotNull());
- queryFields.add(QueryField.iocNanos);
+ queryFields.add(QueryField.globalSeconds);
cellProcessors.add(new NotNull());
queryFields.add(QueryField.globalMillis);
cellProcessors.add(new NotNull());
- queryFields.add(QueryField.globalNanos);
- cellProcessors.add(new NotNull());
queryFields.add(QueryField.shape);
cellProcessors.add(new NotNull());
queryFields.add(QueryField.eventCount);
@@ -378,10 +392,14 @@ public class QueryRestControllerCsvTest extends AbstractDaqRestTest {
while ((customerMap = mapReader.read(header, processors)) != null) {
assertEquals(TEST_CHANNEL + channelCount, customerMap.get(QueryField.channel.name()));
assertEquals("" + pulse, customerMap.get(QueryField.pulseId.name()));
- assertEquals("" + pulse * 10, customerMap.get(QueryField.iocMillis.name()));
- assertEquals("0", customerMap.get(QueryField.iocNanos.name()));
- assertEquals("" + pulse * 10, customerMap.get(QueryField.globalMillis.name()));
- assertEquals("0", customerMap.get(QueryField.globalNanos.name()));
+ assertEquals("" + TimeUtils.getTimeStr(TestTimeUtils.getTimeFromPulseId(pulse)),
+ customerMap.get(QueryField.iocSeconds.name()));
+ assertEquals("" + TimeUtils.getMillis(TestTimeUtils.getTimeFromPulseId(pulse)),
+ customerMap.get(QueryField.iocMillis.name()));
+ assertEquals("" + TimeUtils.getTimeStr(TestTimeUtils.getTimeFromPulseId(pulse)),
+ customerMap.get(QueryField.globalSeconds.name()));
+ assertEquals("" + TimeUtils.getMillis(TestTimeUtils.getTimeFromPulseId(pulse)),
+ customerMap.get(QueryField.globalMillis.name()));
assertEquals("[1]", customerMap.get(QueryField.shape.name()));
assertEquals("1", customerMap.get(QueryField.eventCount.name()));
assertEquals("" + pulse, customerMap.get(QueryField.value.name()));
@@ -418,14 +436,14 @@ public class QueryRestControllerCsvTest extends AbstractDaqRestTest {
cellProcessors.add(new NotNull());
queryFields.add(QueryField.pulseId);
cellProcessors.add(new NotNull());
+ queryFields.add(QueryField.iocSeconds);
+ cellProcessors.add(new NotNull());
queryFields.add(QueryField.iocMillis);
cellProcessors.add(new NotNull());
- queryFields.add(QueryField.iocNanos);
+ queryFields.add(QueryField.globalSeconds);
cellProcessors.add(new NotNull());
queryFields.add(QueryField.globalMillis);
cellProcessors.add(new NotNull());
- queryFields.add(QueryField.globalNanos);
- cellProcessors.add(new NotNull());
queryFields.add(QueryField.shape);
cellProcessors.add(new NotNull());
queryFields.add(QueryField.eventCount);
@@ -464,10 +482,14 @@ public class QueryRestControllerCsvTest extends AbstractDaqRestTest {
while ((customerMap = mapReader.read(header, processors)) != null) {
assertEquals(TEST_CHANNEL + channelCount, customerMap.get(QueryField.channel.name()));
assertEquals("" + pulse, customerMap.get(QueryField.pulseId.name()));
- assertEquals("" + pulse * 10, customerMap.get(QueryField.iocMillis.name()));
- assertEquals("0", customerMap.get(QueryField.iocNanos.name()));
- assertEquals("" + pulse * 10, customerMap.get(QueryField.globalMillis.name()));
- assertEquals("0", customerMap.get(QueryField.globalNanos.name()));
+ assertEquals("" + TimeUtils.getTimeStr(TestTimeUtils.getTimeFromPulseId(pulse)),
+ customerMap.get(QueryField.iocSeconds.name()));
+ assertEquals("" + TimeUtils.getMillis(TestTimeUtils.getTimeFromPulseId(pulse)),
+ customerMap.get(QueryField.iocMillis.name()));
+ assertEquals("" + TimeUtils.getTimeStr(TestTimeUtils.getTimeFromPulseId(pulse)),
+ customerMap.get(QueryField.globalSeconds.name()));
+ assertEquals("" + TimeUtils.getMillis(TestTimeUtils.getTimeFromPulseId(pulse)),
+ customerMap.get(QueryField.globalMillis.name()));
assertEquals("[1]", customerMap.get(QueryField.shape.name()));
assertEquals("1", customerMap.get(QueryField.eventCount.name()));
assertEquals("" + pulse, customerMap.get(QueryField.value.name()));
@@ -567,14 +589,14 @@ public class QueryRestControllerCsvTest extends AbstractDaqRestTest {
cellProcessors.add(new NotNull());
queryFields.add(QueryField.pulseId);
cellProcessors.add(new NotNull());
+ queryFields.add(QueryField.iocSeconds);
+ cellProcessors.add(new NotNull());
queryFields.add(QueryField.iocMillis);
cellProcessors.add(new NotNull());
- queryFields.add(QueryField.iocNanos);
+ queryFields.add(QueryField.globalSeconds);
cellProcessors.add(new NotNull());
queryFields.add(QueryField.globalMillis);
cellProcessors.add(new NotNull());
- queryFields.add(QueryField.globalNanos);
- cellProcessors.add(new NotNull());
queryFields.add(QueryField.shape);
cellProcessors.add(new NotNull());
queryFields.add(QueryField.eventCount);
@@ -618,10 +640,14 @@ public class QueryRestControllerCsvTest extends AbstractDaqRestTest {
while ((customerMap = mapReader.read(header, processors)) != null) {
assertEquals(TEST_CHANNEL_01, customerMap.get(QueryField.channel.name()));
assertEquals("" + pulse, customerMap.get(QueryField.pulseId.name()));
- assertEquals("" + pulse * 10, customerMap.get(QueryField.iocMillis.name()));
- assertEquals("0", customerMap.get(QueryField.iocNanos.name()));
- assertEquals("" + pulse * 10, customerMap.get(QueryField.globalMillis.name()));
- assertEquals("0", customerMap.get(QueryField.globalNanos.name()));
+ assertEquals("" + TimeUtils.getTimeStr(TestTimeUtils.getTimeFromPulseId(pulse)),
+ customerMap.get(QueryField.iocSeconds.name()));
+ assertEquals("" + TimeUtils.getMillis(TestTimeUtils.getTimeFromPulseId(pulse)),
+ customerMap.get(QueryField.iocMillis.name()));
+ assertEquals("" + TimeUtils.getTimeStr(TestTimeUtils.getTimeFromPulseId(pulse)),
+ customerMap.get(QueryField.globalSeconds.name()));
+ assertEquals("" + TimeUtils.getMillis(TestTimeUtils.getTimeFromPulseId(pulse)),
+ customerMap.get(QueryField.globalMillis.name()));
assertEquals("[1]", customerMap.get(QueryField.shape.name()));
assertEquals("5", customerMap.get(QueryField.eventCount.name()));
assertEquals("" + pulse + ".0", customerMap.get(Aggregation.min.name()));
@@ -655,14 +681,14 @@ public class QueryRestControllerCsvTest extends AbstractDaqRestTest {
cellProcessors.add(new NotNull());
queryFields.add(QueryField.pulseId);
cellProcessors.add(new NotNull());
+ queryFields.add(QueryField.iocSeconds);
+ cellProcessors.add(new NotNull());
queryFields.add(QueryField.iocMillis);
cellProcessors.add(new NotNull());
- queryFields.add(QueryField.iocNanos);
+ queryFields.add(QueryField.globalSeconds);
cellProcessors.add(new NotNull());
queryFields.add(QueryField.globalMillis);
cellProcessors.add(new NotNull());
- queryFields.add(QueryField.globalNanos);
- cellProcessors.add(new NotNull());
queryFields.add(QueryField.shape);
cellProcessors.add(new NotNull());
queryFields.add(QueryField.eventCount);
@@ -707,10 +733,14 @@ public class QueryRestControllerCsvTest extends AbstractDaqRestTest {
while ((customerMap = mapReader.read(header, processors)) != null) {
assertEquals(TEST_CHANNEL_01, customerMap.get(QueryField.channel.name()));
assertEquals("" + pulse, customerMap.get(QueryField.pulseId.name()));
- assertEquals("" + pulse * 10, customerMap.get(QueryField.iocMillis.name()));
- assertEquals("0", customerMap.get(QueryField.iocNanos.name()));
- assertEquals("" + pulse * 10, customerMap.get(QueryField.globalMillis.name()));
- assertEquals("0", customerMap.get(QueryField.globalNanos.name()));
+ assertEquals("" + TimeUtils.getTimeStr(TestTimeUtils.getTimeFromPulseId(pulse)),
+ customerMap.get(QueryField.iocSeconds.name()));
+ assertEquals("" + TimeUtils.getMillis(TestTimeUtils.getTimeFromPulseId(pulse)),
+ customerMap.get(QueryField.iocMillis.name()));
+ assertEquals("" + TimeUtils.getTimeStr(TestTimeUtils.getTimeFromPulseId(pulse)),
+ customerMap.get(QueryField.globalSeconds.name()));
+ assertEquals("" + TimeUtils.getMillis(TestTimeUtils.getTimeFromPulseId(pulse)),
+ customerMap.get(QueryField.globalMillis.name()));
assertEquals("[1]", customerMap.get(QueryField.shape.name()));
assertEquals("10", customerMap.get(QueryField.eventCount.name()));
assertEquals("" + pulse + ".0", customerMap.get(Aggregation.min.name()));
diff --git a/src/test/java/ch/psi/daq/test/queryrest/controller/QueryRestControllerJsonTest.java b/src/test/java/ch/psi/daq/test/queryrest/controller/QueryRestControllerJsonTest.java
index 46b2011..e524925 100644
--- a/src/test/java/ch/psi/daq/test/queryrest/controller/QueryRestControllerJsonTest.java
+++ b/src/test/java/ch/psi/daq/test/queryrest/controller/QueryRestControllerJsonTest.java
@@ -15,10 +15,13 @@ import ch.psi.daq.cassandra.request.range.RequestRangePulseId;
import ch.psi.daq.cassandra.request.range.RequestRangeTime;
import ch.psi.daq.cassandra.util.test.CassandraDataGen;
import ch.psi.daq.common.ordering.Ordering;
+import ch.psi.daq.common.time.TimeUtils;
import ch.psi.daq.domain.json.ChannelName;
import ch.psi.daq.domain.reader.Backend;
+import ch.psi.daq.domain.test.TestTimeUtils;
import ch.psi.daq.query.model.AggregationType;
import ch.psi.daq.query.model.Compression;
+import ch.psi.daq.query.model.QueryField;
import ch.psi.daq.query.model.impl.DAQQueries;
import ch.psi.daq.query.model.impl.DAQQuery;
import ch.psi.daq.query.model.impl.DAQQueryElement;
@@ -67,16 +70,16 @@ public class QueryRestControllerJsonTest extends AbstractDaqRestTest {
.andExpect(MockMvcResultMatchers.jsonPath("$[0].channels[0]").value("BoolScalar"))
.andExpect(MockMvcResultMatchers.jsonPath("$[0].channels[1]").exists())
.andExpect(MockMvcResultMatchers.jsonPath("$[0].channels[1]").value("BoolWaveform"))
-// .andExpect(MockMvcResultMatchers.jsonPath("$[1]").exists())
-// .andExpect(MockMvcResultMatchers.jsonPath("$[1].backend").value(Backend.archiverappliance.name()))
-// .andExpect(MockMvcResultMatchers.jsonPath("$[1].channels").isArray())
-// .andExpect(MockMvcResultMatchers.jsonPath("$[1].channels[0]").exists())
-// .andExpect(
-// MockMvcResultMatchers.jsonPath("$[1].channels[0]").value(DummyArchiverApplianceReader.TEST_CHANNEL_1))
-// .andExpect(MockMvcResultMatchers.jsonPath("$[1].channels[1]").exists())
-// .andExpect(
-// MockMvcResultMatchers.jsonPath("$[1].channels[1]").value(DummyArchiverApplianceReader.TEST_CHANNEL_2))
- ;
+ // .andExpect(MockMvcResultMatchers.jsonPath("$[1]").exists())
+ // .andExpect(MockMvcResultMatchers.jsonPath("$[1].backend").value(Backend.archiverappliance.name()))
+ // .andExpect(MockMvcResultMatchers.jsonPath("$[1].channels").isArray())
+ // .andExpect(MockMvcResultMatchers.jsonPath("$[1].channels[0]").exists())
+ // .andExpect(
+ // MockMvcResultMatchers.jsonPath("$[1].channels[0]").value(DummyArchiverApplianceReader.TEST_CHANNEL_1))
+ // .andExpect(MockMvcResultMatchers.jsonPath("$[1].channels[1]").exists())
+ // .andExpect(
+ // MockMvcResultMatchers.jsonPath("$[1].channels[1]").value(DummyArchiverApplianceReader.TEST_CHANNEL_2))
+ ;
}
@@ -100,11 +103,11 @@ public class QueryRestControllerJsonTest extends AbstractDaqRestTest {
.andExpect(MockMvcResultMatchers.jsonPath("$[0].channels[2]").value("UInt32Scalar"))
.andExpect(MockMvcResultMatchers.jsonPath("$[0].channels[3]").exists())
.andExpect(MockMvcResultMatchers.jsonPath("$[0].channels[3]").value("UInt32Waveform"))
-// .andExpect(MockMvcResultMatchers.jsonPath("$[1]").exists())
-// .andExpect(MockMvcResultMatchers.jsonPath("$[1].backend").value(Backend.archiverappliance.name()))
-// .andExpect(MockMvcResultMatchers.jsonPath("$[1].channels").isArray())
-// .andExpect(MockMvcResultMatchers.jsonPath("$[1].channels[0]").doesNotExist())
- ;
+ // .andExpect(MockMvcResultMatchers.jsonPath("$[1]").exists())
+ // .andExpect(MockMvcResultMatchers.jsonPath("$[1].backend").value(Backend.archiverappliance.name()))
+ // .andExpect(MockMvcResultMatchers.jsonPath("$[1].channels").isArray())
+ // .andExpect(MockMvcResultMatchers.jsonPath("$[1].channels[0]").doesNotExist())
+ ;
}
@Test
@@ -156,12 +159,12 @@ public class QueryRestControllerJsonTest extends AbstractDaqRestTest {
.andExpect(MockMvcResultMatchers.jsonPath("$[0].channels").isArray())
.andExpect(MockMvcResultMatchers.jsonPath("$[0].channels[23]").exists())
.andExpect(MockMvcResultMatchers.jsonPath("$[0].channels[24]").doesNotExist())
-// .andExpect(MockMvcResultMatchers.jsonPath("$[1]").exists())
-// .andExpect(MockMvcResultMatchers.jsonPath("$[1].backend").value(Backend.archiverappliance.name()))
-// .andExpect(MockMvcResultMatchers.jsonPath("$[1].channels").isArray())
-// .andExpect(MockMvcResultMatchers.jsonPath("$[1].channels[2]").exists())
-// .andExpect(MockMvcResultMatchers.jsonPath("$[1].channels[3]").doesNotExist())
- ;
+ // .andExpect(MockMvcResultMatchers.jsonPath("$[1]").exists())
+ // .andExpect(MockMvcResultMatchers.jsonPath("$[1].backend").value(Backend.archiverappliance.name()))
+ // .andExpect(MockMvcResultMatchers.jsonPath("$[1].channels").isArray())
+ // .andExpect(MockMvcResultMatchers.jsonPath("$[1].channels[2]").exists())
+ // .andExpect(MockMvcResultMatchers.jsonPath("$[1].channels[3]").doesNotExist())
+ ;
// each reload add another channel
request.setReload(true);
@@ -182,12 +185,12 @@ public class QueryRestControllerJsonTest extends AbstractDaqRestTest {
.andExpect(MockMvcResultMatchers.jsonPath("$[0].channels").isArray())
.andExpect(MockMvcResultMatchers.jsonPath("$[0].channels[24]").exists())
.andExpect(MockMvcResultMatchers.jsonPath("$[0].channels[25]").doesNotExist())
-// .andExpect(MockMvcResultMatchers.jsonPath("$[1]").exists())
-// .andExpect(MockMvcResultMatchers.jsonPath("$[1].backend").value(Backend.archiverappliance.name()))
-// .andExpect(MockMvcResultMatchers.jsonPath("$[1].channels").isArray())
-// .andExpect(MockMvcResultMatchers.jsonPath("$[1].channels[3]").exists())
-// .andExpect(MockMvcResultMatchers.jsonPath("$[1].channels[4]").doesNotExist())
- ;
+ // .andExpect(MockMvcResultMatchers.jsonPath("$[1]").exists())
+ // .andExpect(MockMvcResultMatchers.jsonPath("$[1].backend").value(Backend.archiverappliance.name()))
+ // .andExpect(MockMvcResultMatchers.jsonPath("$[1].channels").isArray())
+ // .andExpect(MockMvcResultMatchers.jsonPath("$[1].channels[3]").exists())
+ // .andExpect(MockMvcResultMatchers.jsonPath("$[1].channels[4]").doesNotExist())
+ ;
}
@Test
@@ -279,9 +282,14 @@ public class QueryRestControllerJsonTest extends AbstractDaqRestTest {
public void testPulseRangeQuery() throws Exception {
DAQQuery request = new DAQQuery(
new RequestRangePulseId(
- 10,
- 11),
+ 100,
+ 101),
TEST_CHANNEL_NAMES);
+ request.addField(QueryField.pulseId);
+ request.addField(QueryField.globalSeconds);
+ request.addField(QueryField.globalMillis);
+ request.addField(QueryField.iocSeconds);
+ request.addField(QueryField.iocMillis);
String content = mapper.writeValueAsString(request);
System.out.println(content);
@@ -297,25 +305,45 @@ public class QueryRestControllerJsonTest extends AbstractDaqRestTest {
.andExpect(MockMvcResultMatchers.jsonPath("$").isArray())
.andExpect(MockMvcResultMatchers.jsonPath("$[0]").exists())
.andExpect(MockMvcResultMatchers.jsonPath("$[0].channel.name").value(TEST_CHANNEL_01))
- .andExpect(MockMvcResultMatchers.jsonPath("$[0].data[0].pulseId").value(10))
- .andExpect(MockMvcResultMatchers.jsonPath("$[0].data[0].globalMillis").value(100))
- .andExpect(MockMvcResultMatchers.jsonPath("$[0].data[1].pulseId").value(11))
- .andExpect(MockMvcResultMatchers.jsonPath("$[0].data[1].globalMillis").value(110))
+ .andExpect(MockMvcResultMatchers.jsonPath("$[0].data[0].pulseId").value(100))
+ .andExpect(MockMvcResultMatchers.jsonPath("$[0].data[0].globalSeconds").value(
+ TestTimeUtils.getTimeStr(1, 0)))
+ .andExpect(MockMvcResultMatchers.jsonPath("$[0].data[0].globalMillis").value(1000))
+ .andExpect(MockMvcResultMatchers.jsonPath("$[0].data[0].iocSeconds").value(
+ TestTimeUtils.getTimeStr(1, 0)))
+ .andExpect(MockMvcResultMatchers.jsonPath("$[0].data[0].iocMillis").value(1000))
+ .andExpect(MockMvcResultMatchers.jsonPath("$[0].data[1].pulseId").value(101))
+ .andExpect(MockMvcResultMatchers.jsonPath("$[0].data[1].globalSeconds").value(
+ TestTimeUtils.getTimeStr(1, 10000000)))
+ .andExpect(MockMvcResultMatchers.jsonPath("$[0].data[1].globalMillis").value(1010))
+ .andExpect(MockMvcResultMatchers.jsonPath("$[0].data[1].iocSeconds").value(
+ TestTimeUtils.getTimeStr(1, 10000000)))
+ .andExpect(MockMvcResultMatchers.jsonPath("$[0].data[1].iocMillis").value(1010))
.andExpect(MockMvcResultMatchers.jsonPath("$[1]").exists())
.andExpect(MockMvcResultMatchers.jsonPath("$[1].channel.name").value(TEST_CHANNEL_02))
.andExpect(MockMvcResultMatchers.jsonPath("$[1].data").isArray())
- .andExpect(MockMvcResultMatchers.jsonPath("$[1].data[0].pulseId").value(10))
- .andExpect(MockMvcResultMatchers.jsonPath("$[1].data[0].globalMillis").value(100))
- .andExpect(MockMvcResultMatchers.jsonPath("$[1].data[1].pulseId").value(11))
- .andExpect(MockMvcResultMatchers.jsonPath("$[1].data[1].globalMillis").value(110));
+ .andExpect(MockMvcResultMatchers.jsonPath("$[1].data[0].pulseId").value(100))
+ .andExpect(MockMvcResultMatchers.jsonPath("$[1].data[0].globalSeconds").value(
+ TestTimeUtils.getTimeStr(1, 0)))
+ .andExpect(MockMvcResultMatchers.jsonPath("$[1].data[0].globalMillis").value(1000))
+ .andExpect(MockMvcResultMatchers.jsonPath("$[1].data[0].iocSeconds").value(
+ TestTimeUtils.getTimeStr(1, 0)))
+ .andExpect(MockMvcResultMatchers.jsonPath("$[1].data[0].iocMillis").value(1000))
+ .andExpect(MockMvcResultMatchers.jsonPath("$[1].data[1].pulseId").value(101))
+ .andExpect(MockMvcResultMatchers.jsonPath("$[1].data[1].globalSeconds").value(
+ TestTimeUtils.getTimeStr(1, 10000000)))
+ .andExpect(MockMvcResultMatchers.jsonPath("$[1].data[1].globalMillis").value(1010))
+ .andExpect(MockMvcResultMatchers.jsonPath("$[1].data[1].iocSeconds").value(
+ TestTimeUtils.getTimeStr(1, 10000000)))
+ .andExpect(MockMvcResultMatchers.jsonPath("$[1].data[1].iocMillis").value(1010));
}
-
+
@Test
public void testPulseRangeQueryBackends() throws Exception {
DAQQuery request = new DAQQuery(
new RequestRangePulseId(
- 10,
- 11),
+ 100,
+ 101),
new ChannelName(TEST_CHANNEL_01, Backend.databuffer),
new ChannelName(TEST_CHANNEL_02, Backend.archiverappliance));
@@ -334,19 +362,21 @@ public class QueryRestControllerJsonTest extends AbstractDaqRestTest {
.andExpect(MockMvcResultMatchers.jsonPath("$[0]").exists())
.andExpect(MockMvcResultMatchers.jsonPath("$[0].channel").isMap())
.andExpect(MockMvcResultMatchers.jsonPath("$[0].channel.name").value(TEST_CHANNEL_01))
- .andExpect(MockMvcResultMatchers.jsonPath("$[0].data[0].pulseId").value(10))
- .andExpect(MockMvcResultMatchers.jsonPath("$[0].data[0].globalMillis").value(100))
- .andExpect(MockMvcResultMatchers.jsonPath("$[0].data[1].pulseId").value(11))
- .andExpect(MockMvcResultMatchers.jsonPath("$[0].data[1].globalMillis").value(110))
-// .andExpect(MockMvcResultMatchers.jsonPath("$[1]").exists())
-// .andExpect(MockMvcResultMatchers.jsonPath("$[1].channel").isMap())
-// .andExpect(MockMvcResultMatchers.jsonPath("$[1].channel.name").value(TEST_CHANNEL_02))
-// .andExpect(MockMvcResultMatchers.jsonPath("$[1].data").isArray())
-// .andExpect(MockMvcResultMatchers.jsonPath("$[1].data[0].pulseId").value(10))
-// .andExpect(MockMvcResultMatchers.jsonPath("$[1].data[0].globalMillis").value(100))
-// .andExpect(MockMvcResultMatchers.jsonPath("$[1].data[1].pulseId").value(11))
-// .andExpect(MockMvcResultMatchers.jsonPath("$[1].data[1].globalMillis").value(110))
- ;
+ .andExpect(MockMvcResultMatchers.jsonPath("$[0].data[0].pulseId").value(100))
+ .andExpect(MockMvcResultMatchers.jsonPath("$[0].data[0].globalSeconds").value(
+ TestTimeUtils.getTimeStr(1, 0)))
+ .andExpect(MockMvcResultMatchers.jsonPath("$[0].data[1].pulseId").value(101))
+ .andExpect(MockMvcResultMatchers.jsonPath("$[0].data[1].globalSeconds").value(
+ TestTimeUtils.getTimeStr(1, 10000000)))
+ // .andExpect(MockMvcResultMatchers.jsonPath("$[1]").exists())
+ // .andExpect(MockMvcResultMatchers.jsonPath("$[1].channel").isMap())
+ // .andExpect(MockMvcResultMatchers.jsonPath("$[1].channel.name").value(TEST_CHANNEL_02))
+ // .andExpect(MockMvcResultMatchers.jsonPath("$[1].data").isArray())
+ // .andExpect(MockMvcResultMatchers.jsonPath("$[1].data[0].pulseId").value(10))
+ // .andExpect(MockMvcResultMatchers.jsonPath("$[1].data[0].globalMillis").value(100))
+ // .andExpect(MockMvcResultMatchers.jsonPath("$[1].data[1].pulseId").value(11))
+ // .andExpect(MockMvcResultMatchers.jsonPath("$[1].data[1].globalMillis").value(110))
+ ;
}
@Test
@@ -355,13 +385,13 @@ public class QueryRestControllerJsonTest extends AbstractDaqRestTest {
DAQQueries request = new DAQQueries(
new DAQQueryElement(
new RequestRangePulseId(
- 10,
- 11),
+ 100,
+ 101),
TEST_CHANNEL_NAMES),
new DAQQueryElement(
new RequestRangePulseId(
- 10,
- 11),
+ 100,
+ 101),
testChannel3));
String content = mapper.writeValueAsString(request);
@@ -380,33 +410,39 @@ public class QueryRestControllerJsonTest extends AbstractDaqRestTest {
.andExpect(MockMvcResultMatchers.jsonPath("$[0]").isArray())
.andExpect(MockMvcResultMatchers.jsonPath("$[0][0]").exists())
.andExpect(MockMvcResultMatchers.jsonPath("$[0][0].channel.name").value(TEST_CHANNEL_01))
- .andExpect(MockMvcResultMatchers.jsonPath("$[0][0].data[0].pulseId").value(10))
- .andExpect(MockMvcResultMatchers.jsonPath("$[0][0].data[0].globalMillis").value(100))
- .andExpect(MockMvcResultMatchers.jsonPath("$[0][0].data[1].pulseId").value(11))
- .andExpect(MockMvcResultMatchers.jsonPath("$[0][0].data[1].globalMillis").value(110))
+ .andExpect(MockMvcResultMatchers.jsonPath("$[0][0].data[0].pulseId").value(100))
+ .andExpect(MockMvcResultMatchers.jsonPath("$[0][0].data[0].globalSeconds").value(
+ TestTimeUtils.getTimeStr(1, 0)))
+ .andExpect(MockMvcResultMatchers.jsonPath("$[0][0].data[1].pulseId").value(101))
+ .andExpect(MockMvcResultMatchers.jsonPath("$[0][0].data[1].globalSeconds").value(
+ TestTimeUtils.getTimeStr(1, 10000000)))
.andExpect(MockMvcResultMatchers.jsonPath("$[0][1]").exists())
.andExpect(MockMvcResultMatchers.jsonPath("$[0][1].channel.name").value(TEST_CHANNEL_02))
.andExpect(MockMvcResultMatchers.jsonPath("$[0][1].data").isArray())
- .andExpect(MockMvcResultMatchers.jsonPath("$[0][1].data[0].pulseId").value(10))
- .andExpect(MockMvcResultMatchers.jsonPath("$[0][1].data[0].globalMillis").value(100))
- .andExpect(MockMvcResultMatchers.jsonPath("$[0][1].data[1].pulseId").value(11))
- .andExpect(MockMvcResultMatchers.jsonPath("$[0][1].data[1].globalMillis").value(110))
+ .andExpect(MockMvcResultMatchers.jsonPath("$[0][1].data[0].pulseId").value(100))
+ .andExpect(MockMvcResultMatchers.jsonPath("$[0][1].data[0].globalSeconds").value(
+ TestTimeUtils.getTimeStr(1, 0)))
+ .andExpect(MockMvcResultMatchers.jsonPath("$[0][1].data[1].pulseId").value(101))
+ .andExpect(MockMvcResultMatchers.jsonPath("$[0][1].data[1].globalSeconds").value(
+ TestTimeUtils.getTimeStr(1, 10000000)))
.andExpect(MockMvcResultMatchers.jsonPath("$[1]").exists())
.andExpect(MockMvcResultMatchers.jsonPath("$[1]").isArray())
.andExpect(MockMvcResultMatchers.jsonPath("$[1][0]").exists())
.andExpect(MockMvcResultMatchers.jsonPath("$[1][0].channel.name").value(testChannel3))
- .andExpect(MockMvcResultMatchers.jsonPath("$[1][0].data[0].pulseId").value(10))
- .andExpect(MockMvcResultMatchers.jsonPath("$[1][0].data[0].globalMillis").value(100))
- .andExpect(MockMvcResultMatchers.jsonPath("$[1][0].data[1].pulseId").value(11))
- .andExpect(MockMvcResultMatchers.jsonPath("$[1][0].data[1].globalMillis").value(110));
+ .andExpect(MockMvcResultMatchers.jsonPath("$[1][0].data[0].pulseId").value(100))
+ .andExpect(MockMvcResultMatchers.jsonPath("$[1][0].data[0].globalSeconds").value(
+ TestTimeUtils.getTimeStr(1, 0)))
+ .andExpect(MockMvcResultMatchers.jsonPath("$[1][0].data[1].pulseId").value(101))
+ .andExpect(MockMvcResultMatchers.jsonPath("$[1][0].data[1].globalSeconds").value(
+ TestTimeUtils.getTimeStr(1, 10000000)));
}
@Test
public void testTimeRangeQuery() throws Exception {
DAQQuery request = new DAQQuery(
new RequestRangeTime(
- 100,
- 110),
+ TimeUtils.getTimeFromMillis(2000, 0),
+ TimeUtils.getTimeFromMillis(2010, 0)),
TEST_CHANNEL_NAMES);
String content = mapper.writeValueAsString(request);
@@ -423,24 +459,28 @@ public class QueryRestControllerJsonTest extends AbstractDaqRestTest {
.andExpect(MockMvcResultMatchers.jsonPath("$[0].channel").isMap())
.andExpect(MockMvcResultMatchers.jsonPath("$[0].channel.name").value(TEST_CHANNEL_01))
.andExpect(MockMvcResultMatchers.jsonPath("$[0].data").isArray())
- .andExpect(MockMvcResultMatchers.jsonPath("$[0].data[0].pulseId").value(10))
- .andExpect(MockMvcResultMatchers.jsonPath("$[0].data[0].globalMillis").value(100))
- .andExpect(MockMvcResultMatchers.jsonPath("$[0].data[1].pulseId").value(11))
- .andExpect(MockMvcResultMatchers.jsonPath("$[0].data[1].globalMillis").value(110))
+ .andExpect(MockMvcResultMatchers.jsonPath("$[0].data[0].pulseId").value(200))
+ .andExpect(MockMvcResultMatchers.jsonPath("$[0].data[0].globalSeconds").value(
+ TestTimeUtils.getTimeStr(2, 0)))
+ .andExpect(MockMvcResultMatchers.jsonPath("$[0].data[1].pulseId").value(201))
+ .andExpect(MockMvcResultMatchers.jsonPath("$[0].data[1].globalSeconds").value(
+ TestTimeUtils.getTimeStr(2, 10000000)))
.andExpect(MockMvcResultMatchers.jsonPath("$[1]").exists())
.andExpect(MockMvcResultMatchers.jsonPath("$[1].channel").isMap())
.andExpect(MockMvcResultMatchers.jsonPath("$[1].channel.name").value(TEST_CHANNEL_02))
.andExpect(MockMvcResultMatchers.jsonPath("$[1].data").isArray())
- .andExpect(MockMvcResultMatchers.jsonPath("$[1].data[0].pulseId").value(10))
- .andExpect(MockMvcResultMatchers.jsonPath("$[1].data[0].globalMillis").value(100))
- .andExpect(MockMvcResultMatchers.jsonPath("$[1].data[1].pulseId").value(11))
- .andExpect(MockMvcResultMatchers.jsonPath("$[1].data[1].globalMillis").value(110));
+ .andExpect(MockMvcResultMatchers.jsonPath("$[1].data[0].pulseId").value(200))
+ .andExpect(MockMvcResultMatchers.jsonPath("$[1].data[0].globalSeconds").value(
+ TestTimeUtils.getTimeStr(2, 0)))
+ .andExpect(MockMvcResultMatchers.jsonPath("$[1].data[1].pulseId").value(201))
+ .andExpect(MockMvcResultMatchers.jsonPath("$[1].data[1].globalSeconds").value(
+ TestTimeUtils.getTimeStr(2, 10000000)));
}
@Test
public void testDateRangeQuery() throws Exception {
- String startDate = RequestRangeDate.format(100);
- String endDate = RequestRangeDate.format(110);
+ String startDate = RequestRangeDate.format(1000);
+ String endDate = RequestRangeDate.format(1010);
DAQQuery request = new DAQQuery(
new RequestRangeDate(
startDate,
@@ -464,18 +504,22 @@ public class QueryRestControllerJsonTest extends AbstractDaqRestTest {
.andExpect(MockMvcResultMatchers.jsonPath("$[0].channel.name").value(TEST_CHANNEL_01))
.andExpect(MockMvcResultMatchers.jsonPath("$[0].channel.backend").value("databuffer"))
.andExpect(MockMvcResultMatchers.jsonPath("$[0].data").isArray())
- .andExpect(MockMvcResultMatchers.jsonPath("$[0].data[0].pulseId").value(10))
- .andExpect(MockMvcResultMatchers.jsonPath("$[0].data[0].globalMillis").value(100))
- .andExpect(MockMvcResultMatchers.jsonPath("$[0].data[1].pulseId").value(11))
- .andExpect(MockMvcResultMatchers.jsonPath("$[0].data[1].globalMillis").value(110))
+ .andExpect(MockMvcResultMatchers.jsonPath("$[0].data[0].pulseId").value(100))
+ .andExpect(MockMvcResultMatchers.jsonPath("$[0].data[0].globalSeconds").value(
+ TestTimeUtils.getTimeStr(1, 0)))
+ .andExpect(MockMvcResultMatchers.jsonPath("$[0].data[1].pulseId").value(101))
+ .andExpect(MockMvcResultMatchers.jsonPath("$[0].data[1].globalSeconds").value(
+ TestTimeUtils.getTimeStr(1, 10000000)))
.andExpect(MockMvcResultMatchers.jsonPath("$[1]").exists())
.andExpect(MockMvcResultMatchers.jsonPath("$[1].channel.name").value(TEST_CHANNEL_02))
.andExpect(MockMvcResultMatchers.jsonPath("$[1].channel.backend").value("databuffer"))
.andExpect(MockMvcResultMatchers.jsonPath("$[1].data").isArray())
- .andExpect(MockMvcResultMatchers.jsonPath("$[1].data[0].pulseId").value(10))
- .andExpect(MockMvcResultMatchers.jsonPath("$[1].data[0].globalMillis").value(100))
- .andExpect(MockMvcResultMatchers.jsonPath("$[1].data[1].pulseId").value(11))
- .andExpect(MockMvcResultMatchers.jsonPath("$[1].data[1].globalMillis").value(110));
+ .andExpect(MockMvcResultMatchers.jsonPath("$[1].data[0].pulseId").value(100))
+ .andExpect(MockMvcResultMatchers.jsonPath("$[1].data[0].globalSeconds").value(
+ TestTimeUtils.getTimeStr(1, 0)))
+ .andExpect(MockMvcResultMatchers.jsonPath("$[1].data[1].pulseId").value(101))
+ .andExpect(MockMvcResultMatchers.jsonPath("$[1].data[1].globalSeconds").value(
+ TestTimeUtils.getTimeStr(1, 10000000)));
}
@Test
@@ -520,8 +564,8 @@ public class QueryRestControllerJsonTest extends AbstractDaqRestTest {
@Test
public void testDateRangeQueryNrOfBinsAggregate() throws Exception {
- long startTime = 100;
- long endTime = 199;
+ long startTime = 1000;
+ long endTime = 1099;
String startDate = RequestRangeDate.format(startTime);
String endDate = RequestRangeDate.format(endTime);
DAQQuery request = new DAQQuery(
@@ -549,18 +593,20 @@ public class QueryRestControllerJsonTest extends AbstractDaqRestTest {
.andExpect(MockMvcResultMatchers.jsonPath("$[0].channel.name").value(TEST_CHANNEL_01))
.andExpect(MockMvcResultMatchers.jsonPath("$[0].channel.backend").value(Backend.databuffer.name()))
.andExpect(MockMvcResultMatchers.jsonPath("$[0].data").isArray())
- .andExpect(MockMvcResultMatchers.jsonPath("$[0].data[0].pulseId").value(10))
- .andExpect(MockMvcResultMatchers.jsonPath("$[0].data[0].globalMillis").value(100))
+ .andExpect(MockMvcResultMatchers.jsonPath("$[0].data[0].pulseId").value(100))
+ .andExpect(MockMvcResultMatchers.jsonPath("$[0].data[0].globalSeconds").value(
+ TestTimeUtils.getTimeStr(1, 0)))
.andExpect(MockMvcResultMatchers.jsonPath("$[0].data[0].eventCount").value(5))
- .andExpect(MockMvcResultMatchers.jsonPath("$[0].data[1].pulseId").value(15))
- .andExpect(MockMvcResultMatchers.jsonPath("$[0].data[1].globalMillis").value(150))
+ .andExpect(MockMvcResultMatchers.jsonPath("$[0].data[1].pulseId").value(105))
+ .andExpect(MockMvcResultMatchers.jsonPath("$[0].data[1].globalSeconds").value(
+ TestTimeUtils.getTimeStr(1, 50000000)))
.andExpect(MockMvcResultMatchers.jsonPath("$[0].data[1].eventCount").value(5));
}
@Test
public void testDateRangeQueryBinSizeAggregate() throws Exception {
- long startTime = 1000;
- long endTime = 1999;
+ long startTime = 10000;
+ long endTime = 10999;
String startDate = RequestRangeDate.format(startTime);
String endDate = RequestRangeDate.format(endTime);
DAQQuery request = new DAQQuery(
@@ -588,35 +634,45 @@ public class QueryRestControllerJsonTest extends AbstractDaqRestTest {
.andExpect(MockMvcResultMatchers.jsonPath("$[0].channel.name").value(TEST_CHANNEL_01))
.andExpect(MockMvcResultMatchers.jsonPath("$[0].channel.backend").value(Backend.databuffer.name()))
.andExpect(MockMvcResultMatchers.jsonPath("$[0].data").isArray())
- .andExpect(MockMvcResultMatchers.jsonPath("$[0].data[0].pulseId").value(100))
- .andExpect(MockMvcResultMatchers.jsonPath("$[0].data[0].globalMillis").value(1000))
+ .andExpect(MockMvcResultMatchers.jsonPath("$[0].data[0].pulseId").value(1000))
+ .andExpect(MockMvcResultMatchers.jsonPath("$[0].data[0].globalSeconds").value(
+ TestTimeUtils.getTimeStr(10, 0)))
.andExpect(MockMvcResultMatchers.jsonPath("$[0].data[0].eventCount").value(10))
- .andExpect(MockMvcResultMatchers.jsonPath("$[0].data[1].pulseId").value(110))
- .andExpect(MockMvcResultMatchers.jsonPath("$[0].data[1].globalMillis").value(1100))
+ .andExpect(MockMvcResultMatchers.jsonPath("$[0].data[1].pulseId").value(1010))
+ .andExpect(MockMvcResultMatchers.jsonPath("$[0].data[1].globalSeconds").value(
+ TestTimeUtils.getTimeStr(10, 100000000)))
.andExpect(MockMvcResultMatchers.jsonPath("$[0].data[1].eventCount").value(10))
- .andExpect(MockMvcResultMatchers.jsonPath("$[0].data[2].pulseId").value(120))
- .andExpect(MockMvcResultMatchers.jsonPath("$[0].data[2].globalMillis").value(1200))
+ .andExpect(MockMvcResultMatchers.jsonPath("$[0].data[2].pulseId").value(1020))
+ .andExpect(MockMvcResultMatchers.jsonPath("$[0].data[2].globalSeconds").value(
+ TestTimeUtils.getTimeStr(10, 200000000)))
.andExpect(MockMvcResultMatchers.jsonPath("$[0].data[2].eventCount").value(10))
- .andExpect(MockMvcResultMatchers.jsonPath("$[0].data[3].pulseId").value(130))
- .andExpect(MockMvcResultMatchers.jsonPath("$[0].data[3].globalMillis").value(1300))
+ .andExpect(MockMvcResultMatchers.jsonPath("$[0].data[3].pulseId").value(1030))
+ .andExpect(MockMvcResultMatchers.jsonPath("$[0].data[3].globalSeconds").value(
+ TestTimeUtils.getTimeStr(10, 300000000)))
.andExpect(MockMvcResultMatchers.jsonPath("$[0].data[3].eventCount").value(10))
- .andExpect(MockMvcResultMatchers.jsonPath("$[0].data[4].pulseId").value(140))
- .andExpect(MockMvcResultMatchers.jsonPath("$[0].data[4].globalMillis").value(1400))
+ .andExpect(MockMvcResultMatchers.jsonPath("$[0].data[4].pulseId").value(1040))
+ .andExpect(MockMvcResultMatchers.jsonPath("$[0].data[4].globalSeconds").value(
+ TestTimeUtils.getTimeStr(10, 400000000)))
.andExpect(MockMvcResultMatchers.jsonPath("$[0].data[4].eventCount").value(10))
- .andExpect(MockMvcResultMatchers.jsonPath("$[0].data[5].pulseId").value(150))
- .andExpect(MockMvcResultMatchers.jsonPath("$[0].data[5].globalMillis").value(1500))
+ .andExpect(MockMvcResultMatchers.jsonPath("$[0].data[5].pulseId").value(1050))
+ .andExpect(MockMvcResultMatchers.jsonPath("$[0].data[5].globalSeconds").value(
+ TestTimeUtils.getTimeStr(10, 500000000)))
.andExpect(MockMvcResultMatchers.jsonPath("$[0].data[5].eventCount").value(10))
- .andExpect(MockMvcResultMatchers.jsonPath("$[0].data[6].pulseId").value(160))
- .andExpect(MockMvcResultMatchers.jsonPath("$[0].data[6].globalMillis").value(1600))
+ .andExpect(MockMvcResultMatchers.jsonPath("$[0].data[6].pulseId").value(1060))
+ .andExpect(MockMvcResultMatchers.jsonPath("$[0].data[6].globalSeconds").value(
+ TestTimeUtils.getTimeStr(10, 600000000)))
.andExpect(MockMvcResultMatchers.jsonPath("$[0].data[6].eventCount").value(10))
- .andExpect(MockMvcResultMatchers.jsonPath("$[0].data[7].pulseId").value(170))
- .andExpect(MockMvcResultMatchers.jsonPath("$[0].data[7].globalMillis").value(1700))
+ .andExpect(MockMvcResultMatchers.jsonPath("$[0].data[7].pulseId").value(1070))
+ .andExpect(MockMvcResultMatchers.jsonPath("$[0].data[7].globalSeconds").value(
+ TestTimeUtils.getTimeStr(10, 700000000)))
.andExpect(MockMvcResultMatchers.jsonPath("$[0].data[7].eventCount").value(10))
- .andExpect(MockMvcResultMatchers.jsonPath("$[0].data[8].pulseId").value(180))
- .andExpect(MockMvcResultMatchers.jsonPath("$[0].data[8].globalMillis").value(1800))
+ .andExpect(MockMvcResultMatchers.jsonPath("$[0].data[8].pulseId").value(1080))
+ .andExpect(MockMvcResultMatchers.jsonPath("$[0].data[8].globalSeconds").value(
+ TestTimeUtils.getTimeStr(10, 800000000)))
.andExpect(MockMvcResultMatchers.jsonPath("$[0].data[8].eventCount").value(10))
- .andExpect(MockMvcResultMatchers.jsonPath("$[0].data[9].pulseId").value(190))
- .andExpect(MockMvcResultMatchers.jsonPath("$[0].data[9].globalMillis").value(1900))
+ .andExpect(MockMvcResultMatchers.jsonPath("$[0].data[9].pulseId").value(1090))
+ .andExpect(MockMvcResultMatchers.jsonPath("$[0].data[9].globalSeconds").value(
+ TestTimeUtils.getTimeStr(10, 900000000)))
.andExpect(MockMvcResultMatchers.jsonPath("$[0].data[9].eventCount").value(10));
}
diff --git a/src/test/java/ch/psi/daq/test/queryrest/query/DummyArchiverApplianceReader.java b/src/test/java/ch/psi/daq/test/queryrest/query/DummyArchiverApplianceReader.java
index 3a30f90..1f5d0fe 100644
--- a/src/test/java/ch/psi/daq/test/queryrest/query/DummyArchiverApplianceReader.java
+++ b/src/test/java/ch/psi/daq/test/queryrest/query/DummyArchiverApplianceReader.java
@@ -1,5 +1,6 @@
package ch.psi.daq.test.queryrest.query;
+import java.math.BigDecimal;
import java.util.List;
import java.util.Random;
import java.util.concurrent.atomic.AtomicLong;
@@ -10,6 +11,7 @@ import java.util.stream.Stream;
import com.google.common.collect.Lists;
import ch.psi.daq.common.ordering.Ordering;
+import ch.psi.daq.common.time.TimeUtils;
import ch.psi.daq.domain.DataEvent;
import ch.psi.daq.domain.cassandra.ChannelEvent;
import ch.psi.daq.domain.reader.Backend;
@@ -51,25 +53,25 @@ public class DummyArchiverApplianceReader implements DataReader {
}
@Override
- public Stream<? extends DataEvent> getEventStream(String channel, long startMillis, long startNanos, long endMillis,
- long endNanos, Ordering ordering, boolean aggregateValues, String... columns) {
- return getElements(channel, startMillis / 10, endMillis / 10);
+ public Stream<? extends DataEvent> getEventStream(String channel, BigDecimal startTime, BigDecimal endTime,
+ Ordering ordering, boolean aggregateValues, String... columns) {
+ return getElements(channel, TimeUtils.getMillis(startTime) / 10, TimeUtils.getMillis(endTime) / 10);
}
protected Stream<? extends DataEvent> getElements(String channel, long start, long end) {
String channelLower = channel.toLowerCase();
Stream<? extends DataEvent> eventStream = LongStream.rangeClosed(start, end).mapToObj(i -> {
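+ // dummy events are spaced 10 ms apart; convert i * 10 ms into the BigDecimal time representation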
+ BigDecimal time = TimeUtils.getTimeFromMillis(i * 10, 0);
+
if (channelLower.contains("waveform")) {
long[] value = random.longs(2048).toArray();
value[0] = i;
return new ChannelEvent(
channel,
- i * 10,
- 0,
+ time,
i,
- i * 10,
- 0,
+ time,
1,
value
);
@@ -82,11 +84,9 @@ public class DummyArchiverApplianceReader implements DataReader {
value[0] = i;
return new ChannelEvent(
channel,
- i * 10,
- 0,
+ time,
i,
- i * 10,
- 0,
+ time,
1,
value,
shape
@@ -94,11 +94,9 @@ public class DummyArchiverApplianceReader implements DataReader {
} else {
return new ChannelEvent(
channel,
- i * 10,
- 0,
+ time,
i,
- i * 10,
- 0,
+ time,
1,
i
);
diff --git a/src/test/java/ch/psi/daq/test/queryrest/query/DummyCassandraReader.java b/src/test/java/ch/psi/daq/test/queryrest/query/DummyCassandraReader.java
index 8d033c6..6ad1035 100644
--- a/src/test/java/ch/psi/daq/test/queryrest/query/DummyCassandraReader.java
+++ b/src/test/java/ch/psi/daq/test/queryrest/query/DummyCassandraReader.java
@@ -3,6 +3,7 @@
*/
package ch.psi.daq.test.queryrest.query;
+import java.math.BigDecimal;
import java.util.List;
import java.util.Random;
import java.util.concurrent.CompletableFuture;
@@ -21,6 +22,7 @@ import com.google.common.collect.Lists;
import ch.psi.daq.cassandra.reader.CassandraReader;
import ch.psi.daq.cassandra.util.test.CassandraDataGen;
import ch.psi.daq.common.ordering.Ordering;
+import ch.psi.daq.common.time.TimeUtils;
import ch.psi.daq.domain.DataEvent;
import ch.psi.daq.domain.cassandra.ChannelConfiguration;
import ch.psi.daq.domain.cassandra.ChannelEvent;
@@ -107,9 +109,9 @@ public class DummyCassandraReader implements CassandraReader {
* {@inheritDoc}
*/
@Override
- public Stream<? extends DataEvent> getEventStream(String channel, long startMillis, long startNanos, long endMillis,
- long endNanos, Ordering ordering, boolean aggregateValues, String... columns) {
- return getDummyEventStream(channel, startMillis / 10, endMillis / 10);
+ public Stream<? extends DataEvent> getEventStream(String channel, BigDecimal startTime, BigDecimal endTime,
+ Ordering ordering, boolean aggregateValues, String... columns) {
+ return getDummyEventStream(channel, TimeUtils.getMillis(startTime) / 10, TimeUtils.getMillis(endTime) / 10);
}
/**
@@ -160,16 +162,16 @@ public class DummyCassandraReader implements CassandraReader {
String channelLower = channel.toLowerCase();
Stream<? extends DataEvent> eventStream = LongStream.rangeClosed(startIndex, endIndex).mapToObj(i -> {
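+ // dummy events are spaced 10 ms apart; convert i * 10 ms into the BigDecimal time representation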
+ BigDecimal time = TimeUtils.getTimeFromMillis(i * 10, 0);
+
if (channelLower.contains("waveform")) {
long[] value = random.longs(2048).toArray();
value[0] = i;
return new ChannelEvent(
channel,
- i * 10,
- 0,
+ time,
i,
- i * 10,
- 0,
+ time,
KEYSPACE,
value
);
@@ -182,11 +184,9 @@ public class DummyCassandraReader implements CassandraReader {
value[0] = i;
return new ChannelEvent(
channel,
- i * 10,
- 0,
+ time,
i,
- i * 10,
- 0,
+ time,
KEYSPACE,
value,
shape
@@ -194,11 +194,9 @@ public class DummyCassandraReader implements CassandraReader {
} else {
return new ChannelEvent(
channel,
- i * 10,
- 0,
+ time,
i,
- i * 10,
- 0,
+ time,
KEYSPACE,
i
);