diff --git a/Readme.md b/Readme.md
new file mode 100644
index 0000000..32ae7f0
--- /dev/null
+++ b/Readme.md
@@ -0,0 +1,142 @@
+# Overview
+
+This project provides a REST interface to execute queries on the databuffer.
+
+# Requirements
+
+This project requires Java 8 or greater.
+
+# Deployment
+
+Use the instructions provided by [ch.psi.daq.install](https://github.psi.ch/projects/ST/repos/ch.psi.daq.install/browse#query_rest) to install the application on a server.
+
+## Application Properties
+
+The following files define and describe the application properties:
+* [Cassandra](https://github.psi.ch/projects/ST/repos/ch.psi.daq.cassandra/browse/src/main/resources/cassandra.properties) specific properties.
+* [Query](https://github.psi.ch/projects/ST/repos/ch.psi.daq.dispatcher/browse/src/main/resources/query.properties) specific properties.
+* [Query REST](https://github.psi.ch/projects/ST/repos/ch.psi.daq.queryrest/browse/src/main/resources/queryrest.properties) specific properties.
+
+It is possible to overwrite properties by defining new values in `${HOME}/.config/daq/queryrest.properties`.
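+
+For example, an override is a simple `key=value` line in that file. The key shown below is purely illustrative; the actual keys are documented in the properties files linked above:
+
+```
+# hypothetical override - see the linked *.properties files for the actual keys
+queryrest.example.property=newValue
+```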
+
+
+
+# REST Interface
+
+
+
+## Query Channel Names
+
+### Request
+
+```
+GET http://<host>:<port>/channels
+
+or
+
+GET http://<host>:<port>/channels/{regex}
+```
+
+
+### Example
+
+```
+curl -H "Content-Type: application/json" -X GET http://sf-nube-14.psi.ch:8080/channels
+
+or
+
+curl -H "Content-Type: application/json" -X GET http://sf-nube-14.psi.ch:8080/channels/TRFCB
+```
+
+
+
+## Query Data
+
+### Request
+
+```
+GET http://<host>:<port>/query
+```
+
+### Example
+
+A request is performed using JSON. The JSON query defines the channels to be queried, the range, and how the data should be aggregated (aggregation is optional but highly recommended).
+
+```
+curl -H "Content-Type: application/json" -X GET -d '{"channels":["channel1","channel2"],"startPulseId":0,"endPulseId":4}' http://sf-nube-14.psi.ch:8080/query
+```
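+
+A time range can be queried in the same way. The following is a sketch using the startMillis/endMillis fields described in the JSON Query section below; the millisecond values are illustrative:
+
+```
+curl -H "Content-Type: application/json" -X GET -d '{"channels":["channel1","channel2"],"startMillis":0,"endMillis":10}' http://sf-nube-14.psi.ch:8080/query
+```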
+
+### Response Example
+
+The response is returned as JSON: an array containing one object per queried channel, each with the channel name and its data points.
+
+```
+[
+ {
+ "channel":"channel1",
+ "data":[
+ {
+ "pulseId":0,
+ "iocMillis":0,
+ "iocNanos":0,
+ "globalMillis":0,
+ "globalNanos":0,
+ "value":0
+ },
+ {
+ "pulseId":2,
+ "iocMillis":2,
+ "iocNanos":2,
+ "globalMillis":2,
+ "globalNanos":2,
+ "value":2
+ },
+ {
+ "pulseId":4,
+ "iocMillis":4,
+ "iocNanos":4,
+ "globalMillis":4,
+ "globalNanos":4,
+ "value":4
+ }
+ ]
+ },
+ {
+ "channel":"channel2",
+ "data":[
+ {
+ "pulseId":1,
+ "iocMillis":1,
+ "iocNanos":1,
+ "globalMillis":1,
+ "globalNanos":1,
+ "value":1
+ },
+ {
+ "pulseId":3,
+ "iocMillis":3,
+ "iocNanos":3,
+ "globalMillis":3,
+ "globalNanos":3,
+ "value":3
+ }
+ ]
+ }
+]
+```
+
+### JSON Query
+
+Queries are defined using JSON.
+The following fields are supported (a combined example is shown after the list):
+- **channels:** Array of channel names to be queried.
+- **startPulseId and endPulseId:** A pulse-id range query with start and end pulse-id.
+- **startMillis/[startNanos] and endMillis/[endNanos]:** A time range query with start and end milliseconds since January 1, 1970 (the UNIX/Java epoch), optionally supplemented with the nanosecond offset to the milliseconds (in the range [0..999999]).
+- **startDate/[startNanos] and endDate/[endNanos]:** A time range query with start and end date (in the format yyyy/MM/dd HH:mm:ss.SSS or dd.MM.yyyy HH:mm:ss.SSS), optionally supplemented with the nanosecond offset to the milliseconds defined by the date string (in the range [0..999999]).
+- **ordering:** The ordering of the data (see [here](https://github.psi.ch/projects/ST/repos/ch.psi.daq.common/browse/src/main/java/ch/psi/daq/common/ordering/Ordering.java) for possible values).
+- **fields:** The requested fields (see [here](https://github.psi.ch/projects/ST/repos/ch.psi.daq.query/browse/src/main/java/ch/psi/daq/query/model/QueryField.java) for possible values).
+- **nrOfBins:** Activates data binning. Specifies the number of bins the pulse/time range should be divided into.
+- **binSize:** Activates data binning. Specifies the number of pulses per bin for pulse-range queries or the number of milliseconds per bin for time-range queries.
+- **aggregations:** Activates data aggregation. Array of requested aggregations (see [here](https://github.psi.ch/projects/ST/repos/ch.psi.daq.query/browse/src/main/java/ch/psi/daq/query/model/AggregationEnum.java) for possible values). These values will be added to the response.
+- **aggregationType:** Specifies the type of aggregation (see [here](https://github.psi.ch/projects/ST/repos/ch.psi.daq.query/browse/src/main/java/ch/psi/daq/query/model/AggregationType.java) for possible values). The default type is *value* aggregation (e.g., sum([1,2,3])=6). Alternatively, it is possible to define *index* aggregation for multiple arrays in combination with binning (e.g., sum([1,2,3], [3,2,1]) = [4,4,4]).
+- **aggregateChannels:** Defines whether the data of the requested channels should be combined using the defined aggregation (values: true|**false**).
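+
+As an illustrative sketch, a combined time-range query with binning and aggregation might look as follows (the field values, including the aggregation names, are examples only; consult the linked enum classes for the actually supported values):
+
+```
+{
+   "channels":["channel1","channel2"],
+   "startDate":"2015/06/01 00:00:00.000",
+   "endDate":"2015/06/01 00:10:00.000",
+   "nrOfBins":100,
+   "aggregations":["min","max","sum"]
+}
+```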
diff --git a/src/main/java/ch/psi/daq/queryrest/response/ResponseStreamWriter.java b/src/main/java/ch/psi/daq/queryrest/response/ResponseStreamWriter.java
index 25a8053..1b1a9c7 100644
--- a/src/main/java/ch/psi/daq/queryrest/response/ResponseStreamWriter.java
+++ b/src/main/java/ch/psi/daq/queryrest/response/ResponseStreamWriter.java
@@ -111,7 +111,7 @@ public class ResponseStreamWriter {
try {
generator.writeStartObject();
generator.writeStringField(QueryField.channel.name(), entry.getKey());
- generator.writeArrayFieldStart("values");
+ generator.writeArrayFieldStart("data");
entry.getValue()
/* ensure elements are sequentially written to the stream */
.sequential()
diff --git a/src/test/java/ch/psi/daq/test/queryrest/controller/DaqRestControllerTest.java b/src/test/java/ch/psi/daq/test/queryrest/controller/DaqRestControllerTest.java
index 330987b..4dcb7c9 100644
--- a/src/test/java/ch/psi/daq/test/queryrest/controller/DaqRestControllerTest.java
+++ b/src/test/java/ch/psi/daq/test/queryrest/controller/DaqRestControllerTest.java
@@ -10,6 +10,7 @@ import ch.psi.daq.query.model.PulseRangeQuery;
import ch.psi.daq.query.model.TimeRangeQuery;
import ch.psi.daq.query.model.TimeRangeQueryDate;
import ch.psi.daq.test.queryrest.AbstractDaqRestTest;
+import ch.psi.daq.test.queryrest.query.DummyQueryProcessor;
/**
* Tests the {@link DaqController} implementation.
@@ -20,8 +21,8 @@ public class DaqRestControllerTest extends AbstractDaqRestTest {
public void testPulseRangeQuery() throws Exception {
PulseRangeQuery request = new PulseRangeQuery(
100,
- 101
- );
+ 101,
+ DummyQueryProcessor.TEST_CHANNEL_NAMES);
String content = mapper.writeValueAsString(request);
@@ -35,21 +36,22 @@ public class DaqRestControllerTest extends AbstractDaqRestTest {
.andExpect(MockMvcResultMatchers.jsonPath("$").isArray())
.andExpect(MockMvcResultMatchers.jsonPath("$[0]").exists())
.andExpect(MockMvcResultMatchers.jsonPath("$[0].channel").value("testChannel1"))
- .andExpect(MockMvcResultMatchers.jsonPath("$[0].values").isArray())
- .andExpect(MockMvcResultMatchers.jsonPath("$[0].values[0].pulseId").value(100))
- .andExpect(MockMvcResultMatchers.jsonPath("$[0].values[1].pulseId").value(101))
+ .andExpect(MockMvcResultMatchers.jsonPath("$[0].data").isArray())
+ .andExpect(MockMvcResultMatchers.jsonPath("$[0].data[0].pulseId").value(100))
+ .andExpect(MockMvcResultMatchers.jsonPath("$[0].data[1].pulseId").value(101))
.andExpect(MockMvcResultMatchers.jsonPath("$[1]").exists())
.andExpect(MockMvcResultMatchers.jsonPath("$[1].channel").value("testChannel2"))
- .andExpect(MockMvcResultMatchers.jsonPath("$[1].values").isArray())
- .andExpect(MockMvcResultMatchers.jsonPath("$[1].values[0].pulseId").value(100))
- .andExpect(MockMvcResultMatchers.jsonPath("$[1].values[1].pulseId").value(101));
+ .andExpect(MockMvcResultMatchers.jsonPath("$[1].data").isArray())
+ .andExpect(MockMvcResultMatchers.jsonPath("$[1].data[0].pulseId").value(100))
+ .andExpect(MockMvcResultMatchers.jsonPath("$[1].data[1].pulseId").value(101));
}
@Test
public void testTimeRangeQuery() throws Exception {
TimeRangeQuery request = new TimeRangeQuery(
100,
- 101);
+ 101,
+ DummyQueryProcessor.TEST_CHANNEL_NAMES);
String content = mapper.writeValueAsString(request);
@@ -63,14 +65,14 @@ public class DaqRestControllerTest extends AbstractDaqRestTest {
.andExpect(MockMvcResultMatchers.jsonPath("$").isArray())
.andExpect(MockMvcResultMatchers.jsonPath("$[0]").exists())
.andExpect(MockMvcResultMatchers.jsonPath("$[0].channel").value("testChannel1"))
- .andExpect(MockMvcResultMatchers.jsonPath("$[0].values").isArray())
- .andExpect(MockMvcResultMatchers.jsonPath("$[0].values[0].pulseId").value(100))
- .andExpect(MockMvcResultMatchers.jsonPath("$[0].values[1].pulseId").value(101))
+ .andExpect(MockMvcResultMatchers.jsonPath("$[0].data").isArray())
+ .andExpect(MockMvcResultMatchers.jsonPath("$[0].data[0].pulseId").value(100))
+ .andExpect(MockMvcResultMatchers.jsonPath("$[0].data[1].pulseId").value(101))
.andExpect(MockMvcResultMatchers.jsonPath("$[1]").exists())
.andExpect(MockMvcResultMatchers.jsonPath("$[1].channel").value("testChannel2"))
- .andExpect(MockMvcResultMatchers.jsonPath("$[1].values").isArray())
- .andExpect(MockMvcResultMatchers.jsonPath("$[1].values[0].pulseId").value(100))
- .andExpect(MockMvcResultMatchers.jsonPath("$[1].values[1].pulseId").value(101));
+ .andExpect(MockMvcResultMatchers.jsonPath("$[1].data").isArray())
+ .andExpect(MockMvcResultMatchers.jsonPath("$[1].data[0].pulseId").value(100))
+ .andExpect(MockMvcResultMatchers.jsonPath("$[1].data[1].pulseId").value(101));
}
@Test
@@ -79,7 +81,8 @@ public class DaqRestControllerTest extends AbstractDaqRestTest {
String endDate = TimeRangeQueryDate.format(101);
TimeRangeQueryDate request = new TimeRangeQueryDate(
startDate,
- endDate);
+ endDate,
+ DummyQueryProcessor.TEST_CHANNEL_NAMES);
String content = mapper.writeValueAsString(request);
@@ -93,13 +96,13 @@ public class DaqRestControllerTest extends AbstractDaqRestTest {
.andExpect(MockMvcResultMatchers.jsonPath("$").isArray())
.andExpect(MockMvcResultMatchers.jsonPath("$[0]").exists())
.andExpect(MockMvcResultMatchers.jsonPath("$[0].channel").value("testChannel1"))
- .andExpect(MockMvcResultMatchers.jsonPath("$[0].values").isArray())
- .andExpect(MockMvcResultMatchers.jsonPath("$[0].values[0].pulseId").value(100))
- .andExpect(MockMvcResultMatchers.jsonPath("$[0].values[1].pulseId").value(101))
+ .andExpect(MockMvcResultMatchers.jsonPath("$[0].data").isArray())
+ .andExpect(MockMvcResultMatchers.jsonPath("$[0].data[0].pulseId").value(100))
+ .andExpect(MockMvcResultMatchers.jsonPath("$[0].data[1].pulseId").value(101))
.andExpect(MockMvcResultMatchers.jsonPath("$[1]").exists())
.andExpect(MockMvcResultMatchers.jsonPath("$[1].channel").value("testChannel2"))
- .andExpect(MockMvcResultMatchers.jsonPath("$[1].values").isArray())
- .andExpect(MockMvcResultMatchers.jsonPath("$[1].values[0].pulseId").value(100))
- .andExpect(MockMvcResultMatchers.jsonPath("$[1].values[1].pulseId").value(101));
+ .andExpect(MockMvcResultMatchers.jsonPath("$[1].data").isArray())
+ .andExpect(MockMvcResultMatchers.jsonPath("$[1].data[0].pulseId").value(100))
+ .andExpect(MockMvcResultMatchers.jsonPath("$[1].data[1].pulseId").value(101));
}
}