ChannelConfigurations Query

This commit is contained in:
Fabian Märki
2017-11-02 09:58:57 +01:00
parent 95ef786776
commit 4e9902b024
40 changed files with 1938 additions and 1356 deletions

View File

@ -73,7 +73,7 @@ POST https://<host>:<port>/channels
##### Explanation ##### Explanation
- **regex**: Regular expression used to filter channel names. In case this value is undefined, no filter will be applied. Filtering is done using JAVA's [Pattern](https://docs.oracle.com/javase/8/docs/api/java/util/regex/Pattern.html), more precisely [Matcher.find()](https://docs.oracle.com/javase/8/docs/api/java/util/regex/Matcher.html#find--). - **regex**: Regular expression used to filter channel names. In case this value is undefined, no filter will be applied. Filtering is done using JAVA's [Pattern](https://docs.oracle.com/javase/8/docs/api/java/util/regex/Pattern.html), more precisely [Matcher.find()](https://docs.oracle.com/javase/8/docs/api/java/util/regex/Matcher.html#find--).
- **backends**: Array of backends to access (values: sf-databuffer|sf-archiverappliance). In case this value is undefined, all backends will be queried for their channels. - **backends**: Array of backends to access (values: sf-databuffer|sf-imagebuffer|sf-archiverappliance). In case this value is undefined, all backends will be queried for their channels.
- **ordering**: The ordering of the channel names (values: **none**|asc|desc). - **ordering**: The ordering of the channel names (values: **none**|asc|desc).
- **reload**: Forces the server to reload cached channel names (values: **false**|true). - **reload**: Forces the server to reload cached channel names (values: **false**|true).

View File

@ -37,25 +37,27 @@ import ch.psi.daq.domain.events.ChannelConfiguration;
import ch.psi.daq.domain.query.backend.BackendQuery; import ch.psi.daq.domain.query.backend.BackendQuery;
import ch.psi.daq.domain.query.backend.analyzer.BackendQueryAnalyzer; import ch.psi.daq.domain.query.backend.analyzer.BackendQueryAnalyzer;
import ch.psi.daq.domain.query.operation.Aggregation; import ch.psi.daq.domain.query.operation.Aggregation;
import ch.psi.daq.domain.query.operation.QueryField; import ch.psi.daq.domain.query.operation.ConfigField;
import ch.psi.daq.domain.query.operation.EventField;
import ch.psi.daq.domain.query.operation.aggregation.extrema.AbstractExtremaMeta; import ch.psi.daq.domain.query.operation.aggregation.extrema.AbstractExtremaMeta;
import ch.psi.daq.domain.query.response.Response; import ch.psi.daq.domain.query.response.Response;
import ch.psi.daq.domain.request.validate.RequestProviderValidator; import ch.psi.daq.domain.request.validate.RequestProviderValidator;
import ch.psi.daq.query.analyzer.BackendQueryAnalyzerImpl; import ch.psi.daq.query.analyzer.BackendQueryAnalyzerImpl;
import ch.psi.daq.query.config.QueryConfig; import ch.psi.daq.query.config.QueryConfig;
import ch.psi.daq.queryrest.controller.validator.QueryValidator; import ch.psi.daq.queryrest.controller.validator.ConfigQueryValidator;
import ch.psi.daq.queryrest.model.ChannelRenameFilterMixin; import ch.psi.daq.queryrest.controller.validator.EventQueryValidator;
import ch.psi.daq.queryrest.model.HistoricChannelConfigurationPropertyFilterMixin;
import ch.psi.daq.queryrest.model.PropertyFilterMixin; import ch.psi.daq.queryrest.model.PropertyFilterMixin;
import ch.psi.daq.queryrest.query.QueryManager; import ch.psi.daq.queryrest.query.QueryManager;
import ch.psi.daq.queryrest.query.QueryManagerImpl; import ch.psi.daq.queryrest.query.QueryManagerImpl;
import ch.psi.daq.queryrest.response.PolymorphicResponseMixIn; import ch.psi.daq.queryrest.response.PolymorphicResponseMixIn;
import ch.psi.daq.queryrest.response.csv.CSVResponseStreamWriter; import ch.psi.daq.queryrest.response.csv.CSVResponseStreamWriter;
import ch.psi.daq.queryrest.response.formatter.AnyResponseFormatter;
import ch.psi.daq.queryrest.response.formatter.DAQConfigQueryResponseFormatter;
import ch.psi.daq.queryrest.response.formatter.DAQQueriesResponseFormatter;
import ch.psi.daq.queryrest.response.json.JSONResponseStreamWriter; import ch.psi.daq.queryrest.response.json.JSONResponseStreamWriter;
import ch.psi.daq.queryrest.response.json.JSONTableResponseStreamWriter;
import ch.psi.daq.queryrest.response.msgpack.MsgPackResponseStreamWriter; import ch.psi.daq.queryrest.response.msgpack.MsgPackResponseStreamWriter;
import ch.psi.daq.queryrest.response.msgpack.MsgPackTableResponseStreamWriter;
import ch.psi.daq.queryrest.response.smile.SmileResponseStreamWriter; import ch.psi.daq.queryrest.response.smile.SmileResponseStreamWriter;
import ch.psi.daq.queryrest.response.smile.SmileTableResponseStreamWriter;
@Configuration @Configuration
@Import(value = DomainConfigCORS.class) @Import(value = DomainConfigCORS.class)
@ -65,9 +67,12 @@ import ch.psi.daq.queryrest.response.smile.SmileTableResponseStreamWriter;
"file:${user.home}/.config/daq/queryrest.properties"}, ignoreResourceNotFound = true) "file:${user.home}/.config/daq/queryrest.properties"}, ignoreResourceNotFound = true)
public class QueryRestConfig { // extends WebMvcConfigurerAdapter { public class QueryRestConfig { // extends WebMvcConfigurerAdapter {
private static final String QUERYREST_DEFAULT_RESPONSE_AGGREGATIONS = "queryrest.default.response.aggregations"; private static final String QUERYREST_RESPONSE_FIELDS_EVENT_QUERY = "queryrest.response.fields.event.query";
private static final String QUERYREST_RESPONSE_FIELDS_EVENT_QUERY_AGGREGATIONS =
"queryrest.response.fields.event.query.aggregations";
private static final String QUERYREST_DEFAULT_RESPONSE_FIELDS = "queryrest.default.response.fields"; private static final String QUERYREST_RESPONSE_FIELDS_CONFIG_QUERY = "queryrest.response.fields.config.query";
private static final String QUERYREST_RESPONSE_FIELDS_CONFIG_HISTORIC = "queryrest.response.fields.config.historic";
// a nested configuration // a nested configuration
// this guarantees that the ordering of the properties file is as expected // this guarantees that the ordering of the properties file is as expected
@ -87,13 +92,21 @@ public class QueryRestConfig { // extends WebMvcConfigurerAdapter {
public static final String BEAN_NAME_QUERY_MANAGER = "queryManager"; public static final String BEAN_NAME_QUERY_MANAGER = "queryManager";
public static final String BEAN_NAME_QUERY_ANALIZER_FACTORY = "queryAnalizerFactory"; public static final String BEAN_NAME_QUERY_ANALIZER_FACTORY = "queryAnalizerFactory";
public static final String BEAN_NAME_QUERY_VALIDATOR = "queryValidator"; public static final String BEAN_NAME_EVENT_QUERY_VALIDATOR = "eventQueryValidator";
public static final String BEAN_NAME_CONFIG_QUERY_VALIDATOR = "configQueryValidator";
public static final String BEAN_NAME_REQUEST_PROVIDER_VALIDATOR = "requestProviderValidator"; public static final String BEAN_NAME_REQUEST_PROVIDER_VALIDATOR = "requestProviderValidator";
public static final String BEAN_NAME_JSON_FACTORY = "jsonFactory"; public static final String BEAN_NAME_JSON_FACTORY = "jsonFactory";
public static final String BEAN_NAME_MSG_PACK_FACTORY = "msgPackFactory"; public static final String BEAN_NAME_MSG_PACK_FACTORY = "msgPackFactory";
public static final String BEAN_NAME_SMILE_FACTORY = "smileFactory"; public static final String BEAN_NAME_SMILE_FACTORY = "smileFactory";
public static final String BEAN_NAME_DEFAULT_RESPONSE_FIELDS = "defaultResponseFields"; public static final String BEAN_NAME_DEFAULT_EVENT_RESPONSE_FIELDS = "defaultEventResponseFields";
public static final String BEAN_NAME_DEFAULT_RESPONSE_AGGREGATIONS = "defaultResponseAggregations"; public static final String BEAN_NAME_DEFAULT_EVENT_RESPONSE_AGGREGATIONS = "defaultEventResponseAggregations";
public static final String BEAN_NAME_CONFIG_RESPONSE_FIELDS_QUERY = "configResponseFieldsQuery";
public static final String BEAN_NAME_CONFIG_RESPONSE_FIELDS_HISTORIC = "configResponseFieldsHistoric";
public static final String BEAN_NAME_FORMATTER_DAQ_QUERIES = "formatterDAQQueries";
public static final String BEAN_NAME_FORMATTER_DAQ_CONFIG_QUERY = "formatterDAQConfigQuery";
public static final String BEAN_NAME_FORMATTER_ANY = "formatterAny";
public static final String BEAN_NAME_FORMATTER_HISTORIC_CHANNEL_CONFIGURATION =
"formatterHistoricChannelConfiguration";
@Resource @Resource
private ApplicationContext context; private ApplicationContext context;
@ -127,8 +140,7 @@ public class QueryRestConfig { // extends WebMvcConfigurerAdapter {
objectMapper.addMixIn(AbstractExtremaMeta.class, PropertyFilterMixin.class); objectMapper.addMixIn(AbstractExtremaMeta.class, PropertyFilterMixin.class);
objectMapper.addMixIn(EnumMap.class, PropertyFilterMixin.class); objectMapper.addMixIn(EnumMap.class, PropertyFilterMixin.class);
objectMapper.addMixIn(ChannelConfiguration.class, PropertyFilterMixin.class); objectMapper.addMixIn(ChannelConfiguration.class, HistoricChannelConfigurationPropertyFilterMixin.class);
objectMapper.addMixIn(ChannelConfiguration.class, ChannelRenameFilterMixin.class);
objectMapper.addMixIn(Response.class, PolymorphicResponseMixIn.class); objectMapper.addMixIn(Response.class, PolymorphicResponseMixIn.class);
} }
@ -188,36 +200,18 @@ public class QueryRestConfig { // extends WebMvcConfigurerAdapter {
return new JSONResponseStreamWriter(); return new JSONResponseStreamWriter();
} }
@Bean
@Lazy
public JSONTableResponseStreamWriter jsonTableResponseStreamWriter() {
return new JSONTableResponseStreamWriter();
}
@Bean @Bean
@Lazy @Lazy
public MsgPackResponseStreamWriter msgPackResponseStreamWriter() { public MsgPackResponseStreamWriter msgPackResponseStreamWriter() {
return new MsgPackResponseStreamWriter(); return new MsgPackResponseStreamWriter();
} }
@Bean
@Lazy
public MsgPackTableResponseStreamWriter msgPackTableResponseStreamWriter() {
return new MsgPackTableResponseStreamWriter();
}
@Bean @Bean
@Lazy @Lazy
public SmileResponseStreamWriter smileResponseStreamWriter() { public SmileResponseStreamWriter smileResponseStreamWriter() {
return new SmileResponseStreamWriter(); return new SmileResponseStreamWriter();
} }
@Bean
@Lazy
public SmileTableResponseStreamWriter smileTableResponseStreamWriter() {
return new SmileTableResponseStreamWriter();
}
@Bean @Bean
@Lazy @Lazy
public CSVResponseStreamWriter csvResponseStreamWriter() { public CSVResponseStreamWriter csvResponseStreamWriter() {
@ -230,32 +224,73 @@ public class QueryRestConfig { // extends WebMvcConfigurerAdapter {
return new QueryManagerImpl(); return new QueryManagerImpl();
} }
@Bean(name = BEAN_NAME_DEFAULT_RESPONSE_FIELDS) @Bean(name = BEAN_NAME_DEFAULT_EVENT_RESPONSE_FIELDS)
@Lazy @Lazy
public Set<QueryField> defaultResponseFields() { public Set<EventField> defaultEventResponseFields() {
String[] responseFields = String[] responseFields =
StringUtils.commaDelimitedListToStringArray(env.getProperty(QUERYREST_DEFAULT_RESPONSE_FIELDS)); StringUtils.commaDelimitedListToStringArray(env.getProperty(QUERYREST_RESPONSE_FIELDS_EVENT_QUERY));
LOGGER.debug("Load '{}={}'", BEAN_NAME_DEFAULT_RESPONSE_FIELDS, Arrays.toString(responseFields)); LOGGER.debug("Load '{}={}'", QUERYREST_RESPONSE_FIELDS_EVENT_QUERY, Arrays.toString(responseFields));
// preserve order // preserve order
LinkedHashSet<QueryField> defaultResponseFields = new LinkedHashSet<>(responseFields.length); LinkedHashSet<EventField> defaultResponseFields = new LinkedHashSet<>(responseFields.length);
for (String field : responseFields) { for (String field : responseFields) {
try { try {
defaultResponseFields.add(QueryField.valueOf(field)); defaultResponseFields.add(EventField.valueOf(field));
} catch (Exception e) { } catch (Exception e) {
LOGGER.error("Field '{}' in '{}' is invalid.", field, QUERYREST_DEFAULT_RESPONSE_FIELDS, e); LOGGER.error("Field '{}' in '{}' is invalid.", field, QUERYREST_RESPONSE_FIELDS_EVENT_QUERY, e);
} }
} }
return defaultResponseFields; return defaultResponseFields;
} }
@Bean(name = BEAN_NAME_DEFAULT_RESPONSE_AGGREGATIONS) @Bean(name = BEAN_NAME_CONFIG_RESPONSE_FIELDS_QUERY)
@Lazy
public Set<ConfigField> configResponseFieldsQuery() {
String[] responseFields =
StringUtils.commaDelimitedListToStringArray(env.getProperty(QUERYREST_RESPONSE_FIELDS_CONFIG_QUERY));
LOGGER.debug("Load '{}={}'", QUERYREST_RESPONSE_FIELDS_CONFIG_QUERY, Arrays.toString(responseFields));
// preserve order
LinkedHashSet<ConfigField> extractedResponseFields = new LinkedHashSet<>(responseFields.length);
for (String field : responseFields) {
try {
extractedResponseFields.add(ConfigField.valueOf(field));
} catch (Exception e) {
LOGGER.error("Field '{}' in '{}' is invalid.", field, QUERYREST_RESPONSE_FIELDS_CONFIG_QUERY, e);
}
}
return extractedResponseFields;
}
@Bean(name = BEAN_NAME_CONFIG_RESPONSE_FIELDS_HISTORIC)
@Lazy
public Set<ConfigField> configResponseFieldsHistoric() {
String[] responseFields =
StringUtils.commaDelimitedListToStringArray(env.getProperty(QUERYREST_RESPONSE_FIELDS_CONFIG_HISTORIC));
LOGGER.debug("Load '{}={}'", QUERYREST_RESPONSE_FIELDS_CONFIG_HISTORIC, Arrays.toString(responseFields));
// preserve order
LinkedHashSet<ConfigField> extractedResponseFields = new LinkedHashSet<>(responseFields.length);
for (String field : responseFields) {
try {
extractedResponseFields.add(ConfigField.valueOf(field));
} catch (Exception e) {
LOGGER.error("Field '{}' in '{}' is invalid.", field, QUERYREST_RESPONSE_FIELDS_CONFIG_HISTORIC, e);
}
}
return extractedResponseFields;
}
@Bean(name = BEAN_NAME_DEFAULT_EVENT_RESPONSE_AGGREGATIONS)
@Lazy @Lazy
public Set<Aggregation> defaultResponseAggregations() { public Set<Aggregation> defaultResponseAggregations() {
String[] responseAggregations = String[] responseAggregations =
StringUtils.commaDelimitedListToStringArray(env.getProperty(QUERYREST_DEFAULT_RESPONSE_AGGREGATIONS)); StringUtils.commaDelimitedListToStringArray(env.getProperty(QUERYREST_RESPONSE_FIELDS_EVENT_QUERY_AGGREGATIONS));
LOGGER.debug("Load '{}={}'", BEAN_NAME_DEFAULT_RESPONSE_AGGREGATIONS, Arrays.toString(responseAggregations)); LOGGER.debug("Load '{}={}'", QUERYREST_RESPONSE_FIELDS_EVENT_QUERY_AGGREGATIONS,
Arrays.toString(responseAggregations));
// preserve order // preserve order
LinkedHashSet<Aggregation> defaultResponseAggregations = new LinkedHashSet<>(responseAggregations.length); LinkedHashSet<Aggregation> defaultResponseAggregations = new LinkedHashSet<>(responseAggregations.length);
@ -263,7 +298,8 @@ public class QueryRestConfig { // extends WebMvcConfigurerAdapter {
try { try {
defaultResponseAggregations.add(Aggregation.valueOf(aggregation)); defaultResponseAggregations.add(Aggregation.valueOf(aggregation));
} catch (Exception e) { } catch (Exception e) {
LOGGER.error("Aggregation '{}' in '{}' is invalid.", aggregation, QUERYREST_DEFAULT_RESPONSE_AGGREGATIONS, LOGGER.error("Aggregation '{}' in '{}' is invalid.", aggregation,
QUERYREST_RESPONSE_FIELDS_EVENT_QUERY_AGGREGATIONS,
e); e);
} }
} }
@ -271,10 +307,16 @@ public class QueryRestConfig { // extends WebMvcConfigurerAdapter {
return defaultResponseAggregations; return defaultResponseAggregations;
} }
@Bean(name = BEAN_NAME_QUERY_VALIDATOR) @Bean(name = BEAN_NAME_EVENT_QUERY_VALIDATOR)
@Lazy @Lazy
public Validator queryValidator() { public Validator eventQueryValidator() {
return new QueryValidator(); return new EventQueryValidator();
}
@Bean(name = BEAN_NAME_CONFIG_QUERY_VALIDATOR)
@Lazy
public Validator configQueryValidator() {
return new ConfigQueryValidator();
} }
@Bean(name = BEAN_NAME_REQUEST_PROVIDER_VALIDATOR) @Bean(name = BEAN_NAME_REQUEST_PROVIDER_VALIDATOR)
@ -282,4 +324,32 @@ public class QueryRestConfig { // extends WebMvcConfigurerAdapter {
public Validator requestProviderValidator() { public Validator requestProviderValidator() {
return new RequestProviderValidator(); return new RequestProviderValidator();
} }
@Bean(name = BEAN_NAME_FORMATTER_DAQ_QUERIES)
@Lazy
public DAQQueriesResponseFormatter daqQueriesFormatter() {
return new DAQQueriesResponseFormatter();
}
@Bean(name = BEAN_NAME_FORMATTER_DAQ_CONFIG_QUERY)
@Lazy
public DAQConfigQueryResponseFormatter daqConfigQueryFormatter() {
return new DAQConfigQueryResponseFormatter();
}
@Bean(name = BEAN_NAME_FORMATTER_ANY)
@Lazy
public AnyResponseFormatter anyFormatter() {
return new AnyResponseFormatter(
BEAN_NAME_DEFAULT_EVENT_RESPONSE_FIELDS,
BEAN_NAME_CONFIG_RESPONSE_FIELDS_HISTORIC);
}
@Bean(name = BEAN_NAME_FORMATTER_HISTORIC_CHANNEL_CONFIGURATION)
@Lazy
public AnyResponseFormatter historicChannelConfigurationFormatter() {
return new AnyResponseFormatter(
BEAN_NAME_DEFAULT_EVENT_RESPONSE_FIELDS,
BEAN_NAME_CONFIG_RESPONSE_FIELDS_HISTORIC);
}
} }

View File

@ -3,14 +3,16 @@ package ch.psi.daq.queryrest.controller;
import java.net.URLDecoder; import java.net.URLDecoder;
import java.nio.charset.StandardCharsets; import java.nio.charset.StandardCharsets;
import java.util.Arrays; import java.util.Arrays;
import java.util.Collection;
import java.util.List; import java.util.List;
import java.util.Map.Entry;
import java.util.Set; import java.util.Set;
import java.util.stream.Collectors; import java.util.stream.Collectors;
import java.util.stream.Stream;
import javax.servlet.http.HttpServletResponse; import javax.servlet.http.HttpServletResponse;
import javax.validation.Valid; import javax.validation.Valid;
import org.apache.commons.lang3.tuple.Triple;
import org.slf4j.Logger; import org.slf4j.Logger;
import org.slf4j.LoggerFactory; import org.slf4j.LoggerFactory;
import org.springframework.beans.BeansException; import org.springframework.beans.BeansException;
@ -37,16 +39,19 @@ import com.google.common.collect.Lists;
import ch.psi.daq.common.ordering.Ordering; import ch.psi.daq.common.ordering.Ordering;
import ch.psi.daq.domain.backend.Backend; import ch.psi.daq.domain.backend.Backend;
import ch.psi.daq.domain.config.DomainConfig; import ch.psi.daq.domain.config.DomainConfig;
import ch.psi.daq.domain.json.channels.info.ChannelInfos; import ch.psi.daq.domain.json.ChannelName;
import ch.psi.daq.domain.query.ChannelNameRequest; import ch.psi.daq.domain.query.DAQConfigQuery;
import ch.psi.daq.domain.query.DAQConfigQueryElement;
import ch.psi.daq.domain.query.DAQQueries; import ch.psi.daq.domain.query.DAQQueries;
import ch.psi.daq.domain.query.DAQQuery; import ch.psi.daq.domain.query.DAQQuery;
import ch.psi.daq.domain.query.DAQQueryElement;
import ch.psi.daq.domain.query.backend.BackendQuery;
import ch.psi.daq.domain.query.channels.ChannelConfigurationsRequest;
import ch.psi.daq.domain.query.channels.ChannelsRequest; import ch.psi.daq.domain.query.channels.ChannelsRequest;
import ch.psi.daq.domain.query.channels.ChannelsResponse;
import ch.psi.daq.domain.query.operation.Aggregation; import ch.psi.daq.domain.query.operation.Aggregation;
import ch.psi.daq.domain.query.operation.AggregationType; import ch.psi.daq.domain.query.operation.AggregationType;
import ch.psi.daq.domain.query.operation.Compression; import ch.psi.daq.domain.query.operation.Compression;
import ch.psi.daq.domain.query.operation.QueryField; import ch.psi.daq.domain.query.operation.EventField;
import ch.psi.daq.domain.query.response.Response; import ch.psi.daq.domain.query.response.Response;
import ch.psi.daq.domain.query.response.ResponseFormat; import ch.psi.daq.domain.query.response.ResponseFormat;
import ch.psi.daq.domain.query.transform.image.color.ColorModelType; import ch.psi.daq.domain.query.transform.image.color.ColorModelType;
@ -55,6 +60,9 @@ import ch.psi.daq.queryrest.config.QueryRestConfig;
import ch.psi.daq.queryrest.query.QueryManager; import ch.psi.daq.queryrest.query.QueryManager;
import ch.psi.daq.queryrest.response.AbstractHTTPResponse; import ch.psi.daq.queryrest.response.AbstractHTTPResponse;
import ch.psi.daq.queryrest.response.PolymorphicResponseMixIn; import ch.psi.daq.queryrest.response.PolymorphicResponseMixIn;
import ch.psi.daq.queryrest.response.formatter.AnyResponseFormatter;
import ch.psi.daq.queryrest.response.formatter.DAQConfigQueryResponseFormatter;
import ch.psi.daq.queryrest.response.formatter.DAQQueriesResponseFormatter;
import ch.psi.daq.queryrest.response.json.JSONHTTPResponse; import ch.psi.daq.queryrest.response.json.JSONHTTPResponse;
@RestController @RestController
@ -66,9 +74,14 @@ public class QueryRestController implements ApplicationContextAware {
private ApplicationContext context; private ApplicationContext context;
private ObjectMapper objectMapper; private ObjectMapper objectMapper;
private QueryManager queryManager; private QueryManager queryManager;
private Validator queryValidator; private Validator eventQueryValidator;
private Validator configQueryValidator;
private Validator requestProviderValidator; private Validator requestProviderValidator;
private Response defaultResponse = new JSONHTTPResponse(); private Response defaultResponse = new JSONHTTPResponse();
private AnyResponseFormatter anyFormatter;
private AnyResponseFormatter historicConfigFormatter;
private DAQConfigQueryResponseFormatter configQueryFormatter;
private DAQQueriesResponseFormatter daqQueriesFormatter;
@SuppressWarnings("unchecked") @SuppressWarnings("unchecked")
@Override @Override
@ -80,8 +93,17 @@ public class QueryRestController implements ApplicationContextAware {
activeBackends = context.getBean(DomainConfig.BEAN_NAME_BACKENDS_ACTIVE, Set.class); activeBackends = context.getBean(DomainConfig.BEAN_NAME_BACKENDS_ACTIVE, Set.class);
objectMapper = context.getBean(DomainConfig.BEAN_NAME_OBJECT_MAPPER, ObjectMapper.class); objectMapper = context.getBean(DomainConfig.BEAN_NAME_OBJECT_MAPPER, ObjectMapper.class);
queryManager = context.getBean(QueryRestConfig.BEAN_NAME_QUERY_MANAGER, QueryManager.class); queryManager = context.getBean(QueryRestConfig.BEAN_NAME_QUERY_MANAGER, QueryManager.class);
queryValidator = context.getBean(QueryRestConfig.BEAN_NAME_QUERY_VALIDATOR, Validator.class); eventQueryValidator = context.getBean(QueryRestConfig.BEAN_NAME_EVENT_QUERY_VALIDATOR, Validator.class);
configQueryValidator = context.getBean(QueryRestConfig.BEAN_NAME_CONFIG_QUERY_VALIDATOR, Validator.class);
requestProviderValidator = context.getBean(QueryRestConfig.BEAN_NAME_REQUEST_PROVIDER_VALIDATOR, Validator.class); requestProviderValidator = context.getBean(QueryRestConfig.BEAN_NAME_REQUEST_PROVIDER_VALIDATOR, Validator.class);
anyFormatter = context.getBean(QueryRestConfig.BEAN_NAME_FORMATTER_ANY, AnyResponseFormatter.class);
historicConfigFormatter = context.getBean(QueryRestConfig.BEAN_NAME_FORMATTER_HISTORIC_CHANNEL_CONFIGURATION,
AnyResponseFormatter.class);
configQueryFormatter = context.getBean(QueryRestConfig.BEAN_NAME_FORMATTER_DAQ_CONFIG_QUERY,
DAQConfigQueryResponseFormatter.class);
daqQueriesFormatter =
context.getBean(QueryRestConfig.BEAN_NAME_FORMATTER_DAQ_QUERIES, DAQQueriesResponseFormatter.class);
} }
@InitBinder @InitBinder
@ -90,52 +112,126 @@ public class QueryRestController implements ApplicationContextAware {
if (requestProviderValidator.supports(binder.getTarget().getClass())) { if (requestProviderValidator.supports(binder.getTarget().getClass())) {
binder.addValidators(requestProviderValidator); binder.addValidators(requestProviderValidator);
} }
if (queryValidator.supports(binder.getTarget().getClass())) { if (eventQueryValidator.supports(binder.getTarget().getClass())) {
binder.addValidators(queryValidator); binder.addValidators(eventQueryValidator);
}
if (configQueryValidator.supports(binder.getTarget().getClass())) {
binder.addValidators(configQueryValidator);
} }
} }
} }
@RequestMapping(value = DomainConfig.PATH_CHANNELS, method = {RequestMethod.GET, RequestMethod.POST},
produces = {MediaType.APPLICATION_JSON_VALUE})
public @ResponseBody List<ChannelsResponse> getChannels(@RequestBody(required = false) ChannelsRequest request)
throws Throwable {
List<ChannelsResponse> channels = queryManager.getChannels(request);
channels = channels.stream()
.filter(channelsResponse -> activeBackends.contains(channelsResponse.getBackend()))
.collect(Collectors.toList());
return channels;
}
/**
* Query specific channel names, and return only those.
*
* @param channelName part of (or full) channel name
* @return Collection of channel names matching the specified input channel name
* @throws Throwable in case something goes wrong
*/
@RequestMapping(value = DomainConfig.PATH_CHANNELS + "/{channelName}", method = {RequestMethod.GET},
produces = {MediaType.APPLICATION_JSON_VALUE})
public @ResponseBody Collection<ChannelsResponse> getChannels(
@PathVariable(value = "channelName") String channelName)
throws Throwable {
return getChannels(new ChannelsRequest(channelName));
}
/**
* Queries for channels info
*
* @param request the ChannelNameRequest
* @return Collection of ChannelInfos
* @throws Throwable in case something goes wrong
*/
@RequestMapping( @RequestMapping(
value = DomainConfig.PATH_CHANNELS_INFO, value = DomainConfig.PATH_CHANNELS,
method = {RequestMethod.GET, RequestMethod.POST},
produces = {MediaType.APPLICATION_JSON_VALUE})
public void getChannels(@RequestBody(required = false) ChannelsRequest request,
final HttpServletResponse res)
throws Throwable {
((AbstractHTTPResponse) defaultResponse).respond(
context,
res,
null,
queryManager.getChannels(request),
anyFormatter);
}
@RequestMapping(
value = DomainConfig.PATH_CHANNELS + "/{channel}",
method = {RequestMethod.GET},
produces = {MediaType.APPLICATION_JSON_VALUE})
public void getChannels(
@PathVariable(value = "channel") final String channelName, final HttpServletResponse res)
throws Throwable {
getChannels(new ChannelsRequest(channelName), res);
}
@RequestMapping(
value = DomainConfig.PATH_CHANNELS_CONFIG,
method = {RequestMethod.GET, RequestMethod.POST},
produces = {MediaType.APPLICATION_JSON_VALUE})
public void getChannelConfigurations(@RequestBody(required = false) ChannelConfigurationsRequest request,
final HttpServletResponse res) throws Throwable {
((AbstractHTTPResponse) defaultResponse).respond(
context,
res,
null,
queryManager.getChannelConfigurations(request),
historicConfigFormatter);
}
@RequestMapping(
value = DomainConfig.PATH_CHANNELS_CONFIG + "/{channel}",
method = {RequestMethod.GET},
produces = {MediaType.APPLICATION_JSON_VALUE})
public void getChannelConfigurations(
@PathVariable(value = "channel") final String channelName, final HttpServletResponse res)
throws Throwable {
getChannelConfigurations(new ChannelConfigurationsRequest(channelName), res);
}
@RequestMapping(
value = DomainConfig.PATH_CHANNEL_CONFIG,
method = {RequestMethod.POST},
produces = {MediaType.APPLICATION_JSON_VALUE})
public void getChannelConfiguration(@RequestBody final ChannelName channelName, final HttpServletResponse res)
throws Throwable {
((AbstractHTTPResponse) defaultResponse).respond(
context,
res,
null,
queryManager.getChannelConfiguration(channelName),
historicConfigFormatter);
}
@RequestMapping(
value = DomainConfig.PATH_CHANNEL_CONFIG + "/{channel}",
method = {RequestMethod.GET},
produces = {MediaType.APPLICATION_JSON_VALUE})
public void getChannelConfiguration(
@PathVariable(value = "channel") final String channelName, final HttpServletResponse res)
throws Throwable {
getChannelConfiguration(new ChannelName(channelName), res);
}
@RequestMapping(
value = DomainConfig.PATH_QUERY_CONFIG,
method = RequestMethod.POST, method = RequestMethod.POST,
consumes = {MediaType.APPLICATION_JSON_VALUE}) consumes = {MediaType.APPLICATION_JSON_VALUE})
public @ResponseBody Collection<ChannelInfos> executeChannelInfoQuery(@RequestBody ChannelNameRequest request) public void executeDAQConfigQuery(@RequestBody @Valid final DAQConfigQuery query, final HttpServletResponse res)
throws Throwable { throws Throwable {
return queryManager.getChannelInfos(request); try {
LOGGER.debug("Executing queries '{}'", query);
final Response response = query.getResponseOrDefault(defaultResponse);
if (response instanceof AbstractHTTPResponse) {
LOGGER.debug("Executing config query '{}'", query);
final AbstractHTTPResponse httpResponse = ((AbstractHTTPResponse) response);
httpResponse.validateQuery(query);
// execute query
final Entry<DAQConfigQueryElement, Stream<Triple<BackendQuery, ChannelName, ?>>> result =
queryManager.queryConfigs(query);
httpResponse.respond(
context,
res,
query,
result,
configQueryFormatter);
} else {
final String message =
String.format(
"Expecting Response of type '%s' but received '%s'. Check JSON deserialization defined in '%s'",
AbstractHTTPResponse.class.getName(), response.getClass().getName(),
PolymorphicResponseMixIn.class.getName());
LOGGER.error(message);
throw new IllegalArgumentException(message);
}
} catch (Exception e) {
LOGGER.error("Failed to execute config query '{}'.", query, e);
throw e;
}
} }
/** /**
@ -144,13 +240,12 @@ public class QueryRestController implements ApplicationContextAware {
* @param jsonBody The {@link DAQQuery} properties sent as a JSON string, i.e. this is the * @param jsonBody The {@link DAQQuery} properties sent as a JSON string, i.e. this is the
* stringified body of the POST request method * stringified body of the POST request method
* @param res the current {@link HttpServletResponse} instance * @param res the current {@link HttpServletResponse} instance
* @throws Exception if reading the JSON string fails or if the subsequent call to * @throws Exception if reading the JSON string fails or if the subsequent call fails
* {@link #executeQuery(DAQQuery, HttpServletResponse)} fails
*/ */
@RequestMapping( @RequestMapping(
value = DomainConfig.PATH_QUERY, value = DomainConfig.PATH_QUERY,
method = RequestMethod.GET) method = RequestMethod.GET)
public void executeQueryBodyAsString(@RequestParam String jsonBody, HttpServletResponse res) throws Exception { public void executeDAQQueryBodyAsString(@RequestParam String jsonBody, HttpServletResponse res) throws Exception {
DAQQuery query; DAQQuery query;
try { try {
query = objectMapper.readValue(jsonBody, DAQQuery.class); query = objectMapper.readValue(jsonBody, DAQQuery.class);
@ -165,13 +260,13 @@ public class QueryRestController implements ApplicationContextAware {
if (requestProviderValidator.supports(query.getClass())) { if (requestProviderValidator.supports(query.getClass())) {
requestProviderValidator.validate(query, errors); requestProviderValidator.validate(query, errors);
} }
if (queryValidator.supports(query.getClass())) { if (eventQueryValidator.supports(query.getClass())) {
queryValidator.validate(query, errors); eventQueryValidator.validate(query, errors);
} }
final List<ObjectError> allErrors = errors.getAllErrors(); final List<ObjectError> allErrors = errors.getAllErrors();
if (allErrors.isEmpty()) { if (allErrors.isEmpty()) {
executeQuery(query, res); executeDAQQuery(query, res);
} else { } else {
final String message = String.format("Could not parse '%s' due to '%s'.", jsonBody, errors.toString()); final String message = String.format("Could not parse '%s' due to '%s'.", jsonBody, errors.toString());
LOGGER.error(message); LOGGER.error(message);
@ -190,8 +285,8 @@ public class QueryRestController implements ApplicationContextAware {
value = DomainConfig.PATH_QUERY, value = DomainConfig.PATH_QUERY,
method = RequestMethod.POST, method = RequestMethod.POST,
consumes = {MediaType.APPLICATION_JSON_VALUE}) consumes = {MediaType.APPLICATION_JSON_VALUE})
public void executeQuery(@RequestBody @Valid DAQQuery query, HttpServletResponse res) throws Exception { public void executeDAQQuery(@RequestBody @Valid DAQQuery query, HttpServletResponse res) throws Exception {
executeQueries(new DAQQueries(query), res); executeDAQQueries(new DAQQueries(query), res);
} }
/** /**
@ -200,13 +295,12 @@ public class QueryRestController implements ApplicationContextAware {
* @param jsonBody The {@link DAQQueries} properties sent as a JSON string, i.e. this is the * @param jsonBody The {@link DAQQueries} properties sent as a JSON string, i.e. this is the
* stringified body of the POST request method * stringified body of the POST request method
* @param res the current {@link HttpServletResponse} instance * @param res the current {@link HttpServletResponse} instance
* @throws Exception if reading the JSON string fails or if the subsequent call to * @throws Exception if reading the JSON string fails or if the subsequent call fails
* {@link #executeQueries(DAQQueries, HttpServletResponse)} fails
*/ */
@RequestMapping( @RequestMapping(
value = DomainConfig.PATH_QUERIES, value = DomainConfig.PATH_QUERIES,
method = RequestMethod.GET) method = RequestMethod.GET)
public void executeQueriesBodyAsString(@RequestParam String jsonBody, HttpServletResponse res) throws Exception { public void executeDAQQueriesBodyAsString(@RequestParam String jsonBody, HttpServletResponse res) throws Exception {
DAQQueries queries; DAQQueries queries;
try { try {
queries = objectMapper.readValue(jsonBody, DAQQueries.class); queries = objectMapper.readValue(jsonBody, DAQQueries.class);
@ -221,13 +315,13 @@ public class QueryRestController implements ApplicationContextAware {
if (requestProviderValidator.supports(queries.getClass())) { if (requestProviderValidator.supports(queries.getClass())) {
requestProviderValidator.validate(queries, errors); requestProviderValidator.validate(queries, errors);
} }
if (queryValidator.supports(queries.getClass())) { if (eventQueryValidator.supports(queries.getClass())) {
queryValidator.validate(queries, errors); eventQueryValidator.validate(queries, errors);
} }
final List<ObjectError> allErrors = errors.getAllErrors(); final List<ObjectError> allErrors = errors.getAllErrors();
if (allErrors.isEmpty()) { if (allErrors.isEmpty()) {
executeQueries(queries, res); executeDAQQueries(queries, res);
} else { } else {
final String message = String.format("Could not parse '%s' due to '%s'.", jsonBody, errors.toString()); final String message = String.format("Could not parse '%s' due to '%s'.", jsonBody, errors.toString());
LOGGER.error(message); LOGGER.error(message);
@ -251,15 +345,28 @@ public class QueryRestController implements ApplicationContextAware {
value = DomainConfig.PATH_QUERIES, value = DomainConfig.PATH_QUERIES,
method = RequestMethod.POST, method = RequestMethod.POST,
consumes = {MediaType.APPLICATION_JSON_VALUE}) consumes = {MediaType.APPLICATION_JSON_VALUE})
public void executeQueries(@RequestBody @Valid DAQQueries queries, HttpServletResponse res) throws Exception { public void executeDAQQueries(@RequestBody @Valid DAQQueries queries, HttpServletResponse res) throws Exception {
try { try {
LOGGER.debug("Executing queries '{}'", queries); LOGGER.debug("Executing queries '{}'", queries);
Response response = queries.getResponseOrDefault(defaultResponse); final Response response = queries.getResponseOrDefault(defaultResponse);
if (response instanceof AbstractHTTPResponse) { if (response instanceof AbstractHTTPResponse) {
((AbstractHTTPResponse) response).respond(context, queries, res); LOGGER.debug("Executing query '{}'", queries);
final AbstractHTTPResponse httpResponse = ((AbstractHTTPResponse) response);
httpResponse.validateQuery(queries);
// execute query
final List<Entry<DAQQueryElement, Stream<Triple<BackendQuery, ChannelName, ?>>>> result =
queryManager.queryEvents(queries);
httpResponse.respond(
context,
res,
queries,
result,
daqQueriesFormatter);
} else { } else {
String message = final String message =
String.format( String.format(
"Expecting Response of type '%s' but received '%s'. Check JSON deserialization defined in '%s'", "Expecting Response of type '%s' but received '%s'. Check JSON deserialization defined in '%s'",
AbstractHTTPResponse.class.getName(), response.getClass().getName(), AbstractHTTPResponse.class.getName(), response.getClass().getName(),
@ -267,8 +374,6 @@ public class QueryRestController implements ApplicationContextAware {
LOGGER.error(message); LOGGER.error(message);
throw new IllegalArgumentException(message); throw new IllegalArgumentException(message);
} }
} catch (Exception e) { } catch (Exception e) {
LOGGER.error("Failed to execute query '{}'.", queries, e); LOGGER.error("Failed to execute query '{}'.", queries, e);
throw e; throw e;
@ -280,7 +385,9 @@ public class QueryRestController implements ApplicationContextAware {
* *
* @return list of {@link Ordering}s as String array * @return list of {@link Ordering}s as String array
*/ */
@RequestMapping(value = DomainConfig.PATH_PARAMETERS_ROOT + "/ordering", method = {RequestMethod.GET}, @RequestMapping(
value = DomainConfig.PATH_PARAMETERS_ROOT + "/ordering",
method = {RequestMethod.GET},
produces = {MediaType.APPLICATION_JSON_VALUE}) produces = {MediaType.APPLICATION_JSON_VALUE})
public @ResponseBody List<Ordering> getOrderingValues() { public @ResponseBody List<Ordering> getOrderingValues() {
return Lists.newArrayList(Ordering.values()); return Lists.newArrayList(Ordering.values());
@ -291,21 +398,25 @@ public class QueryRestController implements ApplicationContextAware {
* *
* @return list of {@link Ordering}s as String array * @return list of {@link Ordering}s as String array
*/ */
@RequestMapping(value = DomainConfig.PATH_PARAMETERS_ROOT + "/responseformat", method = {RequestMethod.GET}, @RequestMapping(
value = DomainConfig.PATH_PARAMETERS_ROOT + "/responseformat",
method = {RequestMethod.GET},
produces = {MediaType.APPLICATION_JSON_VALUE}) produces = {MediaType.APPLICATION_JSON_VALUE})
public @ResponseBody List<ResponseFormat> getResponseFormatValues() { public @ResponseBody List<ResponseFormat> getResponseFormatValues() {
return Lists.newArrayList(ResponseFormat.values()); return Lists.newArrayList(ResponseFormat.values());
} }
/** /**
* Returns the current list of {@link QueryField}s available. * Returns the current list of {@link EventField}s available.
* *
* @return list of {@link QueryField}s as String array * @return list of {@link EventField}s as String array
*/ */
@RequestMapping(value = DomainConfig.PATH_PARAMETERS_ROOT + "/queryfields", method = {RequestMethod.GET}, @RequestMapping(
value = DomainConfig.PATH_PARAMETERS_ROOT + "/queryfields",
method = {RequestMethod.GET},
produces = {MediaType.APPLICATION_JSON_VALUE}) produces = {MediaType.APPLICATION_JSON_VALUE})
public @ResponseBody List<QueryField> getQueryFieldValues() { public @ResponseBody List<EventField> getQueryFieldValues() {
return Arrays.stream(QueryField.values()) return Arrays.stream(EventField.values())
.filter(queryField -> queryField.isPublish()) .filter(queryField -> queryField.isPublish())
.collect(Collectors.toList()); .collect(Collectors.toList());
} }
@ -315,7 +426,9 @@ public class QueryRestController implements ApplicationContextAware {
* *
* @return list of {@link Aggregation}s as String array * @return list of {@link Aggregation}s as String array
*/ */
@RequestMapping(value = DomainConfig.PATH_PARAMETERS_ROOT + "/aggregations", method = {RequestMethod.GET}, @RequestMapping(
value = DomainConfig.PATH_PARAMETERS_ROOT + "/aggregations",
method = {RequestMethod.GET},
produces = {MediaType.APPLICATION_JSON_VALUE}) produces = {MediaType.APPLICATION_JSON_VALUE})
public @ResponseBody List<Aggregation> getAggregationValues() { public @ResponseBody List<Aggregation> getAggregationValues() {
return Lists.newArrayList(Aggregation.values()); return Lists.newArrayList(Aggregation.values());
@ -326,7 +439,9 @@ public class QueryRestController implements ApplicationContextAware {
* *
* @return list of {@link AggregationType}s as String array * @return list of {@link AggregationType}s as String array
*/ */
@RequestMapping(value = DomainConfig.PATH_PARAMETERS_ROOT + "/aggregationtypes", method = {RequestMethod.GET}, @RequestMapping(
value = DomainConfig.PATH_PARAMETERS_ROOT + "/aggregationtypes",
method = {RequestMethod.GET},
produces = {MediaType.APPLICATION_JSON_VALUE}) produces = {MediaType.APPLICATION_JSON_VALUE})
public @ResponseBody List<AggregationType> getAggregationTypeValues() { public @ResponseBody List<AggregationType> getAggregationTypeValues() {
return Lists.newArrayList(AggregationType.values()); return Lists.newArrayList(AggregationType.values());
@ -337,7 +452,9 @@ public class QueryRestController implements ApplicationContextAware {
* *
* @return list of {@link Backend}s as String array * @return list of {@link Backend}s as String array
*/ */
@RequestMapping(value = DomainConfig.PATH_BACKENDS, method = {RequestMethod.GET}, @RequestMapping(
value = DomainConfig.PATH_BACKENDS,
method = {RequestMethod.GET},
produces = {MediaType.APPLICATION_JSON_VALUE}) produces = {MediaType.APPLICATION_JSON_VALUE})
public @ResponseBody List<Backend> getBackendValues() { public @ResponseBody List<Backend> getBackendValues() {
return Backend.getBackends().stream() return Backend.getBackends().stream()
@ -350,7 +467,9 @@ public class QueryRestController implements ApplicationContextAware {
* *
* @return list of {@link Compression}s as String array * @return list of {@link Compression}s as String array
*/ */
@RequestMapping(value = DomainConfig.PATH_PARAMETERS_ROOT + "/compression", method = {RequestMethod.GET}, @RequestMapping(
value = DomainConfig.PATH_PARAMETERS_ROOT + "/compression",
method = {RequestMethod.GET},
produces = {MediaType.APPLICATION_JSON_VALUE}) produces = {MediaType.APPLICATION_JSON_VALUE})
public @ResponseBody List<Compression> getCompressionValues() { public @ResponseBody List<Compression> getCompressionValues() {
return Lists.newArrayList(Compression.values()); return Lists.newArrayList(Compression.values());
@ -361,7 +480,9 @@ public class QueryRestController implements ApplicationContextAware {
* *
* @return list of {@link ValueAggregation}s as String array * @return list of {@link ValueAggregation}s as String array
*/ */
@RequestMapping(value = DomainConfig.PATH_PARAMETERS_ROOT + "/valueaggregations", method = {RequestMethod.GET}, @RequestMapping(
value = DomainConfig.PATH_PARAMETERS_ROOT + "/valueaggregations",
method = {RequestMethod.GET},
produces = {MediaType.APPLICATION_JSON_VALUE}) produces = {MediaType.APPLICATION_JSON_VALUE})
public @ResponseBody List<ValueAggregation> getValueAggregations() { public @ResponseBody List<ValueAggregation> getValueAggregations() {
return Lists.newArrayList(ValueAggregation.values()); return Lists.newArrayList(ValueAggregation.values());
@ -372,7 +493,9 @@ public class QueryRestController implements ApplicationContextAware {
* *
* @return list of {@link ValueAggregation}s as String array * @return list of {@link ValueAggregation}s as String array
*/ */
@RequestMapping(value = DomainConfig.PATH_PARAMETERS_ROOT + "/colormodeltypes", method = {RequestMethod.GET}, @RequestMapping(
value = DomainConfig.PATH_PARAMETERS_ROOT + "/colormodeltypes",
method = {RequestMethod.GET},
produces = {MediaType.APPLICATION_JSON_VALUE}) produces = {MediaType.APPLICATION_JSON_VALUE})
public @ResponseBody List<ColorModelType> getColorModelTypes() { public @ResponseBody List<ColorModelType> getColorModelTypes() {
return Lists.newArrayList(ColorModelType.values()); return Lists.newArrayList(ColorModelType.values());

View File

@ -0,0 +1,48 @@
package ch.psi.daq.queryrest.controller.validator;
import java.util.LinkedHashSet;
import java.util.Set;
import org.springframework.beans.BeansException;
import org.springframework.context.ApplicationContext;
import org.springframework.context.ApplicationContextAware;
import org.springframework.validation.Errors;
import org.springframework.validation.Validator;
import ch.psi.daq.domain.backend.Backend;
import ch.psi.daq.domain.config.DomainConfig;
import ch.psi.daq.domain.query.DAQConfigQuery;
import ch.psi.daq.domain.query.operation.ConfigField;
import ch.psi.daq.queryrest.config.QueryRestConfig;
public class ConfigQueryValidator implements Validator, ApplicationContextAware {
private Set<ConfigField> queryResponseFields;
@SuppressWarnings("unchecked")
@Override
public void setApplicationContext(ApplicationContext context) throws BeansException {
final Backend backend = context.getBean(DomainConfig.BEAN_NAME_BACKEND_DEFAULT, Backend.class);
context = backend.getApplicationContext();
queryResponseFields = context.getBean(QueryRestConfig.BEAN_NAME_CONFIG_RESPONSE_FIELDS_QUERY, Set.class);
}
@Override
public boolean supports(final Class<?> clazz) {
return DAQConfigQuery.class.isAssignableFrom(clazz);
}
@Override
public void validate(final Object target, final Errors errors) {
if (target instanceof DAQConfigQuery) {
this.checkElement((DAQConfigQuery) target, errors);
}
}
private void checkElement(final DAQConfigQuery query, final Errors errors) {
// set default values (if not set)
if (query.getFields() == null || query.getFields().isEmpty()) {
query.setFields(new LinkedHashSet<>(queryResponseFields));
}
}
}

View File

@ -16,14 +16,14 @@ import ch.psi.daq.domain.query.DAQQueries;
import ch.psi.daq.domain.query.DAQQuery; import ch.psi.daq.domain.query.DAQQuery;
import ch.psi.daq.domain.query.DAQQueryElement; import ch.psi.daq.domain.query.DAQQueryElement;
import ch.psi.daq.domain.query.operation.Aggregation; import ch.psi.daq.domain.query.operation.Aggregation;
import ch.psi.daq.domain.query.operation.QueryField; import ch.psi.daq.domain.query.operation.EventField;
import ch.psi.daq.domain.query.transform.ExecutionEnvironment; import ch.psi.daq.domain.query.transform.ExecutionEnvironment;
import ch.psi.daq.domain.query.transform.ValueTransformationSequence; import ch.psi.daq.domain.query.transform.ValueTransformationSequence;
import ch.psi.daq.domain.request.Request; import ch.psi.daq.domain.request.Request;
import ch.psi.daq.queryrest.config.QueryRestConfig; import ch.psi.daq.queryrest.config.QueryRestConfig;
public class QueryValidator implements Validator, ApplicationContextAware { public class EventQueryValidator implements Validator, ApplicationContextAware {
private Set<QueryField> defaultResponseFields; private Set<EventField> defaultResponseFields;
private Set<Aggregation> defaultResponseAggregations; private Set<Aggregation> defaultResponseAggregations;
@SuppressWarnings("unchecked") @SuppressWarnings("unchecked")
@ -32,8 +32,8 @@ public class QueryValidator implements Validator, ApplicationContextAware {
final Backend backend = context.getBean(DomainConfig.BEAN_NAME_BACKEND_DEFAULT, Backend.class); final Backend backend = context.getBean(DomainConfig.BEAN_NAME_BACKEND_DEFAULT, Backend.class);
context = backend.getApplicationContext(); context = backend.getApplicationContext();
defaultResponseFields = context.getBean(QueryRestConfig.BEAN_NAME_DEFAULT_RESPONSE_FIELDS, Set.class); defaultResponseFields = context.getBean(QueryRestConfig.BEAN_NAME_DEFAULT_EVENT_RESPONSE_FIELDS, Set.class);
defaultResponseAggregations = context.getBean(QueryRestConfig.BEAN_NAME_DEFAULT_RESPONSE_AGGREGATIONS, Set.class); defaultResponseAggregations = context.getBean(QueryRestConfig.BEAN_NAME_DEFAULT_EVENT_RESPONSE_AGGREGATIONS, Set.class);
} }
@Override @Override
@ -97,7 +97,7 @@ public class QueryValidator implements Validator, ApplicationContextAware {
if (query.getValueTransformations() != null && !query.getValueTransformations().isEmpty()) { if (query.getValueTransformations() != null && !query.getValueTransformations().isEmpty()) {
// without this field, json will not contain transformedValue // without this field, json will not contain transformedValue
query.addField(QueryField.transformedValue); query.addField(EventField.transformedValue);
for (final ValueTransformationSequence transformationSequence : query.getValueTransformations()) { for (final ValueTransformationSequence transformationSequence : query.getValueTransformations()) {
transformationSequence.setExecutionEnvironment(ExecutionEnvironment.QUERYING); transformationSequence.setExecutionEnvironment(ExecutionEnvironment.QUERYING);

View File

@ -1,15 +0,0 @@
package ch.psi.daq.queryrest.model;
import com.fasterxml.jackson.annotation.JsonFilter;
import com.fasterxml.jackson.annotation.JsonProperty;
/**
* Kind of marker for ObjectMapper MixIn
*/
@JsonFilter("channelRenameFilter")
public interface ChannelRenameFilterMixin {
public static final String FILTER_NAME = "channelRenameFilter";
@JsonProperty("name")
String getChannel();
}

View File

@ -0,0 +1,12 @@
package ch.psi.daq.queryrest.model;
import com.fasterxml.jackson.annotation.JsonFilter;
/**
* Kind of marker for ObjectMapper MixIn. Use this instead of to make sure we can use different
* properties in case ChannelEvents and ChannelConfigurations are mixed in a response.
*/
@JsonFilter("historicChannelConfigurationPropertyFilter")
public class HistoricChannelConfigurationPropertyFilterMixin {
public static final String FILTER_NAME = "historicChannelConfigurationPropertyFilter";
}

View File

@ -1,27 +1,33 @@
package ch.psi.daq.queryrest.query; package ch.psi.daq.queryrest.query;
import java.util.Collection;
import java.util.List; import java.util.List;
import java.util.Map.Entry; import java.util.Map.Entry;
import java.util.stream.Stream; import java.util.stream.Stream;
import org.apache.commons.lang3.tuple.Triple; import org.apache.commons.lang3.tuple.Triple;
import ch.psi.daq.domain.events.ChannelConfiguration;
import ch.psi.daq.domain.json.ChannelName; import ch.psi.daq.domain.json.ChannelName;
import ch.psi.daq.domain.json.channels.info.ChannelInfos; import ch.psi.daq.domain.query.DAQConfigQuery;
import ch.psi.daq.domain.query.ChannelNameRequest; import ch.psi.daq.domain.query.DAQConfigQueryElement;
import ch.psi.daq.domain.query.DAQQueries; import ch.psi.daq.domain.query.DAQQueries;
import ch.psi.daq.domain.query.DAQQueryElement; import ch.psi.daq.domain.query.DAQQueryElement;
import ch.psi.daq.domain.query.backend.BackendQuery; import ch.psi.daq.domain.query.backend.BackendQuery;
import ch.psi.daq.domain.query.channels.ChannelConfigurationsRequest;
import ch.psi.daq.domain.query.channels.ChannelConfigurationsResponse;
import ch.psi.daq.domain.query.channels.ChannelsRequest; import ch.psi.daq.domain.query.channels.ChannelsRequest;
import ch.psi.daq.domain.query.channels.ChannelsResponse; import ch.psi.daq.domain.query.channels.ChannelsResponse;
public interface QueryManager { public interface QueryManager {
List<ChannelsResponse> getChannels(final ChannelsRequest request) throws Exception; Stream<ChannelsResponse> getChannels(final ChannelsRequest request) throws Exception;
Collection<ChannelInfos> getChannelInfos(final ChannelNameRequest request) throws Exception; Stream<ChannelConfigurationsResponse> getChannelConfigurations(final ChannelConfigurationsRequest request) throws Exception;
List<Entry<DAQQueryElement, Stream<Triple<BackendQuery, ChannelName, ?>>>> getEvents(final DAQQueries queries) ChannelConfiguration getChannelConfiguration(final ChannelName channel) throws Exception;
Entry<DAQConfigQueryElement, Stream<Triple<BackendQuery, ChannelName, ?>>> queryConfigs(final DAQConfigQuery query) throws Exception;
List<Entry<DAQQueryElement, Stream<Triple<BackendQuery, ChannelName, ?>>>> queryEvents(final DAQQueries queries)
throws Exception; throws Exception;
} }

View File

@ -1,8 +1,8 @@
package ch.psi.daq.queryrest.query; package ch.psi.daq.queryrest.query;
import java.util.ArrayList; import java.util.ArrayList;
import java.util.Collection;
import java.util.List; import java.util.List;
import java.util.Map;
import java.util.Map.Entry; import java.util.Map.Entry;
import java.util.function.Function; import java.util.function.Function;
import java.util.stream.Collectors; import java.util.stream.Collectors;
@ -19,24 +19,26 @@ import org.springframework.context.ApplicationContextAware;
import ch.psi.daq.domain.DataEvent; import ch.psi.daq.domain.DataEvent;
import ch.psi.daq.domain.backend.Backend; import ch.psi.daq.domain.backend.Backend;
import ch.psi.daq.domain.config.DomainConfig; import ch.psi.daq.domain.config.DomainConfig;
import ch.psi.daq.domain.events.ChannelConfiguration;
import ch.psi.daq.domain.json.ChannelName; import ch.psi.daq.domain.json.ChannelName;
import ch.psi.daq.domain.json.channels.info.ChannelInfos; import ch.psi.daq.domain.query.DAQConfigQuery;
import ch.psi.daq.domain.query.ChannelNameRequest; import ch.psi.daq.domain.query.DAQConfigQueryElement;
import ch.psi.daq.domain.query.DAQQueries; import ch.psi.daq.domain.query.DAQQueries;
import ch.psi.daq.domain.query.DAQQueryElement; import ch.psi.daq.domain.query.DAQQueryElement;
import ch.psi.daq.domain.query.backend.BackendQuery; import ch.psi.daq.domain.query.backend.BackendQuery;
import ch.psi.daq.domain.query.backend.BackendQueryImpl; import ch.psi.daq.domain.query.backend.BackendQueryImpl;
import ch.psi.daq.domain.query.backend.analyzer.BackendQueryAnalyzer; import ch.psi.daq.domain.query.backend.analyzer.BackendQueryAnalyzer;
import ch.psi.daq.domain.query.channels.ChannelNameCache; import ch.psi.daq.domain.query.channels.BackendsChannelConfigurationCache;
import ch.psi.daq.domain.query.channels.ChannelConfigurationsRequest;
import ch.psi.daq.domain.query.channels.ChannelConfigurationsResponse;
import ch.psi.daq.domain.query.channels.ChannelsRequest; import ch.psi.daq.domain.query.channels.ChannelsRequest;
import ch.psi.daq.domain.query.channels.ChannelsResponse; import ch.psi.daq.domain.query.channels.ChannelsResponse;
import ch.psi.daq.domain.query.processor.QueryProcessor; import ch.psi.daq.domain.query.processor.QueryProcessor;
import ch.psi.daq.query.config.QueryConfig; import ch.psi.daq.query.config.QueryConfig;
import ch.psi.daq.queryrest.config.QueryRestConfig; import ch.psi.daq.queryrest.config.QueryRestConfig;
import ch.psi.daq.queryrest.query.model.ChannelInfosStreamImpl;
public class QueryManagerImpl implements QueryManager, ApplicationContextAware { public class QueryManagerImpl implements QueryManager, ApplicationContextAware {
private ChannelNameCache channelNameCache; private BackendsChannelConfigurationCache channelsCache;
private Function<BackendQuery, BackendQueryAnalyzer> queryAnalizerFactory; private Function<BackendQuery, BackendQueryAnalyzer> queryAnalizerFactory;
@SuppressWarnings("unchecked") @SuppressWarnings("unchecked")
@ -45,7 +47,7 @@ public class QueryManagerImpl implements QueryManager, ApplicationContextAware {
final Backend backend = context.getBean(DomainConfig.BEAN_NAME_BACKEND_DEFAULT, Backend.class); final Backend backend = context.getBean(DomainConfig.BEAN_NAME_BACKEND_DEFAULT, Backend.class);
context = backend.getApplicationContext(); context = backend.getApplicationContext();
channelNameCache = context.getBean(QueryConfig.BEAN_NAME_CHANNEL_NAME_CACHE, ChannelNameCache.class); channelsCache = context.getBean(QueryConfig.BEAN_NAME_HISTORIC_CHANNELS_CACHE, BackendsChannelConfigurationCache.class);
queryAnalizerFactory = context.getBean(QueryRestConfig.BEAN_NAME_QUERY_ANALIZER_FACTORY, Function.class); queryAnalizerFactory = context.getBean(QueryRestConfig.BEAN_NAME_QUERY_ANALIZER_FACTORY, Function.class);
} }
@ -53,41 +55,67 @@ public class QueryManagerImpl implements QueryManager, ApplicationContextAware {
public void destroy() {} public void destroy() {}
@Override @Override
public List<ChannelsResponse> getChannels(ChannelsRequest request) { public Stream<ChannelsResponse> getChannels(ChannelsRequest request) {
// in case not specified use defaults (e.g. GET) // in case not specified use defaults (e.g. GET)
if (request == null) { if (request == null) {
request = new ChannelsRequest(); request = new ChannelsRequest();
} }
return channelNameCache.getChannels(request); return channelsCache.getChannels(request);
}
public Collection<ChannelInfos> getChannelInfos(final ChannelNameRequest request) {
// set backends if not defined yet
channelNameCache.configureBackends(request.getChannels());
final Stream<ChannelInfos> stream = request.getRequestsByBackend().entrySet().stream()
.filter(entry -> entry.getKey().getBackendAccess().hasDataReader()
&& entry.getKey().getBackendAccess().hasChannelInfoReader())
.flatMap(entry -> {
return entry.getValue().getChannelInfos(entry.getKey())
.entrySet().stream()
.map(innerEntry -> {
return new ChannelInfosStreamImpl(
new ChannelName(innerEntry.getKey(), entry.getKey()),
innerEntry.getValue());
});
});
// materialize
return stream.collect(Collectors.toList());
} }
@Override @Override
public List<Entry<DAQQueryElement, Stream<Triple<BackendQuery, ChannelName, ?>>>> getEvents( public Stream<ChannelConfigurationsResponse> getChannelConfigurations(ChannelConfigurationsRequest request) {
// in case not specified use defaults (e.g. GET)
if (request == null) {
request = new ChannelConfigurationsRequest();
}
return channelsCache.getChannelConfigurations(request);
}
@Override
public ChannelConfiguration getChannelConfiguration(ChannelName channel) {
return channelsCache.getChannelConfiguration(channel);
}
@Override
public Entry<DAQConfigQueryElement, Stream<Triple<BackendQuery, ChannelName, ?>>> queryConfigs(
final DAQConfigQuery daqQuery) {
// set backends if not defined yet
channelsCache.configureBackends(daqQuery.getChannels());
Stream<Triple<BackendQuery, ChannelName, ?>> resultStreams =
BackendQueryImpl
.getBackendQueries(daqQuery)
.stream()
.filter(
query -> query.getBackend().getBackendAccess().hasStreamEventReader())
.flatMap(
query -> {
/* all the magic happens here */
final Map<String, Stream<? extends ChannelConfiguration>> channelToConfig =
query.getChannelConfigurations();
return channelToConfig.entrySet().stream()
.map(entry -> {
return Triple.of(
query,
new ChannelName(entry.getKey(), query.getBackend()),
entry.getValue());
});
});
return Pair.of(daqQuery, resultStreams);
}
@Override
public List<Entry<DAQQueryElement, Stream<Triple<BackendQuery, ChannelName, ?>>>> queryEvents(
final DAQQueries queries) { final DAQQueries queries) {
// set backends if not defined yet // set backends if not defined yet
channelNameCache.configureBackends(queries); for (DAQQueryElement daqQuery : queries) {
channelsCache.configureBackends(daqQuery.getChannels());
}
final List<Entry<DAQQueryElement, Stream<Triple<BackendQuery, ChannelName, ?>>>> results = final List<Entry<DAQQueryElement, Stream<Triple<BackendQuery, ChannelName, ?>>>> results =
new ArrayList<>(queries.getQueries().size()); new ArrayList<>(queries.getQueries().size());
@ -102,7 +130,8 @@ public class QueryManagerImpl implements QueryManager, ApplicationContextAware {
&& query.getBackend().getBackendAccess().hasQueryProcessor()) && query.getBackend().getBackendAccess().hasQueryProcessor())
.flatMap( .flatMap(
query -> { query -> {
final QueryProcessor processor = query.getBackend().getBackendAccess().getQueryProcessor(); final QueryProcessor processor =
query.getBackend().getBackendAccess().getQueryProcessor();
final BackendQueryAnalyzer queryAnalizer = queryAnalizerFactory.apply(query); final BackendQueryAnalyzer queryAnalizer = queryAnalizerFactory.apply(query);
/* all the magic happens here */ /* all the magic happens here */

View File

@ -1,44 +0,0 @@
package ch.psi.daq.queryrest.query.model;
import java.util.Iterator;
import java.util.stream.Stream;
import com.fasterxml.jackson.annotation.JsonIgnore;
import ch.psi.daq.domain.json.ChannelName;
import ch.psi.daq.domain.json.channels.info.ChannelInfo;
import ch.psi.daq.domain.json.channels.info.ChannelInfos;
public class ChannelInfosStreamImpl implements ChannelInfos {
private ChannelName channel;
private Stream<? extends ChannelInfo> infos;
public ChannelInfosStreamImpl() {}
public ChannelInfosStreamImpl(final ChannelName channel, final Stream<? extends ChannelInfo> infos) {
this.channel = channel;
this.infos = infos;
}
@Override
public ChannelName getChannel() {
return channel;
}
public Stream<? extends ChannelInfo> getInfos() {
// can only be consumed once
return infos;
}
@JsonIgnore
@Override
public Iterator<ChannelInfo> iterator() {
return getChannelInfos().iterator();
}
@JsonIgnore
@Override
public Stream<ChannelInfo> getChannelInfos() {
return infos.map(info -> (ChannelInfo) info);
}
}

View File

@ -10,6 +10,7 @@ import com.fasterxml.jackson.annotation.JsonIgnore;
import com.fasterxml.jackson.core.JsonEncoding; import com.fasterxml.jackson.core.JsonEncoding;
import ch.psi.daq.domain.query.DAQQueries; import ch.psi.daq.domain.query.DAQQueries;
import ch.psi.daq.domain.query.DAQQueryElement;
import ch.psi.daq.domain.query.response.ResponseFormat; import ch.psi.daq.domain.query.response.ResponseFormat;
import ch.psi.daq.domain.query.response.ResponseImpl; import ch.psi.daq.domain.query.response.ResponseImpl;
@ -20,8 +21,32 @@ public abstract class AbstractHTTPResponse extends ResponseImpl {
} }
@JsonIgnore @JsonIgnore
public abstract void respond(final ApplicationContext context, final DAQQueries queries, public abstract void validateQuery(final Object queryObj);
HttpServletResponse httpResponse) throws Exception;
@JsonIgnore
public boolean useTableFormat(final Object queryObj) {
if (queryObj instanceof DAQQueries) {
final DAQQueries queries = (DAQQueries) queryObj;
for (final DAQQueryElement query : queries) {
if (query.getMapping() != null) {
return true;
}
}
} else if (queryObj instanceof DAQQueryElement) {
return ((DAQQueryElement) queryObj).getMapping() != null;
}
return false;
}
@JsonIgnore
public abstract <R> void respond(
final ApplicationContext context,
final HttpServletResponse httpResponse,
final Object query, final R result,
final ResponseFormatter<R> formatter)
throws Exception;
/** /**
* Configures the output stream and headers according to whether compression is wanted or not. * Configures the output stream and headers according to whether compression is wanted or not.

View File

@ -0,0 +1,16 @@
package ch.psi.daq.queryrest.response;
import java.io.OutputStream;
import com.fasterxml.jackson.core.JsonFactory;
import com.fasterxml.jackson.databind.ObjectMapper;
public interface ResponseFormatter<R> {
void format(
final JsonFactory factory,
final ObjectMapper mapper,
final R result,
final OutputStream out,
final AbstractHTTPResponse response) throws Exception;
}

View File

@ -1,30 +1,27 @@
package ch.psi.daq.queryrest.response; package ch.psi.daq.queryrest.response;
import java.io.OutputStream; import java.io.OutputStream;
import java.util.List;
import java.util.Map.Entry;
import java.util.stream.Stream;
import javax.servlet.ServletResponse; import javax.servlet.ServletResponse;
import org.apache.commons.lang3.tuple.Triple;
import ch.psi.daq.domain.json.ChannelName;
import ch.psi.daq.domain.query.DAQQueryElement;
import ch.psi.daq.domain.query.backend.BackendQuery;
import ch.psi.daq.domain.query.response.Response;
public interface ResponseStreamWriter { public interface ResponseStreamWriter {
/** /**
* Responding with the the contents of the stream by writing into the output stream of the * Responding with the the contents of the stream by writing into the output stream of the
* {@link ServletResponse}. * {@link ServletResponse}.
* *
* @param results The results results * @param <R> The JAVA result type
* @param query The query
* @param result The result
* @param out The OutputStream * @param out The OutputStream
* @param response The Response * @param response The Response
* @param formatter The ResponseFormatter
* @throws Exception thrown if writing to the output stream fails * @throws Exception thrown if writing to the output stream fails
*/ */
public void respond(final List<Entry<DAQQueryElement, Stream<Triple<BackendQuery, ChannelName, ?>>>> results, public <R> void respond(
final OutputStream out, final Response response) throws Exception; final Object query,
final R result,
final OutputStream out,
final AbstractHTTPResponse response,
final ResponseFormatter<R> formatter) throws Exception;
} }

View File

@ -1,13 +1,9 @@
package ch.psi.daq.queryrest.response.csv; package ch.psi.daq.queryrest.response.csv;
import java.io.OutputStream; import java.io.OutputStream;
import java.util.List;
import java.util.Map.Entry;
import java.util.stream.Stream;
import javax.servlet.http.HttpServletResponse; import javax.servlet.http.HttpServletResponse;
import org.apache.commons.lang3.tuple.Triple;
import org.slf4j.Logger; import org.slf4j.Logger;
import org.slf4j.LoggerFactory; import org.slf4j.LoggerFactory;
import org.springframework.context.ApplicationContext; import org.springframework.context.ApplicationContext;
@ -15,16 +11,14 @@ import org.springframework.context.ApplicationContext;
import com.hazelcast.util.collection.ArrayUtils; import com.hazelcast.util.collection.ArrayUtils;
import ch.psi.daq.domain.FieldNames; import ch.psi.daq.domain.FieldNames;
import ch.psi.daq.domain.json.ChannelName;
import ch.psi.daq.domain.query.DAQQueries; import ch.psi.daq.domain.query.DAQQueries;
import ch.psi.daq.domain.query.DAQQueryElement; import ch.psi.daq.domain.query.DAQQueryElement;
import ch.psi.daq.domain.query.backend.BackendQuery;
import ch.psi.daq.domain.query.operation.AggregationType; import ch.psi.daq.domain.query.operation.AggregationType;
import ch.psi.daq.domain.query.operation.Compression; import ch.psi.daq.domain.query.operation.Compression;
import ch.psi.daq.domain.query.operation.QueryField; import ch.psi.daq.domain.query.operation.EventField;
import ch.psi.daq.domain.query.response.ResponseFormat; import ch.psi.daq.domain.query.response.ResponseFormat;
import ch.psi.daq.queryrest.query.QueryManager;
import ch.psi.daq.queryrest.response.AbstractHTTPResponse; import ch.psi.daq.queryrest.response.AbstractHTTPResponse;
import ch.psi.daq.queryrest.response.ResponseFormatter;
public class CSVHTTPResponse extends AbstractHTTPResponse { public class CSVHTTPResponse extends AbstractHTTPResponse {
private static final Logger LOGGER = LoggerFactory.getLogger(CSVHTTPResponse.class); private static final Logger LOGGER = LoggerFactory.getLogger(CSVHTTPResponse.class);
@ -42,46 +36,42 @@ public class CSVHTTPResponse extends AbstractHTTPResponse {
} }
@Override @Override
public void respond(final ApplicationContext context, final DAQQueries queries, final HttpServletResponse httpResponse) public void validateQuery(final Object queryObj) {
throws Exception { if (queryObj instanceof DAQQueries) {
final OutputStream out = handleCompressionAndResponseHeaders(httpResponse, CONTENT_TYPE); final DAQQueries queries = (DAQQueries) queryObj;
// do csv specific validations
validateQueries(queries);
try {
LOGGER.debug("Executing query '{}'", queries);
final QueryManager queryManager = context.getBean(QueryManager.class);
final CSVResponseStreamWriter streamWriter = context.getBean(CSVResponseStreamWriter.class);
// execute query
final List<Entry<DAQQueryElement, Stream<Triple<BackendQuery, ChannelName, ?>>>> result =
queryManager.getEvents(queries);
// write the response back to the client using java 8 streams
streamWriter.respond(result, out, this);
} catch (Exception e) {
LOGGER.error("Failed to execute query '{}'.", queries, e);
throw e;
}
}
protected void validateQueries(final DAQQueries queries) {
for (final DAQQueryElement query : queries) { for (final DAQQueryElement query : queries) {
if (!(query.getAggregation() == null || AggregationType.value.equals(query.getAggregation() if (!(query.getAggregation() == null || AggregationType.value.equals(query.getAggregation()
.getAggregationType()))) { .getAggregationType()))) {
// We allow only no aggregation or value aggregation as // We allow only no aggregation or value aggregation as
// extrema: nested structure and not clear how to map it to one line // extrema: nested structure and not clear how to map it to one line
// index: value is an array of Statistics whose size is not clear at initialization time // index: value is an array of Statistics whose size is not clear at initialization
final String message = "CSV export does not support '" + query.getAggregation().getAggregationType() + "'"; // time
final String message =
"CSV export does not support '" + query.getAggregation().getAggregationType() + "'";
LOGGER.warn(message); LOGGER.warn(message);
throw new IllegalArgumentException(message); throw new IllegalArgumentException(message);
} }
if (!ArrayUtils.contains(query.getColumns(), FieldNames.FIELD_GLOBAL_TIME)) { if (!ArrayUtils.contains(query.getColumns(), FieldNames.FIELD_GLOBAL_TIME)) {
query.addField(QueryField.globalMillis); query.addField(EventField.globalMillis);
} }
} }
} }
}
@Override
public <R> void respond(
final ApplicationContext context,
final HttpServletResponse response,
final Object query,
final R result,
final ResponseFormatter<R> formatter) throws Exception {
final OutputStream out = handleCompressionAndResponseHeaders(response, CONTENT_TYPE);
final CSVResponseStreamWriter streamWriter = context.getBean(CSVResponseStreamWriter.class);
// write the response back to the client using java 8 streams
streamWriter.respond(query, result, out, this, formatter);
}
} }

View File

@ -40,6 +40,7 @@ import ch.psi.daq.domain.DataEvent;
import ch.psi.daq.domain.backend.Backend; import ch.psi.daq.domain.backend.Backend;
import ch.psi.daq.domain.config.DomainConfig; import ch.psi.daq.domain.config.DomainConfig;
import ch.psi.daq.domain.json.ChannelName; import ch.psi.daq.domain.json.ChannelName;
import ch.psi.daq.domain.query.DAQQueries;
import ch.psi.daq.domain.query.DAQQueryElement; import ch.psi.daq.domain.query.DAQQueryElement;
import ch.psi.daq.domain.query.backend.BackendQuery; import ch.psi.daq.domain.query.backend.BackendQuery;
import ch.psi.daq.domain.query.backend.analyzer.BackendQueryAnalyzer; import ch.psi.daq.domain.query.backend.analyzer.BackendQueryAnalyzer;
@ -47,9 +48,11 @@ import ch.psi.daq.domain.query.mapping.IncompleteStrategy;
import ch.psi.daq.domain.query.mapping.Mapping; import ch.psi.daq.domain.query.mapping.Mapping;
import ch.psi.daq.domain.query.operation.Aggregation; import ch.psi.daq.domain.query.operation.Aggregation;
import ch.psi.daq.domain.query.operation.Extrema; import ch.psi.daq.domain.query.operation.Extrema;
import ch.psi.daq.domain.query.operation.QueryField; import ch.psi.daq.domain.query.operation.EventField;
import ch.psi.daq.domain.query.response.Response; import ch.psi.daq.domain.query.response.Response;
import ch.psi.daq.queryrest.config.QueryRestConfig; import ch.psi.daq.queryrest.config.QueryRestConfig;
import ch.psi.daq.queryrest.response.AbstractHTTPResponse;
import ch.psi.daq.queryrest.response.ResponseFormatter;
import ch.psi.daq.queryrest.response.ResponseStreamWriter; import ch.psi.daq.queryrest.response.ResponseStreamWriter;
/** /**
@ -84,6 +87,23 @@ public class CSVResponseStreamWriter implements ResponseStreamWriter, Applicatio
@SuppressWarnings("unchecked") @SuppressWarnings("unchecked")
@Override @Override
public <R> void respond(
final Object query,
final R result,
final OutputStream out,
final AbstractHTTPResponse response,
final ResponseFormatter<R> formatter) throws Exception {
if (query instanceof DAQQueries) {
respond((List<Entry<DAQQueryElement, Stream<Triple<BackendQuery, ChannelName, ?>>>>) result,
out, response);
} else {
final String message = String.format("'%s' has no response type for '%s'.", query);
LOGGER.error(message);
throw new IllegalStateException(message);
}
}
@SuppressWarnings("unchecked")
public void respond(final List<Entry<DAQQueryElement, Stream<Triple<BackendQuery, ChannelName, ?>>>> results, public void respond(final List<Entry<DAQQueryElement, Stream<Triple<BackendQuery, ChannelName, ?>>>> results,
final OutputStream out, final Response response) throws Exception { final OutputStream out, final Response response) throws Exception {
if (results.size() > 1) { if (results.size() > 1) {
@ -200,15 +220,15 @@ public class CSVResponseStreamWriter implements ResponseStreamWriter, Applicatio
private void setupChannelColumns(final DAQQueryElement daqQuery, final BackendQuery backendQuery, private void setupChannelColumns(final DAQQueryElement daqQuery, final BackendQuery backendQuery,
final ChannelName channelName, final ChannelName channelName,
final Collection<String> header, Collection<Pair<ChannelName, Function<DataEvent, String>>> accessors) { final Collection<String> header, Collection<Pair<ChannelName, Function<DataEvent, String>>> accessors) {
final Set<QueryField> queryFields = daqQuery.getFields(); final Set<EventField> queryFields = daqQuery.getFields();
final List<Aggregation> aggregations = final List<Aggregation> aggregations =
daqQuery.getAggregation() != null ? daqQuery.getAggregation().getAggregations() : null; daqQuery.getAggregation() != null ? daqQuery.getAggregation().getAggregations() : null;
final List<Extrema> extrema = daqQuery.getAggregation() != null ? daqQuery.getAggregation().getExtrema() : null; final List<Extrema> extrema = daqQuery.getAggregation() != null ? daqQuery.getAggregation().getExtrema() : null;
final BackendQueryAnalyzer queryAnalyzer = queryAnalizerFactory.apply(backendQuery); final BackendQueryAnalyzer queryAnalyzer = queryAnalizerFactory.apply(backendQuery);
for (final QueryField field : queryFields) { for (final EventField field : queryFields) {
if (!(QueryField.value.equals(field) && queryAnalyzer.isAggregationEnabled())) { if (!(EventField.value.equals(field) && queryAnalyzer.isAggregationEnabled())) {
final StringBuilder buf = new StringBuilder(3) final StringBuilder buf = new StringBuilder(3)
.append(channelName.getName()) .append(channelName.getName())
.append(DELIMITER_CHANNELNAME_FIELDNAME) .append(DELIMITER_CHANNELNAME_FIELDNAME)
@ -225,7 +245,7 @@ public class CSVResponseStreamWriter implements ResponseStreamWriter, Applicatio
final StringBuilder buf = new StringBuilder(5) final StringBuilder buf = new StringBuilder(5)
.append(channelName.getName()) .append(channelName.getName())
.append(DELIMITER_CHANNELNAME_FIELDNAME) .append(DELIMITER_CHANNELNAME_FIELDNAME)
.append(QueryField.value.name()) .append(EventField.value.name())
.append(DELIMITER_CHANNELNAME_FIELDNAME) .append(DELIMITER_CHANNELNAME_FIELDNAME)
.append(aggregation.name()); .append(aggregation.name());
@ -236,8 +256,8 @@ public class CSVResponseStreamWriter implements ResponseStreamWriter, Applicatio
if (extrema != null && queryAnalyzer.isAggregationEnabled()) { if (extrema != null && queryAnalyzer.isAggregationEnabled()) {
for (final Extrema extremum : extrema) { for (final Extrema extremum : extrema) {
for (final QueryField field : queryFields) { for (final EventField field : queryFields) {
final Function<DataEvent, Object> accessor = extremum.getAccessor(field); final Function<Object, Object> accessor = extremum.getAccessor(field);
if (accessor != null) { if (accessor != null) {
final StringBuilder buf = new StringBuilder(7) final StringBuilder buf = new StringBuilder(7)
.append(channelName.getName()) .append(channelName.getName())

View File

@ -10,11 +10,11 @@ public class QueryFieldStringifyer implements Function<DataEvent, String> {
public static final String OPEN_BRACKET = "["; public static final String OPEN_BRACKET = "[";
public static final String CLOSE_BRACKET = "]"; public static final String CLOSE_BRACKET = "]";
private Function<DataEvent, Object> accessor; private Function<Object, Object> accessor;
private String nonValue; private String nonValue;
private String arraySeparator; private String arraySeparator;
public QueryFieldStringifyer(Function<DataEvent, Object> accessor, String nonValue, String arraySeparator) { public QueryFieldStringifyer(Function<Object, Object> accessor, String nonValue, String arraySeparator) {
this.accessor = accessor; this.accessor = accessor;
this.nonValue = nonValue; this.nonValue = nonValue;
this.arraySeparator = arraySeparator; this.arraySeparator = arraySeparator;

View File

@ -0,0 +1,88 @@
package ch.psi.daq.queryrest.response.formatter;
import java.io.OutputStream;
import java.util.Collections;
import java.util.LinkedHashSet;
import java.util.Set;
import java.util.stream.Collectors;
import org.springframework.beans.BeansException;
import org.springframework.context.ApplicationContext;
import org.springframework.context.ApplicationContextAware;
import com.fasterxml.jackson.core.JsonEncoding;
import com.fasterxml.jackson.core.JsonFactory;
import com.fasterxml.jackson.core.JsonGenerator;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.databind.ObjectWriter;
import ch.psi.daq.domain.query.operation.ConfigField;
import ch.psi.daq.domain.query.operation.EventField;
import ch.psi.daq.domain.query.operation.QueryField;
import ch.psi.daq.queryrest.response.AbstractHTTPResponse;
import ch.psi.daq.queryrest.response.ResponseFormatter;
/**
 * Fallback {@link ResponseFormatter} that serializes an arbitrary result object as JSON,
 * restricting the written properties to configured event and config field sets.
 * <p>
 * The field sets can either be provided directly or resolved lazily (by bean name) from the
 * {@link ApplicationContext} when {@link #setApplicationContext(ApplicationContext)} is called.
 */
public class AnyResponseFormatter implements ResponseFormatter<Object>,
        ApplicationContextAware {
    // Bean names used to lazily resolve the default field sets (null when the field
    // sets were provided directly through the constructor).
    private final String eventFieldsBeanName;
    private final String configFieldsBeanName;
    // Getter-method names to include when serializing events/configs.
    private Set<String> eventFields;
    private Set<String> configFields;

    /**
     * Creates a formatter whose field sets are resolved from the application context.
     *
     * @param eventFieldsBeanName name of the bean holding the default event fields
     * @param configFieldsBeanName name of the bean holding the default config fields
     */
    public AnyResponseFormatter(final String eventFieldsBeanName, final String configFieldsBeanName) {
        this.eventFieldsBeanName = eventFieldsBeanName;
        this.configFieldsBeanName = configFieldsBeanName;
    }

    /**
     * Creates a formatter with explicitly provided field sets.
     *
     * @param eventFields getter-method names to include for events
     * @param configFields getter-method names to include for configs
     */
    public AnyResponseFormatter(final Set<String> eventFields, final Set<String> configFields) {
        this.eventFieldsBeanName = null;
        this.configFieldsBeanName = null;
        this.eventFields = eventFields;
        this.configFields = configFields;
    }

    @SuppressWarnings("unchecked")
    @Override
    public void setApplicationContext(ApplicationContext context) throws BeansException {
        // Resolve field sets lazily only if they were not provided through the constructor.
        if (eventFields == null) {
            final Set<EventField> defaultEventFields =
                    context.getBean(eventFieldsBeanName, Set.class);
            this.eventFields =
                    defaultEventFields.stream().map(QueryField::getName)
                            .collect(Collectors.toCollection(LinkedHashSet::new));
        }
        if (configFields == null) {
            final Set<ConfigField> defaultConfigFields =
                    context.getBean(configFieldsBeanName, Set.class);
            this.configFields =
                    defaultConfigFields.stream().map(QueryField::getName)
                            .collect(Collectors.toCollection(LinkedHashSet::new));
        }
    }

    /** @return unmodifiable view of the event field names written by this formatter */
    public Set<String> getEventFields() {
        return Collections.unmodifiableSet(eventFields);
    }

    /** @return unmodifiable view of the config field names written by this formatter */
    public Set<String> getConfigFields() {
        return Collections.unmodifiableSet(configFields);
    }

    /**
     * Serializes {@code result} as JSON into {@code out}, writing only the configured fields.
     *
     * @throws Exception if writing to the output stream fails
     */
    @Override
    public void format(
            final JsonFactory factory,
            final ObjectMapper mapper,
            final Object result,
            final OutputStream out,
            final AbstractHTTPResponse response) throws Exception {
        final ObjectWriter writer = DAQQueriesResponseFormatter.configureWriter(mapper, eventFields, configFields);
        // try-with-resources guarantees the generator is closed (close() also flushes).
        try (final JsonGenerator generator = factory.createGenerator(out, JsonEncoding.UTF8)) {
            writer.writeValue(generator, result);
            generator.flush();
        }
    }
}

View File

@ -0,0 +1,127 @@
package ch.psi.daq.queryrest.response.formatter;
import java.io.OutputStream;
import java.util.LinkedHashSet;
import java.util.Map.Entry;
import java.util.Set;
import java.util.concurrent.atomic.AtomicReference;
import java.util.stream.Stream;
import org.apache.commons.lang3.tuple.Triple;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.BeansException;
import org.springframework.context.ApplicationContext;
import org.springframework.context.ApplicationContextAware;
import com.fasterxml.jackson.core.JsonEncoding;
import com.fasterxml.jackson.core.JsonFactory;
import com.fasterxml.jackson.core.JsonGenerator;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.databind.ObjectWriter;
import ch.psi.daq.domain.json.ChannelName;
import ch.psi.daq.domain.query.DAQConfigQueryElement;
import ch.psi.daq.domain.query.backend.BackendQuery;
import ch.psi.daq.domain.query.operation.ConfigField;
import ch.psi.daq.domain.query.operation.EventField;
import ch.psi.daq.domain.query.operation.QueryField;
import ch.psi.daq.queryrest.response.AbstractHTTPResponse;
import ch.psi.daq.queryrest.response.ResponseFormatter;
/**
 * Formats the result of a channel-configuration query as a JSON array. Each array element
 * contains the channel identifier and the stream of its historic configurations.
 */
public class DAQConfigQueryResponseFormatter
        implements ResponseFormatter<Entry<DAQConfigQueryElement, Stream<Triple<BackendQuery, ChannelName, ?>>>>,
        ApplicationContextAware {
    private static final Logger LOGGER = LoggerFactory.getLogger(DAQConfigQueryResponseFormatter.class);
    public static final String CONFIGS_RESP_FIELD = "configs";

    @Override
    public void setApplicationContext(ApplicationContext context) throws BeansException {}

    /**
     * Writes the query result as JSON into {@code out}, restricting the serialized
     * properties to the fields requested by the query.
     *
     * @throws Exception if writing to the output stream fails
     */
    @Override
    public void format(
            final JsonFactory factory,
            final ObjectMapper mapper,
            final Entry<DAQConfigQueryElement, Stream<Triple<BackendQuery, ChannelName, ?>>> result,
            final OutputStream out,
            final AbstractHTTPResponse response) throws Exception {
        final DAQConfigQueryElement daqQuery = result.getKey();
        // Channel/backend identifiers are removed since they are written separately as keys.
        final ObjectWriter writer =
                DAQQueriesResponseFormatter.configureWriter(mapper, null, getFields(daqQuery, true));
        final AtomicReference<Exception> error = new AtomicReference<>();
        final JsonGenerator generator = factory.createGenerator(out, JsonEncoding.UTF8);
        try {
            writeArrayFormat(generator, writer, result, error);
        } finally {
            generator.flush();
            generator.close();
        }
        final Exception deferred = error.get();
        if (deferred != null) {
            throw deferred;
        }
    }

    /** Writes one JSON object per channel into a surrounding JSON array. */
    private void writeArrayFormat(final JsonGenerator generator, final ObjectWriter writer,
            final Entry<DAQConfigQueryElement, Stream<Triple<BackendQuery, ChannelName, ?>>> entry,
            final AtomicReference<Exception> error) {
        final DAQConfigQueryElement daqQuery = entry.getKey();
        try {
            generator.writeStartArray();
            // sequential() ensures elements are written one after the other.
            entry.getValue()
                    .sequential()
                    .forEach(triple -> writeChannelEntry(generator, writer, triple, error));
            generator.writeEndArray();
        } catch (Exception e) {
            LOGGER.error("Exception while writing json for '{}'", daqQuery.getChannels(), e);
            error.compareAndSet(null, e);
        }
    }

    /** Writes a single channel's identifier and configuration stream as one JSON object. */
    private void writeChannelEntry(final JsonGenerator generator, final ObjectWriter writer,
            final Triple<BackendQuery, ChannelName, ?> triple,
            final AtomicReference<Exception> error) {
        try {
            generator.writeStartObject();
            generator.writeFieldName(EventField.channel.name());
            writer.writeValue(generator, triple.getMiddle());
            generator.writeFieldName(CONFIGS_RESP_FIELD);
            writer.writeValue(generator, triple.getRight());
            generator.writeEndObject();
        } catch (Exception e) {
            LOGGER.error("Could not write channel name of channel configuration '{}'",
                    triple.getMiddle(),
                    e);
            error.compareAndSet(null, e);
        } finally {
            // Close the per-channel stream so backend resources are released.
            if (triple.getRight() instanceof Stream) {
                ((Stream<?>) (triple.getRight())).close();
            }
        }
    }

    /**
     * Collects the getter-method names of the query's requested fields.
     *
     * @param removeIdentifiers when true, drop channel/name/backend since they are
     *        already provided as keys in the surrounding mapping
     */
    private static Set<String> getFields(final DAQConfigQueryElement query,
            final boolean removeIdentifiers) {
        final Set<? extends QueryField> requested = query.getFields();
        final Set<String> fieldNames = new LinkedHashSet<String>(requested.size());
        for (final QueryField requestedField : requested) {
            fieldNames.add(requestedField.getName());
        }
        if (removeIdentifiers) {
            fieldNames.remove(ConfigField.channel.name());
            fieldNames.remove(ConfigField.name.name());
            fieldNames.remove(ConfigField.backend.name());
        }
        return fieldNames;
    }
}

View File

@ -0,0 +1,328 @@
package ch.psi.daq.queryrest.response.formatter;
import java.io.OutputStream;
import java.util.Iterator;
import java.util.LinkedHashMap;
import java.util.LinkedHashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.Map.Entry;
import java.util.concurrent.atomic.AtomicReference;
import java.util.function.Function;
import java.util.function.ToLongFunction;
import java.util.stream.Collectors;
import java.util.stream.Stream;
import org.apache.commons.lang3.tuple.Triple;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.BeansException;
import org.springframework.context.ApplicationContext;
import org.springframework.context.ApplicationContextAware;
import com.fasterxml.jackson.core.JsonEncoding;
import com.fasterxml.jackson.core.JsonFactory;
import com.fasterxml.jackson.core.JsonGenerator;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.databind.ObjectWriter;
import com.fasterxml.jackson.databind.ser.impl.SimpleBeanPropertyFilter;
import com.fasterxml.jackson.databind.ser.impl.SimpleFilterProvider;
import ch.psi.daq.common.stream.match.ListCreator;
import ch.psi.daq.common.stream.match.ListFiller;
import ch.psi.daq.common.stream.match.Padder;
import ch.psi.daq.common.stream.match.StreamMatcher;
import ch.psi.daq.common.time.TimeUtils;
import ch.psi.daq.domain.DataEvent;
import ch.psi.daq.domain.json.ChannelName;
import ch.psi.daq.domain.query.DAQQueryElement;
import ch.psi.daq.domain.query.backend.BackendQuery;
import ch.psi.daq.domain.query.bin.BinningStrategy;
import ch.psi.daq.domain.query.bin.strategy.BinningStrategyPerBinPulse;
import ch.psi.daq.domain.query.bin.strategy.BinningStrategyPerBinTime;
import ch.psi.daq.domain.query.mapping.IncompleteStrategy;
import ch.psi.daq.domain.query.mapping.Mapping;
import ch.psi.daq.domain.query.operation.Aggregation;
import ch.psi.daq.domain.query.operation.EventField;
import ch.psi.daq.domain.query.operation.Extrema;
import ch.psi.daq.domain.query.operation.QueryField;
import ch.psi.daq.domain.request.range.RequestRange;
import ch.psi.daq.query.bin.aggregate.BinnedValueCombiner;
import ch.psi.daq.queryrest.config.QueryRestConfig;
import ch.psi.daq.queryrest.model.HistoricChannelConfigurationPropertyFilterMixin;
import ch.psi.daq.queryrest.model.PropertyFilterMixin;
import ch.psi.daq.queryrest.response.AbstractHTTPResponse;
import ch.psi.daq.queryrest.response.ResponseFormatter;
import ch.psi.daq.queryrest.response.json.JSONResponseStreamWriter;
/**
 * Formats the results of DAQ event queries as JSON. Depending on the query, results are written
 * either as a per-channel array of events, or as a table in which the events of all channels are
 * matched (binned) into common rows.
 */
public class DAQQueriesResponseFormatter
        implements ResponseFormatter<List<Entry<DAQQueryElement, Stream<Triple<BackendQuery, ChannelName, ?>>>>>,
        ApplicationContextAware {
    // Fix: logger was created for JSONResponseStreamWriter.class (copy-paste) - log
    // output was attributed to the wrong category.
    private static final Logger LOGGER = LoggerFactory.getLogger(DAQQueriesResponseFormatter.class);

    public static final String DATA_RESP_FIELD = "data";
    public static final Mapping DEFAULT_MAPPING = new Mapping(IncompleteStrategy.PROVIDE_AS_IS);

    private static final long MILLIS_PER_PULSE = TimeUtils.MILLIS_PER_PULSE;
    // Key used to group events of the same channel when matching streams.
    private static final Function<DataEvent, ChannelName> KEY_PROVIDER = (event) -> new ChannelName(event.getChannel(),
            event.getBackend());
    // try to match sync data (bsread) with non sync data (epics) based on the time using 10 millis
    // buckets.
    private static final ToLongFunction<DataEvent> MATCHER_PROVIDER = (event) -> event.getGlobalMillis()
            / MILLIS_PER_PULSE;

    // In case ArchiverAppliance had several events within the 10ms mapping interval, return these
    // aggregations (only used for table format)
    private Set<String> defaultEventResponseAggregations;

    @SuppressWarnings("unchecked")
    @Override
    public void setApplicationContext(ApplicationContext context) throws BeansException {
        // Fix: removed stray double semicolon after getBean(...).
        final Set<Aggregation> defaultEventResponseAggregations =
                context.getBean(QueryRestConfig.BEAN_NAME_DEFAULT_EVENT_RESPONSE_AGGREGATIONS, Set.class);
        this.defaultEventResponseAggregations =
                defaultEventResponseAggregations.stream().map(Aggregation::name)
                        .collect(Collectors.toCollection(LinkedHashSet::new));
    }

    /**
     * Writes all query results as JSON into {@code out}. Multiple query elements are wrapped in
     * a surrounding JSON array.
     *
     * @throws Exception if writing to the output stream fails
     */
    @Override
    public void format(
            final JsonFactory factory,
            final ObjectMapper mapper,
            final List<Entry<DAQQueryElement, Stream<Triple<BackendQuery, ChannelName, ?>>>> results,
            final OutputStream out,
            final AbstractHTTPResponse response) throws Exception {
        final AtomicReference<Exception> exception = new AtomicReference<>();
        final JsonGenerator generator = factory.createGenerator(out, JsonEncoding.UTF8);
        try {
            if (results.size() > 1) {
                generator.writeStartArray();
            }
            results
                    .forEach(entry -> {
                        final DAQQueryElement daqQuery = entry.getKey();
                        if (response.useTableFormat(daqQuery)) {
                            final Set<String> includedFields = getFields(daqQuery, false);
                            /* make sure identifiers are available */
                            includedFields.add(EventField.channel.name());
                            includedFields.add(EventField.backend.name());
                            // issue ATEST-633
                            if (!containsAggregation(includedFields)) {
                                includedFields.addAll(defaultEventResponseAggregations);
                            }
                            final ObjectWriter writer = configureWriter(mapper, includedFields, null);
                            writeTableFormat(generator, writer, entry, exception);
                        } else {
                            final Set<String> includedFields = getFields(daqQuery, true);
                            final ObjectWriter writer = configureWriter(mapper, includedFields, null);
                            writeArrayFormat(generator, writer, entry, exception);
                        }
                    });
        } finally {
            if (results.size() > 1) {
                generator.writeEndArray();
            }
            generator.flush();
            generator.close();
        }
        if (exception.get() != null) {
            throw exception.get();
        }
    }

    /** Writes one JSON object per channel (identifier plus its event data) into a JSON array. */
    private static void writeArrayFormat(final JsonGenerator generator, final ObjectWriter writer,
            final Entry<DAQQueryElement, Stream<Triple<BackendQuery, ChannelName, ?>>> entry,
            final AtomicReference<Exception> exception) {
        final DAQQueryElement daqQuery = entry.getKey();
        try {
            generator.writeStartArray();
            entry.getValue()
                    /* ensure elements are sequentially written */
                    .sequential()
                    .forEach(
                            triple -> {
                                try {
                                    generator.writeStartObject();
                                    generator.writeFieldName(EventField.channel.name());
                                    writer.writeValue(generator, triple.getMiddle());
                                    generator.writeFieldName(DATA_RESP_FIELD);
                                    writer.writeValue(generator, triple.getRight());
                                    generator.writeEndObject();
                                } catch (Exception e) {
                                    LOGGER.error("Could not write channel name of channel '{}'", triple.getMiddle(),
                                            e);
                                    exception.compareAndSet(null, e);
                                } finally {
                                    // release backend resources held by the per-channel stream
                                    if (triple.getRight() instanceof Stream) {
                                        ((Stream<?>) (triple.getRight())).close();
                                    }
                                }
                            });
            generator.writeEndArray();
        } catch (Exception e) {
            LOGGER.error("Exception while writing json for '{}'", daqQuery.getChannels(), e);
            exception.compareAndSet(null, e);
        }
    }

    /**
     * Matches the event streams of all channels into common bins and writes them as one table
     * (a single JSON object with a "data" field of binned rows).
     */
    @SuppressWarnings("unchecked")
    private static void writeTableFormat(JsonGenerator generator, ObjectWriter writer,
            Entry<DAQQueryElement, Stream<Triple<BackendQuery, ChannelName, ?>>> entry,
            AtomicReference<Exception> exception) {
        /* get DataEvent stream of sub-queries for later match */
        final Map<ChannelName, Stream<DataEvent>> streams =
                new LinkedHashMap<>();
        final AtomicReference<BackendQuery> backendQueryRef = new AtomicReference<>();
        final DAQQueryElement daqQuery = entry.getKey();
        entry.getValue()
                .sequential()
                .forEach(
                        triple -> {
                            backendQueryRef.compareAndSet(null, triple.getLeft());
                            if (triple.getRight() instanceof Stream) {
                                streams.put(triple.getMiddle(), ((Stream<DataEvent>) triple.getRight()));
                            } else {
                                final String message =
                                        String.format("Expect a DataEvent Stream for '%s' but got '%s'.",
                                                triple.getMiddle(), triple.getRight().getClass().getSimpleName());
                                LOGGER.warn(message);
                                streams.put(triple.getMiddle(), Stream.empty());
                            }
                        });
        final BackendQuery backendQuery = backendQueryRef.get();
        final RequestRange requestRange = backendQuery.getRequest().getRequestRange();
        BinningStrategy binningStrategy = backendQuery.getBinningStrategy();
        final Mapping mapping = daqQuery.getMappingOrDefault(DEFAULT_MAPPING);
        final Padder<ChannelName, DataEvent> padder = mapping.getIncomplete().getPadder(backendQuery);
        ToLongFunction<DataEvent> matchProvider = binningStrategy;
        if (binningStrategy == null) {
            // no explicit binning requested - derive one from the request range
            matchProvider = MATCHER_PROVIDER;
            if (requestRange.isPulseIdRangeDefined()) {
                binningStrategy = new BinningStrategyPerBinPulse(1);
            } else if (requestRange.isTimeRangeDefined()) {
                binningStrategy = new BinningStrategyPerBinTime(MILLIS_PER_PULSE);
            } else {
                final String message = "Either time or pulseId range must be defined by the query!";
                LOGGER.error(message);
                throw new IllegalStateException(message);
            }
        }
        binningStrategy.setRequestRange(requestRange);
        /* online matching of the stream's content */
        final StreamMatcher<ChannelName, DataEvent, List<DataEvent>> streamMatcher =
                new StreamMatcher<>(
                        KEY_PROVIDER,
                        matchProvider,
                        new ListCreator<ChannelName, DataEvent>(),
                        new ListFiller<ChannelName, DataEvent>(),
                        new BinnedValueCombiner(binningStrategy),
                        padder,
                        streams.values());
        final Iterator<List<DataEvent>> streamsMatchIter = streamMatcher.iterator();
        try {
            generator.writeStartObject();
            generator.writeFieldName(DATA_RESP_FIELD);
            writer.writeValue(generator, streamsMatchIter);
            generator.writeEndObject();
        } catch (Exception e) {
            LOGGER.error("Exception while writing json for '{}'", daqQuery.getChannels(), e);
            exception.compareAndSet(null, e);
        } finally {
            if (streamMatcher != null) {
                try {
                    streamMatcher.close();
                } catch (Throwable t) {
                    LOGGER.error(
                            "Something went wrong while closing stream matcher for JSON table response writer.",
                            t);
                }
            }
        }
    }

    /**
     * Configures the writer dynamically by including the fields which should be included in the
     * response.
     *
     * @param mapper The ObjectMapper
     * @param includedEventFields set of strings which correspond to the getter method names of the
     *        classes registered as a mixed-in
     * @param includedConfigFields set of strings which correspond to the getter method names of the
     *        classes registered as a mixed-in
     * @return the configured writer that includes the specified fields
     */
    public static ObjectWriter configureWriter(final ObjectMapper mapper, final Set<String> includedEventFields,
            final Set<String> includedConfigFields) {
        final SimpleFilterProvider propertyFilter = new SimpleFilterProvider();
        if (includedEventFields != null) {
            propertyFilter.addFilter(PropertyFilterMixin.FILTER_NAME,
                    SimpleBeanPropertyFilter.filterOutAllExcept(includedEventFields));
        }
        if (includedConfigFields != null) {
            propertyFilter.addFilter(HistoricChannelConfigurationPropertyFilterMixin.FILTER_NAME,
                    SimpleBeanPropertyFilter.filterOutAllExcept(includedConfigFields));
        }
        // only write the properties not excluded in the filter
        final ObjectWriter writer = mapper.writer(propertyFilter);
        return writer;
    }

    /**
     * Collects the getter-method names to serialize for the given query: requested fields plus
     * aggregations/extrema.
     *
     * @param removeIdentifiers when true, drop channel/backend since they are already provided
     *        as keys in the surrounding mapping
     */
    private static Set<String> getFields(final DAQQueryElement query, final boolean removeIdentifiers) {
        final Set<? extends QueryField> queryFields = query.getFields();
        final List<Aggregation> aggregations =
                query.getAggregation() != null ? query.getAggregation().getAggregations() : null;
        final List<Extrema> extrema = query.getAggregation() != null ? query.getAggregation().getExtrema() : null;
        final Set<String> includedFields =
                new LinkedHashSet<String>(queryFields.size() + (aggregations != null ? aggregations.size() : 0)
                        + (extrema != null ? extrema.size() : 0));
        for (final QueryField field : queryFields) {
            includedFields.add(field.getName());
        }
        if (aggregations != null) {
            for (final Aggregation aggregation : aggregations) {
                includedFields.add(aggregation.name());
            }
        }
        if (extrema != null) {
            // field of ExtremaCalculator (extrema in BinnedValueCombinedDataEvent and
            // BinnedIndexCombinedDataEvent)
            includedFields.add("extrema");
        }
        if (removeIdentifiers) {
            // do not write channel/backend since it is already provided as key in mapping
            includedFields.remove(EventField.channel.name());
            includedFields.remove(EventField.backend.name());
        }
        return includedFields;
    }

    /** @return true if any known aggregation name is among the included fields */
    private static boolean containsAggregation(final Set<String> includedFields) {
        for (final Aggregation aggregation : Aggregation.values()) {
            if (includedFields.contains(aggregation.name())) {
                return true;
            }
        }
        return false;
    }
}

View File

@ -0,0 +1,51 @@
package ch.psi.daq.queryrest.response.json;
import java.io.OutputStream;
import javax.servlet.ServletResponse;
import com.fasterxml.jackson.core.JsonFactory;
import com.fasterxml.jackson.databind.ObjectMapper;
import ch.psi.daq.domain.backend.Backend;
import ch.psi.daq.queryrest.response.AbstractHTTPResponse;
import ch.psi.daq.queryrest.response.ResponseFormatter;
import ch.psi.daq.queryrest.response.ResponseStreamWriter;
/**
 * Takes a Java 8 stream and writes it to the output stream provided by the {@link ServletResponse}
 * of the current request, delegating the actual JSON serialization to a {@link ResponseFormatter}.
 */
public abstract class AbstractResponseStreamWriter implements ResponseStreamWriter {

    /**
     * Hook for subclasses to perform backend-specific initialization. Default is a no-op.
     *
     * @param backend The Backend
     */
    protected void init(final Backend backend) {}

    /**
     * Writes {@code result} to {@code out} using the given formatter and this writer's
     * JSON infrastructure. The {@code query} parameter is part of the
     * {@link ResponseStreamWriter} contract; the formatter already encapsulates the
     * query-specific serialization.
     *
     * @throws Exception if writing to the output stream fails
     */
    @Override
    public <R> void respond(
            final Object query,
            final R result,
            final OutputStream out,
            final AbstractHTTPResponse response,
            final ResponseFormatter<R> formatter) throws Exception {
        // Removed the obsolete commented-out dispatch code; the formatter now fully
        // determines how the result is serialized.
        formatter.format(getJsonFactory(), getObjectMapper(), result, out, response);
    }

    /** @return the JsonFactory used to create generators for the output stream */
    protected abstract JsonFactory getJsonFactory();

    /** @return the ObjectMapper used for serialization */
    protected abstract ObjectMapper getObjectMapper();
}

View File

@ -1,35 +1,25 @@
package ch.psi.daq.queryrest.response.json; package ch.psi.daq.queryrest.response.json;
import java.io.OutputStream; import java.io.OutputStream;
import java.util.List;
import java.util.Map.Entry;
import java.util.stream.Stream;
import javax.servlet.http.HttpServletResponse; import javax.servlet.http.HttpServletResponse;
import org.apache.commons.lang3.tuple.Triple;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.context.ApplicationContext; import org.springframework.context.ApplicationContext;
import org.springframework.http.MediaType; import org.springframework.http.MediaType;
import com.hazelcast.util.collection.ArrayUtils; import com.hazelcast.util.collection.ArrayUtils;
import ch.psi.daq.domain.FieldNames; import ch.psi.daq.domain.FieldNames;
import ch.psi.daq.domain.json.ChannelName;
import ch.psi.daq.domain.query.DAQQueries; import ch.psi.daq.domain.query.DAQQueries;
import ch.psi.daq.domain.query.DAQQueryElement; import ch.psi.daq.domain.query.DAQQueryElement;
import ch.psi.daq.domain.query.backend.BackendQuery;
import ch.psi.daq.domain.query.operation.Compression; import ch.psi.daq.domain.query.operation.Compression;
import ch.psi.daq.domain.query.operation.QueryField; import ch.psi.daq.domain.query.operation.EventField;
import ch.psi.daq.domain.query.response.ResponseFormat; import ch.psi.daq.domain.query.response.ResponseFormat;
import ch.psi.daq.queryrest.query.QueryManager;
import ch.psi.daq.queryrest.response.AbstractHTTPResponse; import ch.psi.daq.queryrest.response.AbstractHTTPResponse;
import ch.psi.daq.queryrest.response.ResponseFormatter;
import ch.psi.daq.queryrest.response.ResponseStreamWriter; import ch.psi.daq.queryrest.response.ResponseStreamWriter;
public class JSONHTTPResponse extends AbstractHTTPResponse { public class JSONHTTPResponse extends AbstractHTTPResponse {
private static final Logger LOGGER = LoggerFactory.getLogger(JSONHTTPResponse.class);
public static final String FORMAT = "json"; public static final String FORMAT = "json";
public static final String CONTENT_TYPE = MediaType.APPLICATION_JSON_VALUE; public static final String CONTENT_TYPE = MediaType.APPLICATION_JSON_VALUE;
@ -43,47 +33,53 @@ public class JSONHTTPResponse extends AbstractHTTPResponse {
} }
@Override @Override
public void respond(final ApplicationContext context, final DAQQueries queries, final HttpServletResponse response) throws Exception { public void validateQuery(final Object query) {
JSONHTTPResponse.defaultQueryValidation(query);
}
// @Override
// public void respond(final ApplicationContext context, final HttpServletResponse response,
// final Object query,
// final Object result) throws Exception {
// final OutputStream out = handleCompressionAndResponseHeaders(response, CONTENT_TYPE);
// final boolean useTableFormat = JSONHTTPResponse.useTableFormat(query);
//
// final ResponseStreamWriter streamWriter;
// if (useTableFormat) {
// streamWriter = context.getBean(JSONTableResponseStreamWriter.class);
// } else {
// streamWriter = context.getBean(JSONResponseStreamWriter.class);
// }
//
// // write the response back to the client using java 8 streams
// streamWriter.respond(query, result, out, this);
// }
@Override
public <R> void respond(
final ApplicationContext context,
final HttpServletResponse response,
final Object query,
final R result,
final ResponseFormatter<R> formatter) throws Exception {
final OutputStream out = handleCompressionAndResponseHeaders(response, CONTENT_TYPE); final OutputStream out = handleCompressionAndResponseHeaders(response, CONTENT_TYPE);
final boolean hasMapping = JSONHTTPResponse.validateQueries(queries); final ResponseStreamWriter streamWriter = context.getBean(JSONResponseStreamWriter.class);
try {
LOGGER.debug("Executing query '{}'", queries);
final QueryManager queryManager = context.getBean(QueryManager.class);
final ResponseStreamWriter streamWriter;
if (hasMapping) {
streamWriter = context.getBean(JSONTableResponseStreamWriter.class);
} else {
streamWriter = context.getBean(JSONResponseStreamWriter.class);
}
// execute query
final List<Entry<DAQQueryElement, Stream<Triple<BackendQuery, ChannelName, ?>>>> result =
queryManager.getEvents(queries);
// write the response back to the client using java 8 streams // write the response back to the client using java 8 streams
streamWriter.respond(result, out, this); streamWriter.respond(query, result, out, this, formatter);
} catch (Exception e) {
LOGGER.error("Failed to execute query '{}'.", queries, e);
throw e;
}
} }
public static void defaultQueryValidation(final Object queryObj) {
public static boolean validateQueries(final DAQQueries queries) { if (queryObj instanceof DAQQueries) {
boolean hasMapping = false; final DAQQueries queries = (DAQQueries) queryObj;
for (final DAQQueryElement query : queries) { for (final DAQQueryElement query : queries) {
if (query.getMapping() != null) { if (query.getMapping() != null) {
hasMapping = true;
if (!ArrayUtils.contains(query.getColumns(), FieldNames.FIELD_GLOBAL_TIME)) { if (!ArrayUtils.contains(query.getColumns(), FieldNames.FIELD_GLOBAL_TIME)) {
query.addField(QueryField.globalMillis); query.addField(EventField.globalMillis);
}
} }
} }
} }
return hasMapping;
} }
} }

View File

@ -1,181 +1,39 @@
package ch.psi.daq.queryrest.response.json; package ch.psi.daq.queryrest.response.json;
import java.io.OutputStream;
import java.util.LinkedHashSet;
import java.util.List;
import java.util.Map.Entry;
import java.util.Set;
import java.util.concurrent.atomic.AtomicReference;
import java.util.stream.Stream;
import javax.servlet.ServletResponse;
import org.apache.commons.lang3.tuple.Triple;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.BeansException; import org.springframework.beans.BeansException;
import org.springframework.context.ApplicationContext; import org.springframework.context.ApplicationContext;
import org.springframework.context.ApplicationContextAware; import org.springframework.context.ApplicationContextAware;
import com.fasterxml.jackson.core.JsonEncoding;
import com.fasterxml.jackson.core.JsonFactory; import com.fasterxml.jackson.core.JsonFactory;
import com.fasterxml.jackson.core.JsonGenerator;
import com.fasterxml.jackson.databind.ObjectMapper; import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.databind.ObjectWriter;
import com.fasterxml.jackson.databind.ser.impl.SimpleBeanPropertyFilter;
import com.fasterxml.jackson.databind.ser.impl.SimpleFilterProvider;
import ch.psi.daq.domain.backend.Backend; import ch.psi.daq.domain.backend.Backend;
import ch.psi.daq.domain.config.DomainConfig; import ch.psi.daq.domain.config.DomainConfig;
import ch.psi.daq.domain.json.ChannelName;
import ch.psi.daq.domain.query.DAQQueryElement;
import ch.psi.daq.domain.query.backend.BackendQuery;
import ch.psi.daq.domain.query.operation.Aggregation;
import ch.psi.daq.domain.query.operation.Extrema;
import ch.psi.daq.domain.query.operation.QueryField;
import ch.psi.daq.domain.query.response.Response;
import ch.psi.daq.queryrest.config.QueryRestConfig; import ch.psi.daq.queryrest.config.QueryRestConfig;
import ch.psi.daq.queryrest.model.PropertyFilterMixin;
import ch.psi.daq.queryrest.response.ResponseStreamWriter;
/**
* Takes a Java 8 stream and writes it to the output stream provided by the {@link ServletResponse}
* of the current request.
*/
public class JSONResponseStreamWriter implements ResponseStreamWriter, ApplicationContextAware {
public static final String DATA_RESP_FIELD = "data";
private static final Logger LOGGER = LoggerFactory.getLogger(JSONResponseStreamWriter.class);
public class JSONResponseStreamWriter extends AbstractResponseStreamWriter implements ApplicationContextAware {
private ObjectMapper mapper; private ObjectMapper mapper;
private JsonFactory factory; private JsonFactory factory;
@Override @Override
public void setApplicationContext(ApplicationContext context) throws BeansException { public void setApplicationContext(ApplicationContext context) throws BeansException {
final Backend backend = context.getBean(DomainConfig.BEAN_NAME_BACKEND_DEFAULT, Backend.class); final Backend backend = context.getBean(DomainConfig.BEAN_NAME_BACKEND_DEFAULT, Backend.class);
context = backend.getApplicationContext(); context = backend.getApplicationContext();
mapper = context.getBean(DomainConfig.BEAN_NAME_OBJECT_MAPPER, ObjectMapper.class); mapper = context.getBean(DomainConfig.BEAN_NAME_OBJECT_MAPPER, ObjectMapper.class);
factory = context.getBean(QueryRestConfig.BEAN_NAME_JSON_FACTORY, JsonFactory.class); factory = context.getBean(QueryRestConfig.BEAN_NAME_JSON_FACTORY, JsonFactory.class);
super.init(backend);
} }
@Override @Override
public void respond(final List<Entry<DAQQueryElement, Stream<Triple<BackendQuery, ChannelName, ?>>>> results, protected JsonFactory getJsonFactory() {
final OutputStream out, final Response response) throws Exception { return factory;
respond(factory, mapper, results, out, response);
} }
public static Set<String> getFields(final DAQQueryElement query, final boolean removeIdentifiers) { @Override
final Set<QueryField> queryFields = query.getFields(); protected ObjectMapper getObjectMapper() {
final List<Aggregation> aggregations = return mapper;
query.getAggregation() != null ? query.getAggregation().getAggregations() : null;
final List<Extrema> extrema = query.getAggregation() != null ? query.getAggregation().getExtrema() : null;
final Set<String> includedFields =
new LinkedHashSet<String>(queryFields.size() + (aggregations != null ? aggregations.size() : 0)
+ (extrema != null ? extrema.size() : 0));
for (final QueryField field : queryFields) {
includedFields.add(field.name());
}
if (aggregations != null) {
for (final Aggregation aggregation : aggregations) {
includedFields.add(aggregation.name());
}
}
if (extrema != null) {
// field of ExtremaCalculator (extrema in BinnedValueCombinedDataEvent and
// BinnedIndexCombinedDataEvent)
includedFields.add("extrema");
}
if (removeIdentifiers) {
// do not write channel/backend since it is already provided as key in mapping
includedFields.remove(QueryField.channel.name());
includedFields.remove(QueryField.backend.name());
}
return includedFields;
}
public static void respond(final JsonFactory factory, final ObjectMapper mapper,
final List<Entry<DAQQueryElement, Stream<Triple<BackendQuery, ChannelName, ?>>>> results,
final OutputStream out, final Response response) throws Exception {
final AtomicReference<Exception> exception = new AtomicReference<>();
final JsonGenerator generator = factory.createGenerator(out, JsonEncoding.UTF8);
try {
if (results.size() > 1) {
generator.writeStartArray();
}
results
.forEach(entryy -> {
final DAQQueryElement daqQuery = entryy.getKey();
final Set<String> includedFields = getFields(daqQuery, true);
final ObjectWriter writer = configureWriter(includedFields, mapper);
try {
generator.writeStartArray();
entryy.getValue()
/* ensure elements are sequentially written */
.sequential()
.forEach(
triple -> {
try {
generator.writeStartObject();
generator.writeFieldName(QueryField.channel.name());
writer.writeValue(generator, triple.getMiddle());
generator.writeFieldName(DATA_RESP_FIELD);
writer.writeValue(generator, triple.getRight());
generator.writeEndObject();
} catch (Exception e) {
LOGGER.error("Could not write channel name of channel '{}'", triple.getMiddle(),
e);
exception.compareAndSet(null, e);
} finally {
if (triple.getRight() instanceof Stream) {
((Stream<?>) (triple.getRight())).close();
}
}
});
generator.writeEndArray();
} catch (Exception e) {
LOGGER.error("Exception while writing json for '{}'", daqQuery.getChannels(), e);
exception.compareAndSet(null, e);
}
});
} finally {
if (results.size() > 1) {
generator.writeEndArray();
}
generator.flush();
generator.close();
}
if (exception.get() != null) {
throw exception.get();
}
}
/**
* Configures the writer dynamically by including the fields which should be included in the
* response.
*
* @param includedFields set of strings which correspond to the getter method names of the
* classes registered as a mixed-in
* @param mapper The ObjectMapper
* @return the configured writer that includes the specified fields
*/
public static ObjectWriter configureWriter(final Set<String> includedFields, final ObjectMapper mapper) {
final SimpleFilterProvider propertyFilter = new SimpleFilterProvider();
propertyFilter.addFilter(PropertyFilterMixin.FILTER_NAME, SimpleBeanPropertyFilter.filterOutAllExcept(includedFields));
// only write the properties not excluded in the filter
final ObjectWriter writer = mapper.writer(propertyFilter);
return writer;
} }
} }

View File

@ -1,232 +0,0 @@
package ch.psi.daq.queryrest.response.json;
import java.io.IOException;
import java.io.OutputStream;
import java.util.Iterator;
import java.util.LinkedHashMap;
import java.util.LinkedHashSet;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import java.util.Set;
import java.util.concurrent.atomic.AtomicReference;
import java.util.function.Function;
import java.util.function.ToLongFunction;
import java.util.stream.Collectors;
import java.util.stream.Stream;
import javax.servlet.ServletResponse;
import org.apache.commons.lang3.tuple.Triple;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.BeansException;
import org.springframework.context.ApplicationContext;
import org.springframework.context.ApplicationContextAware;
import com.fasterxml.jackson.core.JsonEncoding;
import com.fasterxml.jackson.core.JsonFactory;
import com.fasterxml.jackson.core.JsonGenerator;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.databind.ObjectWriter;
import ch.psi.daq.common.stream.match.ListCreator;
import ch.psi.daq.common.stream.match.ListFiller;
import ch.psi.daq.common.stream.match.Padder;
import ch.psi.daq.common.stream.match.StreamMatcher;
import ch.psi.daq.common.time.TimeUtils;
import ch.psi.daq.domain.DataEvent;
import ch.psi.daq.domain.backend.Backend;
import ch.psi.daq.domain.config.DomainConfig;
import ch.psi.daq.domain.json.ChannelName;
import ch.psi.daq.domain.query.DAQQueryElement;
import ch.psi.daq.domain.query.backend.BackendQuery;
import ch.psi.daq.domain.query.bin.BinningStrategy;
import ch.psi.daq.domain.query.bin.strategy.BinningStrategyPerBinPulse;
import ch.psi.daq.domain.query.bin.strategy.BinningStrategyPerBinTime;
import ch.psi.daq.domain.query.mapping.IncompleteStrategy;
import ch.psi.daq.domain.query.mapping.Mapping;
import ch.psi.daq.domain.query.operation.Aggregation;
import ch.psi.daq.domain.query.operation.QueryField;
import ch.psi.daq.domain.query.response.Response;
import ch.psi.daq.domain.request.range.RequestRange;
import ch.psi.daq.query.bin.aggregate.BinnedValueCombiner;
import ch.psi.daq.queryrest.config.QueryRestConfig;
import ch.psi.daq.queryrest.response.ResponseStreamWriter;
/**
* Takes a Java 8 stream and writes it to the output stream provided by the {@link ServletResponse}
* of the current request.
*/
public class JSONTableResponseStreamWriter implements ResponseStreamWriter, ApplicationContextAware {
private static final Logger LOGGER = LoggerFactory.getLogger(JSONTableResponseStreamWriter.class);
public static final Mapping DEFAULT_MAPPING = new Mapping(IncompleteStrategy.PROVIDE_AS_IS);
private static final long MILLIS_PER_PULSE = TimeUtils.MILLIS_PER_PULSE;
private static final Function<DataEvent, ChannelName> KEY_PROVIDER = (event) -> new ChannelName(event.getChannel(),
event.getBackend());
// try to match sync data (bsread) with non sync data (epics) based on the time usin 10 millis
// buckets.
private static final ToLongFunction<DataEvent> MATCHER_PROVIDER = (event) -> event.getGlobalMillis()
/ MILLIS_PER_PULSE;
private ObjectMapper mapper;
private JsonFactory factory;
// In case ArchiverAppliance had several events within the 10ms mapping interval, return these
// aggregations
private Set<String> defaultResponseAggregationsStr;
@SuppressWarnings("unchecked")
@Override
public void setApplicationContext(ApplicationContext context) throws BeansException {
final Backend backend = context.getBean(DomainConfig.BEAN_NAME_BACKEND_DEFAULT, Backend.class);
context = backend.getApplicationContext();
mapper = context.getBean(DomainConfig.BEAN_NAME_OBJECT_MAPPER, ObjectMapper.class);
factory = context.getBean(QueryRestConfig.BEAN_NAME_JSON_FACTORY, JsonFactory.class);
final Set<Aggregation> defaultResponseAggregations =
context.getBean(QueryRestConfig.BEAN_NAME_DEFAULT_RESPONSE_AGGREGATIONS, Set.class);;
defaultResponseAggregationsStr =
defaultResponseAggregations.stream().map(Aggregation::name)
.collect(Collectors.toCollection(LinkedHashSet::new));
}
@Override
public void respond(final List<Entry<DAQQueryElement, Stream<Triple<BackendQuery, ChannelName, ?>>>> results,
final OutputStream out, final Response response) throws Exception {
respond(factory, mapper, defaultResponseAggregationsStr, results, out, response);
}
@SuppressWarnings("unchecked")
public static void respond(final JsonFactory factory,
final ObjectMapper mapper, final Set<String> defaultResponseAggregationsStr,
final List<Entry<DAQQueryElement, Stream<Triple<BackendQuery, ChannelName, ?>>>> results,
final OutputStream out, final Response response) throws Exception {
final AtomicReference<Exception> exception = new AtomicReference<>();
final JsonGenerator generator = factory.createGenerator(out, JsonEncoding.UTF8);
try {
if (results.size() > 1) {
generator.writeStartArray();
}
results
.forEach(entryy -> {
final DAQQueryElement daqQuery = entryy.getKey();
final Set<String> includedFields = JSONResponseStreamWriter.getFields(daqQuery, false);
/* make sure identifiers are available */
includedFields.add(QueryField.channel.name());
includedFields.add(QueryField.backend.name());
// issue ATEST-633
if (!containsAggregation(includedFields)) {
includedFields.addAll(defaultResponseAggregationsStr);
}
final ObjectWriter writer = JSONResponseStreamWriter.configureWriter(includedFields, mapper);
/* get DataEvent stream of sub-queries for later match */
final Map<ChannelName, Stream<DataEvent>> streams =
new LinkedHashMap<>(results.size());
final AtomicReference<BackendQuery> backendQueryRef = new AtomicReference<>();
entryy.getValue()
.sequential()
.forEach(
triple -> {
backendQueryRef.compareAndSet(null, triple.getLeft());
if (triple.getRight() instanceof Stream) {
streams.put(triple.getMiddle(), ((Stream<DataEvent>) triple.getRight()));
} else {
final String message =
String.format("Expect a DataEvent Stream for '%s' but got '%s'.",
triple.getMiddle(), triple.getRight().getClass().getSimpleName());
LOGGER.warn(message);
streams.put(triple.getMiddle(), Stream.empty());
}
});
final BackendQuery backendQuery = backendQueryRef.get();
final RequestRange requestRange = backendQuery.getRequest().getRequestRange();
BinningStrategy binningStrategy = backendQuery.getBinningStrategy();
final Mapping mapping = daqQuery.getMappingOrDefault(DEFAULT_MAPPING);
final Padder<ChannelName, DataEvent> padder = mapping.getIncomplete().getPadder(backendQuery);
ToLongFunction<DataEvent> matchProvider = binningStrategy;
if (binningStrategy == null) {
matchProvider = MATCHER_PROVIDER;
if (requestRange.isPulseIdRangeDefined()) {
binningStrategy = new BinningStrategyPerBinPulse(1);
} else if (requestRange.isTimeRangeDefined()) {
binningStrategy = new BinningStrategyPerBinTime(MILLIS_PER_PULSE);
} else {
final String message = "Either time or pulseId range must be defined by the query!";
LOGGER.error(message);
throw new IllegalStateException(message);
}
}
binningStrategy.setRequestRange(requestRange);
/* online matching of the stream's content */
final StreamMatcher<ChannelName, DataEvent, List<DataEvent>> streamMatcher =
new StreamMatcher<>(
KEY_PROVIDER,
matchProvider,
new ListCreator<ChannelName, DataEvent>(),
new ListFiller<ChannelName, DataEvent>(),
new BinnedValueCombiner(binningStrategy),
padder,
streams.values());
final Iterator<List<DataEvent>> streamsMatchIter = streamMatcher.iterator();
try {
generator.writeStartObject();
generator.writeFieldName(JSONResponseStreamWriter.DATA_RESP_FIELD);
writer.writeValue(generator, streamsMatchIter);
generator.writeEndObject();
} catch (Exception e) {
LOGGER.error("Exception while writing json for '{}'", daqQuery.getChannels(), e);
exception.compareAndSet(null, e);
} finally {
if (streamMatcher != null) {
try {
streamMatcher.close();
} catch (Throwable t) {
LOGGER.error(
"Something went wrong while closing stream matcher for JSON table response writer.",
t);
}
}
}
});
} catch (IOException e) {
LOGGER.error("Could not write JSON.", e);
exception.compareAndSet(null, e);
} finally {
if (results.size() > 1) {
generator.writeEndArray();
}
generator.flush();
generator.close();
}
if (exception.get() != null) {
throw exception.get();
}
}
private static boolean containsAggregation(final Set<String> includedFields) {
for (final Aggregation aggregation : Aggregation.values()) {
if (includedFields.contains(aggregation.name())) {
return true;
}
}
return false;
}
}

View File

@ -1,32 +1,20 @@
package ch.psi.daq.queryrest.response.msgpack; package ch.psi.daq.queryrest.response.msgpack;
import java.io.OutputStream; import java.io.OutputStream;
import java.util.List;
import java.util.Map.Entry;
import java.util.stream.Stream;
import javax.servlet.http.HttpServletResponse; import javax.servlet.http.HttpServletResponse;
import org.apache.commons.lang3.tuple.Triple;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.context.ApplicationContext; import org.springframework.context.ApplicationContext;
import org.springframework.http.MediaType; import org.springframework.http.MediaType;
import ch.psi.daq.domain.json.ChannelName;
import ch.psi.daq.domain.query.DAQQueries;
import ch.psi.daq.domain.query.DAQQueryElement;
import ch.psi.daq.domain.query.backend.BackendQuery;
import ch.psi.daq.domain.query.operation.Compression; import ch.psi.daq.domain.query.operation.Compression;
import ch.psi.daq.domain.query.response.ResponseFormat; import ch.psi.daq.domain.query.response.ResponseFormat;
import ch.psi.daq.queryrest.query.QueryManager;
import ch.psi.daq.queryrest.response.AbstractHTTPResponse; import ch.psi.daq.queryrest.response.AbstractHTTPResponse;
import ch.psi.daq.queryrest.response.ResponseFormatter;
import ch.psi.daq.queryrest.response.ResponseStreamWriter; import ch.psi.daq.queryrest.response.ResponseStreamWriter;
import ch.psi.daq.queryrest.response.json.JSONHTTPResponse; import ch.psi.daq.queryrest.response.json.JSONHTTPResponse;
public class MsgPackHTTPResponse extends AbstractHTTPResponse { public class MsgPackHTTPResponse extends AbstractHTTPResponse {
private static final Logger LOGGER = LoggerFactory.getLogger(MsgPackHTTPResponse.class);
public static final String FORMAT = "msgp"; public static final String FORMAT = "msgp";
public static final String CONTENT_TYPE = MediaType.APPLICATION_OCTET_STREAM_VALUE; public static final String CONTENT_TYPE = MediaType.APPLICATION_OCTET_STREAM_VALUE;
@ -40,30 +28,22 @@ public class MsgPackHTTPResponse extends AbstractHTTPResponse {
} }
@Override @Override
public void respond(final ApplicationContext context, final DAQQueries queries, final HttpServletResponse response) throws Exception { public void validateQuery(final Object query){
JSONHTTPResponse.defaultQueryValidation(query);
}
@Override
public <R> void respond(
final ApplicationContext context,
final HttpServletResponse response,
final Object query,
final R result,
final ResponseFormatter<R> formatter) throws Exception {
final OutputStream out = handleCompressionAndResponseHeaders(response, CONTENT_TYPE); final OutputStream out = handleCompressionAndResponseHeaders(response, CONTENT_TYPE);
final boolean hasMapping = JSONHTTPResponse.validateQueries(queries); final ResponseStreamWriter streamWriter = context.getBean(MsgPackResponseStreamWriter.class);
try {
LOGGER.debug("Executing query '{}'", queries);
final QueryManager queryManager = context.getBean(QueryManager.class);
final ResponseStreamWriter streamWriter;
if (hasMapping) {
streamWriter = context.getBean(MsgPackTableResponseStreamWriter.class);
} else {
streamWriter = context.getBean(MsgPackResponseStreamWriter.class);
}
// execute query
final List<Entry<DAQQueryElement, Stream<Triple<BackendQuery, ChannelName, ?>>>> result =
queryManager.getEvents(queries);
// write the response back to the client using java 8 streams // write the response back to the client using java 8 streams
streamWriter.respond(result, out, this); streamWriter.respond(query, result, out, this, formatter);
} catch (Exception e) {
LOGGER.error("Failed to execute query '{}'.", queries, e);
throw e;
}
} }
} }

View File

@ -1,35 +1,19 @@
package ch.psi.daq.queryrest.response.msgpack; package ch.psi.daq.queryrest.response.msgpack;
import java.io.OutputStream;
import java.util.List;
import java.util.Map.Entry;
import java.util.stream.Stream;
import javax.servlet.ServletResponse;
import org.apache.commons.lang3.tuple.Triple;
import org.msgpack.jackson.dataformat.MessagePackFactory; import org.msgpack.jackson.dataformat.MessagePackFactory;
import org.springframework.beans.BeansException; import org.springframework.beans.BeansException;
import org.springframework.context.ApplicationContext; import org.springframework.context.ApplicationContext;
import org.springframework.context.ApplicationContextAware; import org.springframework.context.ApplicationContextAware;
import com.fasterxml.jackson.core.JsonFactory;
import com.fasterxml.jackson.databind.ObjectMapper; import com.fasterxml.jackson.databind.ObjectMapper;
import ch.psi.daq.domain.backend.Backend; import ch.psi.daq.domain.backend.Backend;
import ch.psi.daq.domain.config.DomainConfig; import ch.psi.daq.domain.config.DomainConfig;
import ch.psi.daq.domain.json.ChannelName;
import ch.psi.daq.domain.query.DAQQueryElement;
import ch.psi.daq.domain.query.backend.BackendQuery;
import ch.psi.daq.domain.query.response.Response;
import ch.psi.daq.queryrest.config.QueryRestConfig; import ch.psi.daq.queryrest.config.QueryRestConfig;
import ch.psi.daq.queryrest.response.ResponseStreamWriter; import ch.psi.daq.queryrest.response.json.AbstractResponseStreamWriter;
import ch.psi.daq.queryrest.response.json.JSONResponseStreamWriter;
/** public class MsgPackResponseStreamWriter extends AbstractResponseStreamWriter implements ApplicationContextAware {
* Takes a Java 8 stream and writes it to the output stream provided by the {@link ServletResponse}
* of the current request.
*/
public class MsgPackResponseStreamWriter implements ResponseStreamWriter, ApplicationContextAware {
private ObjectMapper mapper; private ObjectMapper mapper;
private MessagePackFactory factory; private MessagePackFactory factory;
@ -40,11 +24,17 @@ public class MsgPackResponseStreamWriter implements ResponseStreamWriter, Applic
mapper = context.getBean(DomainConfig.BEAN_NAME_OBJECT_MAPPER, ObjectMapper.class); mapper = context.getBean(DomainConfig.BEAN_NAME_OBJECT_MAPPER, ObjectMapper.class);
factory = context.getBean(QueryRestConfig.BEAN_NAME_MSG_PACK_FACTORY, MessagePackFactory.class); factory = context.getBean(QueryRestConfig.BEAN_NAME_MSG_PACK_FACTORY, MessagePackFactory.class);
super.init(backend);
} }
@Override @Override
public void respond(final List<Entry<DAQQueryElement, Stream<Triple<BackendQuery, ChannelName, ?>>>> results, protected JsonFactory getJsonFactory() {
final OutputStream out, final Response response) throws Exception { return factory;
JSONResponseStreamWriter.respond(factory, mapper, results, out, response); }
@Override
protected ObjectMapper getObjectMapper() {
return mapper;
} }
} }

View File

@ -1,64 +0,0 @@
package ch.psi.daq.queryrest.response.msgpack;
import java.io.OutputStream;
import java.util.LinkedHashSet;
import java.util.List;
import java.util.Map.Entry;
import java.util.Set;
import java.util.stream.Collectors;
import java.util.stream.Stream;
import javax.servlet.ServletResponse;
import org.apache.commons.lang3.tuple.Triple;
import org.msgpack.jackson.dataformat.MessagePackFactory;
import org.springframework.beans.BeansException;
import org.springframework.context.ApplicationContext;
import org.springframework.context.ApplicationContextAware;
import com.fasterxml.jackson.databind.ObjectMapper;
import ch.psi.daq.domain.backend.Backend;
import ch.psi.daq.domain.config.DomainConfig;
import ch.psi.daq.domain.json.ChannelName;
import ch.psi.daq.domain.query.DAQQueryElement;
import ch.psi.daq.domain.query.backend.BackendQuery;
import ch.psi.daq.domain.query.operation.Aggregation;
import ch.psi.daq.domain.query.response.Response;
import ch.psi.daq.queryrest.config.QueryRestConfig;
import ch.psi.daq.queryrest.response.ResponseStreamWriter;
import ch.psi.daq.queryrest.response.json.JSONTableResponseStreamWriter;
/**
* Takes a Java 8 stream and writes it to the output stream provided by the {@link ServletResponse}
* of the current request.
*/
public class MsgPackTableResponseStreamWriter implements ResponseStreamWriter, ApplicationContextAware {
private ObjectMapper mapper;
private MessagePackFactory factory;
// In case ArchiverAppliance had several events within the 10ms mapping interval, return these
// aggregations
private Set<String> defaultResponseAggregationsStr;
@SuppressWarnings("unchecked")
@Override
public void setApplicationContext(ApplicationContext context) throws BeansException {
final Backend backend = context.getBean(DomainConfig.BEAN_NAME_BACKEND_DEFAULT, Backend.class);
context = backend.getApplicationContext();
mapper = context.getBean(DomainConfig.BEAN_NAME_OBJECT_MAPPER, ObjectMapper.class);
factory = context.getBean(QueryRestConfig.BEAN_NAME_MSG_PACK_FACTORY, MessagePackFactory.class);
final Set<Aggregation> defaultResponseAggregations =
context.getBean(QueryRestConfig.BEAN_NAME_DEFAULT_RESPONSE_AGGREGATIONS, Set.class);;
defaultResponseAggregationsStr =
defaultResponseAggregations.stream().map(Aggregation::name)
.collect(Collectors.toCollection(LinkedHashSet::new));
}
@Override
public void respond(final List<Entry<DAQQueryElement, Stream<Triple<BackendQuery, ChannelName, ?>>>> results,
final OutputStream out, final Response response) throws Exception {
JSONTableResponseStreamWriter.respond(factory, mapper, defaultResponseAggregationsStr, results, out, response);
}
}

View File

@ -1,32 +1,20 @@
package ch.psi.daq.queryrest.response.smile; package ch.psi.daq.queryrest.response.smile;
import java.io.OutputStream; import java.io.OutputStream;
import java.util.List;
import java.util.Map.Entry;
import java.util.stream.Stream;
import javax.servlet.http.HttpServletResponse; import javax.servlet.http.HttpServletResponse;
import org.apache.commons.lang3.tuple.Triple;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.context.ApplicationContext; import org.springframework.context.ApplicationContext;
import org.springframework.http.MediaType; import org.springframework.http.MediaType;
import ch.psi.daq.domain.json.ChannelName;
import ch.psi.daq.domain.query.DAQQueries;
import ch.psi.daq.domain.query.DAQQueryElement;
import ch.psi.daq.domain.query.backend.BackendQuery;
import ch.psi.daq.domain.query.operation.Compression; import ch.psi.daq.domain.query.operation.Compression;
import ch.psi.daq.domain.query.response.ResponseFormat; import ch.psi.daq.domain.query.response.ResponseFormat;
import ch.psi.daq.queryrest.query.QueryManager;
import ch.psi.daq.queryrest.response.AbstractHTTPResponse; import ch.psi.daq.queryrest.response.AbstractHTTPResponse;
import ch.psi.daq.queryrest.response.ResponseFormatter;
import ch.psi.daq.queryrest.response.ResponseStreamWriter; import ch.psi.daq.queryrest.response.ResponseStreamWriter;
import ch.psi.daq.queryrest.response.json.JSONHTTPResponse; import ch.psi.daq.queryrest.response.json.JSONHTTPResponse;
public class SmileHTTPResponse extends AbstractHTTPResponse { public class SmileHTTPResponse extends AbstractHTTPResponse {
private static final Logger LOGGER = LoggerFactory.getLogger(SmileHTTPResponse.class);
public static final String FORMAT = "smile"; public static final String FORMAT = "smile";
public static final String CONTENT_TYPE = MediaType.APPLICATION_OCTET_STREAM_VALUE; public static final String CONTENT_TYPE = MediaType.APPLICATION_OCTET_STREAM_VALUE;
@ -40,30 +28,22 @@ public class SmileHTTPResponse extends AbstractHTTPResponse {
} }
@Override @Override
public void respond(final ApplicationContext context, final DAQQueries queries, final HttpServletResponse response) throws Exception { public void validateQuery(final Object query) {
JSONHTTPResponse.defaultQueryValidation(query);
}
@Override
public <R> void respond(
final ApplicationContext context,
final HttpServletResponse response,
final Object query,
final R result,
final ResponseFormatter<R> formatter) throws Exception {
final OutputStream out = handleCompressionAndResponseHeaders(response, CONTENT_TYPE); final OutputStream out = handleCompressionAndResponseHeaders(response, CONTENT_TYPE);
final boolean hasMapping = JSONHTTPResponse.validateQueries(queries); final ResponseStreamWriter streamWriter = context.getBean(SmileResponseStreamWriter.class);
try {
LOGGER.debug("Executing query '{}'", queries);
final QueryManager queryManager = context.getBean(QueryManager.class);
final ResponseStreamWriter streamWriter;
if (hasMapping) {
streamWriter = context.getBean(SmileTableResponseStreamWriter.class);
} else {
streamWriter = context.getBean(SmileResponseStreamWriter.class);
}
// execute query
final List<Entry<DAQQueryElement, Stream<Triple<BackendQuery, ChannelName, ?>>>> result =
queryManager.getEvents(queries);
// write the response back to the client using java 8 streams // write the response back to the client using java 8 streams
streamWriter.respond(result, out, this); streamWriter.respond(query, result, out, this, formatter);
} catch (Exception e) {
LOGGER.error("Failed to execute query '{}'.", queries, e);
throw e;
}
} }
} }

View File

@ -1,36 +1,19 @@
package ch.psi.daq.queryrest.response.smile; package ch.psi.daq.queryrest.response.smile;
import java.io.OutputStream;
import java.util.List;
import java.util.Map.Entry;
import java.util.stream.Stream;
import javax.servlet.ServletResponse;
import org.apache.commons.lang3.tuple.Triple;
import org.springframework.beans.BeansException; import org.springframework.beans.BeansException;
import org.springframework.context.ApplicationContext; import org.springframework.context.ApplicationContext;
import org.springframework.context.ApplicationContextAware; import org.springframework.context.ApplicationContextAware;
import com.fasterxml.jackson.core.JsonFactory;
import com.fasterxml.jackson.databind.ObjectMapper; import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.dataformat.smile.SmileFactory; import com.fasterxml.jackson.dataformat.smile.SmileFactory;
import ch.psi.daq.domain.backend.Backend; import ch.psi.daq.domain.backend.Backend;
import ch.psi.daq.domain.config.DomainConfig; import ch.psi.daq.domain.config.DomainConfig;
import ch.psi.daq.domain.json.ChannelName;
import ch.psi.daq.domain.query.DAQQueryElement;
import ch.psi.daq.domain.query.backend.BackendQuery;
import ch.psi.daq.domain.query.response.Response;
import ch.psi.daq.queryrest.config.QueryRestConfig; import ch.psi.daq.queryrest.config.QueryRestConfig;
import ch.psi.daq.queryrest.response.ResponseStreamWriter; import ch.psi.daq.queryrest.response.json.AbstractResponseStreamWriter;
import ch.psi.daq.queryrest.response.json.JSONResponseStreamWriter;
/**
* Takes a Java 8 stream and writes it to the output stream provided by the {@link ServletResponse}
* of the current request.
*/
public class SmileResponseStreamWriter implements ResponseStreamWriter, ApplicationContextAware {
public class SmileResponseStreamWriter extends AbstractResponseStreamWriter implements ApplicationContextAware {
private ObjectMapper mapper; private ObjectMapper mapper;
private SmileFactory factory; private SmileFactory factory;
@ -41,11 +24,17 @@ public class SmileResponseStreamWriter implements ResponseStreamWriter, Applicat
mapper = context.getBean(DomainConfig.BEAN_NAME_OBJECT_MAPPER, ObjectMapper.class); mapper = context.getBean(DomainConfig.BEAN_NAME_OBJECT_MAPPER, ObjectMapper.class);
factory = context.getBean(QueryRestConfig.BEAN_NAME_SMILE_FACTORY, SmileFactory.class); factory = context.getBean(QueryRestConfig.BEAN_NAME_SMILE_FACTORY, SmileFactory.class);
super.init(backend);
} }
@Override @Override
public void respond(final List<Entry<DAQQueryElement, Stream<Triple<BackendQuery, ChannelName, ?>>>> results, protected JsonFactory getJsonFactory() {
final OutputStream out, final Response response) throws Exception { return factory;
JSONResponseStreamWriter.respond(factory, mapper, results, out, response); }
@Override
protected ObjectMapper getObjectMapper() {
return mapper;
} }
} }

View File

@ -1,65 +0,0 @@
package ch.psi.daq.queryrest.response.smile;
import java.io.OutputStream;
import java.util.LinkedHashSet;
import java.util.List;
import java.util.Map.Entry;
import java.util.Set;
import java.util.stream.Collectors;
import java.util.stream.Stream;
import javax.servlet.ServletResponse;
import org.apache.commons.lang3.tuple.Triple;
import org.springframework.beans.BeansException;
import org.springframework.context.ApplicationContext;
import org.springframework.context.ApplicationContextAware;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.dataformat.smile.SmileFactory;
import ch.psi.daq.domain.backend.Backend;
import ch.psi.daq.domain.config.DomainConfig;
import ch.psi.daq.domain.json.ChannelName;
import ch.psi.daq.domain.query.DAQQueryElement;
import ch.psi.daq.domain.query.backend.BackendQuery;
import ch.psi.daq.domain.query.operation.Aggregation;
import ch.psi.daq.domain.query.response.Response;
import ch.psi.daq.queryrest.config.QueryRestConfig;
import ch.psi.daq.queryrest.response.ResponseStreamWriter;
import ch.psi.daq.queryrest.response.json.JSONTableResponseStreamWriter;
/**
* Takes a Java 8 stream and writes it to the output stream provided by the {@link ServletResponse}
* of the current request.
*/
public class SmileTableResponseStreamWriter implements ResponseStreamWriter, ApplicationContextAware {
private ObjectMapper mapper;
private SmileFactory factory;
// In case ArchiverAppliance had several events within the 10ms mapping interval, return these
// aggregations
private Set<String> defaultResponseAggregationsStr;
@SuppressWarnings("unchecked")
@Override
public void setApplicationContext(ApplicationContext context) throws BeansException {
final Backend backend = context.getBean(DomainConfig.BEAN_NAME_BACKEND_DEFAULT, Backend.class);
context = backend.getApplicationContext();
mapper = context.getBean(DomainConfig.BEAN_NAME_OBJECT_MAPPER, ObjectMapper.class);
factory = context.getBean(QueryRestConfig.BEAN_NAME_SMILE_FACTORY, SmileFactory.class);
final Set<Aggregation> defaultResponseAggregations =
context.getBean(QueryRestConfig.BEAN_NAME_DEFAULT_RESPONSE_AGGREGATIONS, Set.class);;
defaultResponseAggregationsStr =
defaultResponseAggregations.stream().map(Aggregation::name)
.collect(Collectors.toCollection(LinkedHashSet::new));
}
@Override
public void respond(final List<Entry<DAQQueryElement, Stream<Triple<BackendQuery, ChannelName, ?>>>> results,
final OutputStream out, final Response response) throws Exception {
JSONTableResponseStreamWriter.respond(factory, mapper, defaultResponseAggregationsStr, results, out, response);
}
}

View File

@ -1,9 +1,13 @@
# defines the fields that are included in the response # defines the fields that are included in the response of an event query
# if no fields have been specified by the user # if no fields have been specified by the user
queryrest.default.response.fields=channel,backend,pulseId,globalSeconds,iocSeconds,shape,eventCount,value queryrest.response.fields.event.query=channel,backend,pulseId,globalSeconds,iocSeconds,shape,eventCount,value
# aggregation which are included in the response by default if aggregation is enabled for a given query # aggregation which are included in the response by default if aggregation is enabled for a given query
queryrest.default.response.aggregations=min,mean,max queryrest.response.fields.event.query.aggregations=min,mean,max
# defines the fields that are included in the response of a config query
queryrest.response.fields.config.query=name,backend,pulseId,globalSeconds,type,shape,source
queryrest.response.fields.config.historic=name,backend,type,shape,source,description
# defines if the writer is a local writer (can write data to filesystem) # defines if the writer is a local writer (can write data to filesystem)
filestorage.writer.local=false filestorage.writer.local=false

View File

@ -21,9 +21,7 @@ import org.springframework.context.ApplicationContext;
import ch.psi.daq.common.serialization.SerializationHelper; import ch.psi.daq.common.serialization.SerializationHelper;
import ch.psi.daq.domain.backend.Backend; import ch.psi.daq.domain.backend.Backend;
import ch.psi.daq.domain.config.DomainConfig; import ch.psi.daq.domain.config.DomainConfig;
import ch.psi.daq.domain.query.channels.ChannelNameCache;
import ch.psi.daq.filestorage.config.FileStorageConfig; import ch.psi.daq.filestorage.config.FileStorageConfig;
import ch.psi.daq.query.config.QueryConfig;
import ch.psi.daq.test.queryrest.AbstractDaqRestTest; import ch.psi.daq.test.queryrest.AbstractDaqRestTest;
public class BackendTest extends AbstractDaqRestTest { public class BackendTest extends AbstractDaqRestTest {
@ -178,20 +176,4 @@ public class BackendTest extends AbstractDaqRestTest {
assertSame(parentContext, context.getParent()); assertSame(parentContext, context.getParent());
} }
} }
@Test
public void testSingleInstance_01() throws Exception {
final Set<ChannelNameCache> caches = new HashSet<>();
for (final Backend backend : Backend.getBackends()) {
ChannelNameCache cache =
backend.getApplicationContext().getBean(QueryConfig.BEAN_NAME_CHANNEL_NAME_CACHE,
ChannelNameCache.class);
caches.add(cache);
}
assertEquals(1, caches.size());
assertSame(
context.getBean(QueryConfig.BEAN_NAME_CHANNEL_NAME_CACHE, ChannelNameCache.class),
caches.iterator().next());
}
} }

View File

@ -24,7 +24,6 @@ import ch.psi.daq.domain.backend.Backend;
import ch.psi.daq.domain.backend.BackendType; import ch.psi.daq.domain.backend.BackendType;
import ch.psi.daq.domain.config.DomainConfig; import ch.psi.daq.domain.config.DomainConfig;
import ch.psi.daq.domain.events.ChannelEvent; import ch.psi.daq.domain.events.ChannelEvent;
import ch.psi.daq.domain.query.channels.reader.ChannelInfoReader;
import ch.psi.daq.domain.query.processor.QueryProcessor; import ch.psi.daq.domain.query.processor.QueryProcessor;
import ch.psi.daq.domain.reader.StreamEventReader; import ch.psi.daq.domain.reader.StreamEventReader;
import ch.psi.daq.domain.test.reader.TestReader; import ch.psi.daq.domain.test.reader.TestReader;
@ -68,7 +67,6 @@ public class DaqWebMvcConfig extends WebMvcConfigurerAdapter {
// backendType.initBean(backend, BEAN_NAME_READER, DataReader.class, overload); // backendType.initBean(backend, BEAN_NAME_READER, DataReader.class, overload);
backendType.initBean(backend, BEAN_NAME_READER, StreamEventReader.class, overload, backend); backendType.initBean(backend, BEAN_NAME_READER, StreamEventReader.class, overload, backend);
backendType.initBean(backend, BEAN_NAME_READER, ChannelInfoReader.class, overload, backend);
} }
} }

View File

@ -24,7 +24,7 @@ import ch.psi.daq.domain.config.DomainConfig;
import ch.psi.daq.domain.json.ChannelEventTableImpl; import ch.psi.daq.domain.json.ChannelEventTableImpl;
import ch.psi.daq.domain.query.DAQQuery; import ch.psi.daq.domain.query.DAQQuery;
import ch.psi.daq.domain.query.mapping.Mapping; import ch.psi.daq.domain.query.mapping.Mapping;
import ch.psi.daq.domain.query.operation.QueryField; import ch.psi.daq.domain.query.operation.EventField;
import ch.psi.daq.domain.query.response.Response; import ch.psi.daq.domain.query.response.Response;
import ch.psi.daq.domain.request.range.RequestRangePulseId; import ch.psi.daq.domain.request.range.RequestRangePulseId;
import ch.psi.daq.test.queryrest.AbstractDaqRestTest; import ch.psi.daq.test.queryrest.AbstractDaqRestTest;
@ -58,12 +58,12 @@ public abstract class AbstractQueryRestControllerTableTest extends AbstractDaqRe
101), 101),
TEST_CHANNEL_NAMES); TEST_CHANNEL_NAMES);
request.setMapping(new Mapping()); request.setMapping(new Mapping());
request.addField(QueryField.pulseId); request.addField(EventField.pulseId);
request.addField(QueryField.globalSeconds); request.addField(EventField.globalSeconds);
request.addField(QueryField.globalMillis); request.addField(EventField.globalMillis);
request.addField(QueryField.iocSeconds); request.addField(EventField.iocSeconds);
request.addField(QueryField.iocMillis); request.addField(EventField.iocMillis);
request.addField(QueryField.value); request.addField(EventField.value);
request.setResponse(getResponse()); request.setResponse(getResponse());
String content = mapper.writeValueAsString(request); String content = mapper.writeValueAsString(request);

View File

@ -24,7 +24,7 @@ import ch.psi.daq.domain.config.DomainConfig;
import ch.psi.daq.domain.json.ChannelEventsImpl; import ch.psi.daq.domain.json.ChannelEventsImpl;
import ch.psi.daq.domain.json.ChannelEventsList; import ch.psi.daq.domain.json.ChannelEventsList;
import ch.psi.daq.domain.query.DAQQuery; import ch.psi.daq.domain.query.DAQQuery;
import ch.psi.daq.domain.query.operation.QueryField; import ch.psi.daq.domain.query.operation.EventField;
import ch.psi.daq.domain.query.response.Response; import ch.psi.daq.domain.query.response.Response;
import ch.psi.daq.domain.request.range.RequestRangePulseId; import ch.psi.daq.domain.request.range.RequestRangePulseId;
import ch.psi.daq.test.queryrest.AbstractDaqRestTest; import ch.psi.daq.test.queryrest.AbstractDaqRestTest;
@ -56,12 +56,12 @@ public abstract class AbstractQueryRestControllerTest extends AbstractDaqRestTes
100, 100,
101), 101),
TEST_CHANNEL_NAMES); TEST_CHANNEL_NAMES);
request.addField(QueryField.pulseId); request.addField(EventField.pulseId);
request.addField(QueryField.globalSeconds); request.addField(EventField.globalSeconds);
request.addField(QueryField.globalMillis); request.addField(EventField.globalMillis);
request.addField(QueryField.iocSeconds); request.addField(EventField.iocSeconds);
request.addField(QueryField.iocMillis); request.addField(EventField.iocMillis);
request.addField(QueryField.value); request.addField(EventField.value);
request.setResponse(getResponse()); request.setResponse(getResponse());
String content = mapper.writeValueAsString(request); String content = mapper.writeValueAsString(request);

View File

@ -33,7 +33,7 @@ import ch.psi.daq.domain.query.operation.AggregationDescriptor;
import ch.psi.daq.domain.query.operation.AggregationType; import ch.psi.daq.domain.query.operation.AggregationType;
import ch.psi.daq.domain.query.operation.Compression; import ch.psi.daq.domain.query.operation.Compression;
import ch.psi.daq.domain.query.operation.Extrema; import ch.psi.daq.domain.query.operation.Extrema;
import ch.psi.daq.domain.query.operation.QueryField; import ch.psi.daq.domain.query.operation.EventField;
import ch.psi.daq.domain.request.range.RequestRangeDate; import ch.psi.daq.domain.request.range.RequestRangeDate;
import ch.psi.daq.domain.request.range.RequestRangePulseId; import ch.psi.daq.domain.request.range.RequestRangePulseId;
import ch.psi.daq.domain.request.range.RequestRangeTime; import ch.psi.daq.domain.request.range.RequestRangeTime;
@ -65,16 +65,16 @@ public class CSVQueryRestControllerTest extends AbstractDaqRestTest {
channels); channels);
request.setResponse(new CSVHTTPResponse()); request.setResponse(new CSVHTTPResponse());
LinkedHashSet<QueryField> queryFields = new LinkedHashSet<>(); LinkedHashSet<EventField> queryFields = new LinkedHashSet<>();
queryFields.add(QueryField.channel); queryFields.add(EventField.channel);
queryFields.add(QueryField.pulseId); queryFields.add(EventField.pulseId);
queryFields.add(QueryField.iocSeconds); queryFields.add(EventField.iocSeconds);
queryFields.add(QueryField.iocMillis); queryFields.add(EventField.iocMillis);
queryFields.add(QueryField.globalSeconds); queryFields.add(EventField.globalSeconds);
queryFields.add(QueryField.globalMillis); queryFields.add(EventField.globalMillis);
queryFields.add(QueryField.shape); queryFields.add(EventField.shape);
queryFields.add(QueryField.eventCount); queryFields.add(EventField.eventCount);
queryFields.add(QueryField.value); queryFields.add(EventField.value);
request.setFields(queryFields); request.setFields(queryFields);
String content = mapper.writeValueAsString(request); String content = mapper.writeValueAsString(request);
@ -107,7 +107,7 @@ public class CSVQueryRestControllerTest extends AbstractDaqRestTest {
assertEquals(queryFields.size() * channels.size(), record.size()); assertEquals(queryFields.size() * channels.size(), record.size());
int column = 0; int column = 0;
for (String channel : channels) { for (String channel : channels) {
for (QueryField queryField : queryFields) { for (EventField queryField : queryFields) {
assertEquals(channel + CSVResponseStreamWriter.DELIMITER_CHANNELNAME_FIELDNAME + queryField.name(), assertEquals(channel + CSVResponseStreamWriter.DELIMITER_CHANNELNAME_FIELDNAME + queryField.name(),
record.get(column++)); record.get(column++));
} }
@ -160,11 +160,11 @@ public class CSVQueryRestControllerTest extends AbstractDaqRestTest {
channels); channels);
request.setResponse(new CSVHTTPResponse()); request.setResponse(new CSVHTTPResponse());
LinkedHashSet<QueryField> queryFields = new LinkedHashSet<>(); LinkedHashSet<EventField> queryFields = new LinkedHashSet<>();
queryFields.add(QueryField.channel); queryFields.add(EventField.channel);
queryFields.add(QueryField.pulseId); queryFields.add(EventField.pulseId);
queryFields.add(QueryField.globalMillis); queryFields.add(EventField.globalMillis);
queryFields.add(QueryField.value); queryFields.add(EventField.value);
request.setFields(queryFields); request.setFields(queryFields);
String content = mapper.writeValueAsString(request); String content = mapper.writeValueAsString(request);
@ -196,7 +196,7 @@ public class CSVQueryRestControllerTest extends AbstractDaqRestTest {
assertEquals(queryFields.size() * channels.size(), record.size()); assertEquals(queryFields.size() * channels.size(), record.size());
int column = 0; int column = 0;
for (String channel : channels) { for (String channel : channels) {
for (QueryField queryField : queryFields) { for (EventField queryField : queryFields) {
assertEquals(channel + CSVResponseStreamWriter.DELIMITER_CHANNELNAME_FIELDNAME + queryField.name(), assertEquals(channel + CSVResponseStreamWriter.DELIMITER_CHANNELNAME_FIELDNAME + queryField.name(),
record.get(column++)); record.get(column++));
} }
@ -252,9 +252,9 @@ public class CSVQueryRestControllerTest extends AbstractDaqRestTest {
channels); channels);
request.setResponse(new CSVHTTPResponse()); request.setResponse(new CSVHTTPResponse());
LinkedHashSet<QueryField> queryFields = new LinkedHashSet<>(); LinkedHashSet<EventField> queryFields = new LinkedHashSet<>();
queryFields.add(QueryField.channel); queryFields.add(EventField.channel);
queryFields.add(QueryField.value); queryFields.add(EventField.value);
request.setFields(queryFields); request.setFields(queryFields);
String content = mapper.writeValueAsString(request); String content = mapper.writeValueAsString(request);
@ -287,12 +287,12 @@ public class CSVQueryRestControllerTest extends AbstractDaqRestTest {
assertEquals((queryFields.size() + 1) * channels.size(), record.size()); assertEquals((queryFields.size() + 1) * channels.size(), record.size());
int column = 0; int column = 0;
for (String channel : channels) { for (String channel : channels) {
for (QueryField queryField : queryFields) { for (EventField queryField : queryFields) {
assertEquals(channel + CSVResponseStreamWriter.DELIMITER_CHANNELNAME_FIELDNAME + queryField.name(), assertEquals(channel + CSVResponseStreamWriter.DELIMITER_CHANNELNAME_FIELDNAME + queryField.name(),
record.get(column++)); record.get(column++));
} }
assertEquals( assertEquals(
channel + CSVResponseStreamWriter.DELIMITER_CHANNELNAME_FIELDNAME + QueryField.globalMillis.name(), channel + CSVResponseStreamWriter.DELIMITER_CHANNELNAME_FIELDNAME + EventField.globalMillis.name(),
record.get(column++)); record.get(column++));
} }
@ -430,16 +430,16 @@ public class CSVQueryRestControllerTest extends AbstractDaqRestTest {
channels); channels);
request.setResponse(new CSVHTTPResponse()); request.setResponse(new CSVHTTPResponse());
LinkedHashSet<QueryField> queryFields = new LinkedHashSet<>(); LinkedHashSet<EventField> queryFields = new LinkedHashSet<>();
queryFields.add(QueryField.channel); queryFields.add(EventField.channel);
queryFields.add(QueryField.pulseId); queryFields.add(EventField.pulseId);
queryFields.add(QueryField.iocSeconds); queryFields.add(EventField.iocSeconds);
queryFields.add(QueryField.iocMillis); queryFields.add(EventField.iocMillis);
queryFields.add(QueryField.globalSeconds); queryFields.add(EventField.globalSeconds);
queryFields.add(QueryField.globalMillis); queryFields.add(EventField.globalMillis);
queryFields.add(QueryField.shape); queryFields.add(EventField.shape);
queryFields.add(QueryField.eventCount); queryFields.add(EventField.eventCount);
queryFields.add(QueryField.value); queryFields.add(EventField.value);
request.setFields(queryFields); request.setFields(queryFields);
String content = mapper.writeValueAsString(request); String content = mapper.writeValueAsString(request);
@ -472,7 +472,7 @@ public class CSVQueryRestControllerTest extends AbstractDaqRestTest {
assertEquals(queryFields.size() * channels.size(), record.size()); assertEquals(queryFields.size() * channels.size(), record.size());
int column = 0; int column = 0;
for (String channel : channels) { for (String channel : channels) {
for (QueryField queryField : queryFields) { for (EventField queryField : queryFields) {
assertEquals(channel + CSVResponseStreamWriter.DELIMITER_CHANNELNAME_FIELDNAME + queryField.name(), assertEquals(channel + CSVResponseStreamWriter.DELIMITER_CHANNELNAME_FIELDNAME + queryField.name(),
record.get(column++)); record.get(column++));
} }
@ -514,16 +514,16 @@ public class CSVQueryRestControllerTest extends AbstractDaqRestTest {
channels); channels);
request.setResponse(new CSVHTTPResponse()); request.setResponse(new CSVHTTPResponse());
LinkedHashSet<QueryField> queryFields = new LinkedHashSet<>(); LinkedHashSet<EventField> queryFields = new LinkedHashSet<>();
queryFields.add(QueryField.channel); queryFields.add(EventField.channel);
queryFields.add(QueryField.pulseId); queryFields.add(EventField.pulseId);
queryFields.add(QueryField.iocSeconds); queryFields.add(EventField.iocSeconds);
queryFields.add(QueryField.iocMillis); queryFields.add(EventField.iocMillis);
queryFields.add(QueryField.globalSeconds); queryFields.add(EventField.globalSeconds);
queryFields.add(QueryField.globalMillis); queryFields.add(EventField.globalMillis);
queryFields.add(QueryField.shape); queryFields.add(EventField.shape);
queryFields.add(QueryField.eventCount); queryFields.add(EventField.eventCount);
queryFields.add(QueryField.value); queryFields.add(EventField.value);
request.setFields(queryFields); request.setFields(queryFields);
String content = mapper.writeValueAsString(request); String content = mapper.writeValueAsString(request);
@ -556,7 +556,7 @@ public class CSVQueryRestControllerTest extends AbstractDaqRestTest {
assertEquals(queryFields.size() * channels.size(), record.size()); assertEquals(queryFields.size() * channels.size(), record.size());
int column = 0; int column = 0;
for (String channel : channels) { for (String channel : channels) {
for (QueryField queryField : queryFields) { for (EventField queryField : queryFields) {
assertEquals(channel + CSVResponseStreamWriter.DELIMITER_CHANNELNAME_FIELDNAME + queryField.name(), assertEquals(channel + CSVResponseStreamWriter.DELIMITER_CHANNELNAME_FIELDNAME + queryField.name(),
record.get(column++)); record.get(column++));
} }
@ -599,18 +599,18 @@ public class CSVQueryRestControllerTest extends AbstractDaqRestTest {
channels); channels);
request.setResponse(new CSVHTTPResponse()); request.setResponse(new CSVHTTPResponse());
LinkedHashSet<QueryField> queryFields = new LinkedHashSet<>(); LinkedHashSet<EventField> queryFields = new LinkedHashSet<>();
queryFields.add(QueryField.channel); queryFields.add(EventField.channel);
queryFields.add(QueryField.pulseId); queryFields.add(EventField.pulseId);
queryFields.add(QueryField.iocSeconds); queryFields.add(EventField.iocSeconds);
queryFields.add(QueryField.iocDate); queryFields.add(EventField.iocDate);
queryFields.add(QueryField.iocMillis); queryFields.add(EventField.iocMillis);
queryFields.add(QueryField.globalSeconds); queryFields.add(EventField.globalSeconds);
queryFields.add(QueryField.globalDate); queryFields.add(EventField.globalDate);
queryFields.add(QueryField.globalMillis); queryFields.add(EventField.globalMillis);
queryFields.add(QueryField.shape); queryFields.add(EventField.shape);
queryFields.add(QueryField.eventCount); queryFields.add(EventField.eventCount);
queryFields.add(QueryField.value); queryFields.add(EventField.value);
request.setFields(queryFields); request.setFields(queryFields);
String content = mapper.writeValueAsString(request); String content = mapper.writeValueAsString(request);
@ -643,7 +643,7 @@ public class CSVQueryRestControllerTest extends AbstractDaqRestTest {
assertEquals(queryFields.size() * channels.size(), record.size()); assertEquals(queryFields.size() * channels.size(), record.size());
int column = 0; int column = 0;
for (String channel : channels) { for (String channel : channels) {
for (QueryField queryField : queryFields) { for (EventField queryField : queryFields) {
assertEquals(channel + CSVResponseStreamWriter.DELIMITER_CHANNELNAME_FIELDNAME + queryField.name(), assertEquals(channel + CSVResponseStreamWriter.DELIMITER_CHANNELNAME_FIELDNAME + queryField.name(),
record.get(column++)); record.get(column++));
} }
@ -756,15 +756,15 @@ public class CSVQueryRestControllerTest extends AbstractDaqRestTest {
request.setAggregation(new AggregationDescriptor().setNrOfBins(2).setAggregations(aggregations)); request.setAggregation(new AggregationDescriptor().setNrOfBins(2).setAggregations(aggregations));
request.setResponse(new CSVHTTPResponse()); request.setResponse(new CSVHTTPResponse());
LinkedHashSet<QueryField> queryFields = new LinkedHashSet<>(); LinkedHashSet<EventField> queryFields = new LinkedHashSet<>();
queryFields.add(QueryField.channel); queryFields.add(EventField.channel);
queryFields.add(QueryField.pulseId); queryFields.add(EventField.pulseId);
queryFields.add(QueryField.iocSeconds); queryFields.add(EventField.iocSeconds);
queryFields.add(QueryField.iocMillis); queryFields.add(EventField.iocMillis);
queryFields.add(QueryField.globalSeconds); queryFields.add(EventField.globalSeconds);
queryFields.add(QueryField.globalMillis); queryFields.add(EventField.globalMillis);
queryFields.add(QueryField.shape); queryFields.add(EventField.shape);
queryFields.add(QueryField.eventCount); queryFields.add(EventField.eventCount);
request.setFields(queryFields); request.setFields(queryFields);
String content = mapper.writeValueAsString(request); String content = mapper.writeValueAsString(request);
@ -797,12 +797,12 @@ public class CSVQueryRestControllerTest extends AbstractDaqRestTest {
assertEquals((queryFields.size() + aggregations.size()) * channels.size(), record.size()); assertEquals((queryFields.size() + aggregations.size()) * channels.size(), record.size());
int column = 0; int column = 0;
for (String channel : channels) { for (String channel : channels) {
for (QueryField queryField : queryFields) { for (EventField queryField : queryFields) {
assertEquals(channel + CSVResponseStreamWriter.DELIMITER_CHANNELNAME_FIELDNAME + queryField.name(), assertEquals(channel + CSVResponseStreamWriter.DELIMITER_CHANNELNAME_FIELDNAME + queryField.name(),
record.get(column++)); record.get(column++));
} }
for (Aggregation aggregation : aggregations) { for (Aggregation aggregation : aggregations) {
assertEquals(channel + CSVResponseStreamWriter.DELIMITER_CHANNELNAME_FIELDNAME + QueryField.value assertEquals(channel + CSVResponseStreamWriter.DELIMITER_CHANNELNAME_FIELDNAME + EventField.value
+ CSVResponseStreamWriter.DELIMITER_CHANNELNAME_FIELDNAME + aggregation.name(), + CSVResponseStreamWriter.DELIMITER_CHANNELNAME_FIELDNAME + aggregation.name(),
record.get(column++)); record.get(column++));
} }
@ -859,21 +859,21 @@ public class CSVQueryRestControllerTest extends AbstractDaqRestTest {
.setExtrema(extrema)); .setExtrema(extrema));
request.setResponse(new CSVHTTPResponse()); request.setResponse(new CSVHTTPResponse());
LinkedHashSet<QueryField> queryFields = new LinkedHashSet<>(); LinkedHashSet<EventField> queryFields = new LinkedHashSet<>();
queryFields.add(QueryField.channel); queryFields.add(EventField.channel);
queryFields.add(QueryField.pulseId); queryFields.add(EventField.pulseId);
queryFields.add(QueryField.iocSeconds); queryFields.add(EventField.iocSeconds);
queryFields.add(QueryField.iocMillis); queryFields.add(EventField.iocMillis);
queryFields.add(QueryField.globalSeconds); queryFields.add(EventField.globalSeconds);
queryFields.add(QueryField.globalMillis); queryFields.add(EventField.globalMillis);
queryFields.add(QueryField.shape); queryFields.add(EventField.shape);
queryFields.add(QueryField.eventCount); queryFields.add(EventField.eventCount);
queryFields.add(QueryField.value); queryFields.add(EventField.value);
request.setFields(queryFields); request.setFields(queryFields);
Set<QueryField> extremaFields = new LinkedHashSet<>(); Set<EventField> extremaFields = new LinkedHashSet<>();
for (Extrema extremum : extrema) { for (Extrema extremum : extrema) {
for (QueryField queryField : queryFields) { for (EventField queryField : queryFields) {
if (extremum.getAccessor(queryField) != null) { if (extremum.getAccessor(queryField) != null) {
extremaFields.add(queryField); extremaFields.add(queryField);
} }
@ -900,7 +900,7 @@ public class CSVQueryRestControllerTest extends AbstractDaqRestTest {
CSVParser csvParser = new CSVParser(reader, csvFormat); CSVParser csvParser = new CSVParser(reader, csvFormat);
// will not be included as it is an aggregation // will not be included as it is an aggregation
queryFields.remove(QueryField.value); queryFields.remove(EventField.value);
try { try {
long pulse = 0; long pulse = 0;
int totalRows = 2; int totalRows = 2;
@ -914,17 +914,17 @@ public class CSVQueryRestControllerTest extends AbstractDaqRestTest {
record.size()); record.size());
int column = 0; int column = 0;
for (String channel : channels) { for (String channel : channels) {
for (QueryField queryField : queryFields) { for (EventField queryField : queryFields) {
assertEquals(channel + CSVResponseStreamWriter.DELIMITER_CHANNELNAME_FIELDNAME + queryField.name(), assertEquals(channel + CSVResponseStreamWriter.DELIMITER_CHANNELNAME_FIELDNAME + queryField.name(),
record.get(column++)); record.get(column++));
} }
for (Aggregation aggregation : aggregations) { for (Aggregation aggregation : aggregations) {
assertEquals(channel + CSVResponseStreamWriter.DELIMITER_CHANNELNAME_FIELDNAME + QueryField.value assertEquals(channel + CSVResponseStreamWriter.DELIMITER_CHANNELNAME_FIELDNAME + EventField.value
+ CSVResponseStreamWriter.DELIMITER_CHANNELNAME_FIELDNAME + aggregation.name(), + CSVResponseStreamWriter.DELIMITER_CHANNELNAME_FIELDNAME + aggregation.name(),
record.get(column++)); record.get(column++));
} }
for (Extrema extremum : extrema) { for (Extrema extremum : extrema) {
for (QueryField queryField : extremaFields) { for (EventField queryField : extremaFields) {
assertEquals(channel + CSVResponseStreamWriter.DELIMITER_CHANNELNAME_FIELDNAME assertEquals(channel + CSVResponseStreamWriter.DELIMITER_CHANNELNAME_FIELDNAME
+ CSVResponseStreamWriter.FIELDNAME_EXTREMA + CSVResponseStreamWriter.FIELDNAME_EXTREMA
+ CSVResponseStreamWriter.DELIMITER_CHANNELNAME_FIELDNAME + extremum.name() + CSVResponseStreamWriter.DELIMITER_CHANNELNAME_FIELDNAME + extremum.name()
@ -995,15 +995,15 @@ public class CSVQueryRestControllerTest extends AbstractDaqRestTest {
request.setAggregation(new AggregationDescriptor().setDurationPerBin(100).setAggregations(aggregations)); request.setAggregation(new AggregationDescriptor().setDurationPerBin(100).setAggregations(aggregations));
request.setResponse(new CSVHTTPResponse()); request.setResponse(new CSVHTTPResponse());
LinkedHashSet<QueryField> queryFields = new LinkedHashSet<>(); LinkedHashSet<EventField> queryFields = new LinkedHashSet<>();
queryFields.add(QueryField.channel); queryFields.add(EventField.channel);
queryFields.add(QueryField.pulseId); queryFields.add(EventField.pulseId);
queryFields.add(QueryField.iocSeconds); queryFields.add(EventField.iocSeconds);
queryFields.add(QueryField.iocMillis); queryFields.add(EventField.iocMillis);
queryFields.add(QueryField.globalSeconds); queryFields.add(EventField.globalSeconds);
queryFields.add(QueryField.globalMillis); queryFields.add(EventField.globalMillis);
queryFields.add(QueryField.shape); queryFields.add(EventField.shape);
queryFields.add(QueryField.eventCount); queryFields.add(EventField.eventCount);
request.setFields(queryFields); request.setFields(queryFields);
String content = mapper.writeValueAsString(request); String content = mapper.writeValueAsString(request);
@ -1025,7 +1025,7 @@ public class CSVQueryRestControllerTest extends AbstractDaqRestTest {
} }
private void checkDateRangeQueryBinSizeAggregate(final List<String> channels, final List<Aggregation> aggregations, private void checkDateRangeQueryBinSizeAggregate(final List<String> channels, final List<Aggregation> aggregations,
final Set<QueryField> queryFields, final String response) throws Exception { final Set<EventField> queryFields, final String response) throws Exception {
CSVFormat csvFormat = CSVFormat.EXCEL.withDelimiter(CSVResponseStreamWriter.DELIMITER_CVS); CSVFormat csvFormat = CSVFormat.EXCEL.withDelimiter(CSVResponseStreamWriter.DELIMITER_CVS);
StringReader reader = new StringReader(response); StringReader reader = new StringReader(response);
CSVParser csvParser = new CSVParser(reader, csvFormat); CSVParser csvParser = new CSVParser(reader, csvFormat);
@ -1041,12 +1041,12 @@ public class CSVQueryRestControllerTest extends AbstractDaqRestTest {
assertEquals((queryFields.size() + aggregations.size()) * channels.size(), record.size()); assertEquals((queryFields.size() + aggregations.size()) * channels.size(), record.size());
int column = 0; int column = 0;
for (String channel : channels) { for (String channel : channels) {
for (QueryField queryField : queryFields) { for (EventField queryField : queryFields) {
assertEquals(channel + CSVResponseStreamWriter.DELIMITER_CHANNELNAME_FIELDNAME + queryField.name(), assertEquals(channel + CSVResponseStreamWriter.DELIMITER_CHANNELNAME_FIELDNAME + queryField.name(),
record.get(column++)); record.get(column++));
} }
for (Aggregation aggregation : aggregations) { for (Aggregation aggregation : aggregations) {
assertEquals(channel + CSVResponseStreamWriter.DELIMITER_CHANNELNAME_FIELDNAME + QueryField.value assertEquals(channel + CSVResponseStreamWriter.DELIMITER_CHANNELNAME_FIELDNAME + EventField.value
+ CSVResponseStreamWriter.DELIMITER_CHANNELNAME_FIELDNAME + aggregation.name(), + CSVResponseStreamWriter.DELIMITER_CHANNELNAME_FIELDNAME + aggregation.name(),
record.get(column++)); record.get(column++));
} }
@ -1102,15 +1102,15 @@ public class CSVQueryRestControllerTest extends AbstractDaqRestTest {
request.setAggregation(new AggregationDescriptor().setDurationPerBin(100)); request.setAggregation(new AggregationDescriptor().setDurationPerBin(100));
request.setResponse(new CSVHTTPResponse()); request.setResponse(new CSVHTTPResponse());
LinkedHashSet<QueryField> queryFields = new LinkedHashSet<>(); LinkedHashSet<EventField> queryFields = new LinkedHashSet<>();
queryFields.add(QueryField.channel); queryFields.add(EventField.channel);
queryFields.add(QueryField.pulseId); queryFields.add(EventField.pulseId);
queryFields.add(QueryField.iocSeconds); queryFields.add(EventField.iocSeconds);
queryFields.add(QueryField.iocMillis); queryFields.add(EventField.iocMillis);
queryFields.add(QueryField.globalSeconds); queryFields.add(EventField.globalSeconds);
queryFields.add(QueryField.globalMillis); queryFields.add(EventField.globalMillis);
queryFields.add(QueryField.shape); queryFields.add(EventField.shape);
queryFields.add(QueryField.eventCount); queryFields.add(EventField.eventCount);
request.setFields(queryFields); request.setFields(queryFields);
String content = mapper.writeValueAsString(request); String content = mapper.writeValueAsString(request);
@ -1144,9 +1144,9 @@ public class CSVQueryRestControllerTest extends AbstractDaqRestTest {
channels); channels);
request.setResponse(new CSVHTTPResponse()); request.setResponse(new CSVHTTPResponse());
LinkedHashSet<QueryField> queryFields = new LinkedHashSet<>(); LinkedHashSet<EventField> queryFields = new LinkedHashSet<>();
queryFields.add(QueryField.channel); queryFields.add(EventField.channel);
queryFields.add(QueryField.value); queryFields.add(EventField.value);
request.setFields(queryFields); request.setFields(queryFields);
String content = mapper.writeValueAsString(request); String content = mapper.writeValueAsString(request);

View File

@ -41,7 +41,7 @@ import ch.psi.daq.domain.query.operation.Aggregation;
import ch.psi.daq.domain.query.operation.AggregationDescriptor; import ch.psi.daq.domain.query.operation.AggregationDescriptor;
import ch.psi.daq.domain.query.operation.AggregationType; import ch.psi.daq.domain.query.operation.AggregationType;
import ch.psi.daq.domain.query.operation.Extrema; import ch.psi.daq.domain.query.operation.Extrema;
import ch.psi.daq.domain.query.operation.QueryField; import ch.psi.daq.domain.query.operation.EventField;
import ch.psi.daq.domain.query.transform.ValueTransformationSequence; import ch.psi.daq.domain.query.transform.ValueTransformationSequence;
import ch.psi.daq.domain.query.transform.image.ImageDownScaleValueTransformation; import ch.psi.daq.domain.query.transform.image.ImageDownScaleValueTransformation;
import ch.psi.daq.domain.query.transform.image.ImageEncodingValueTransformation; import ch.psi.daq.domain.query.transform.image.ImageEncodingValueTransformation;
@ -94,11 +94,11 @@ public class JsonQueryRestControllerTableTest extends AbstractDaqRestTest implem
101), 101),
TEST_CHANNEL_NAMES); TEST_CHANNEL_NAMES);
request.setMapping(new Mapping()); request.setMapping(new Mapping());
request.addField(QueryField.pulseId); request.addField(EventField.pulseId);
request.addField(QueryField.globalSeconds); request.addField(EventField.globalSeconds);
request.addField(QueryField.globalMillis); request.addField(EventField.globalMillis);
request.addField(QueryField.iocSeconds); request.addField(EventField.iocSeconds);
request.addField(QueryField.iocMillis); request.addField(EventField.iocMillis);
String content = mapper.writeValueAsString(request); String content = mapper.writeValueAsString(request);
System.out.println(content); System.out.println(content);
@ -172,12 +172,12 @@ public class JsonQueryRestControllerTableTest extends AbstractDaqRestTest implem
101), 101),
TEST_CHANNEL_NAMES); TEST_CHANNEL_NAMES);
request.setMapping(new Mapping()); request.setMapping(new Mapping());
request.addField(QueryField.pulseId); request.addField(EventField.pulseId);
request.addField(QueryField.globalSeconds); request.addField(EventField.globalSeconds);
request.addField(QueryField.globalMillis); request.addField(EventField.globalMillis);
request.addField(QueryField.iocSeconds); request.addField(EventField.iocSeconds);
request.addField(QueryField.iocMillis); request.addField(EventField.iocMillis);
request.addField(QueryField.value); request.addField(EventField.value);
AggregationDescriptor aggregation = new AggregationDescriptor(AggregationType.value); AggregationDescriptor aggregation = new AggregationDescriptor(AggregationType.value);
aggregation.setNrOfBins(1); aggregation.setNrOfBins(1);
@ -237,11 +237,11 @@ public class JsonQueryRestControllerTableTest extends AbstractDaqRestTest implem
101), 101),
TEST_CHANNEL_02, TEST_CHANNEL_01); TEST_CHANNEL_02, TEST_CHANNEL_01);
request.setMapping(new Mapping()); request.setMapping(new Mapping());
request.addField(QueryField.pulseId); request.addField(EventField.pulseId);
request.addField(QueryField.globalSeconds); request.addField(EventField.globalSeconds);
request.addField(QueryField.globalMillis); request.addField(EventField.globalMillis);
request.addField(QueryField.iocSeconds); request.addField(EventField.iocSeconds);
request.addField(QueryField.iocMillis); request.addField(EventField.iocMillis);
String content = mapper.writeValueAsString(request); String content = mapper.writeValueAsString(request);
System.out.println(content); System.out.println(content);
@ -314,11 +314,11 @@ public class JsonQueryRestControllerTableTest extends AbstractDaqRestTest implem
101), 101),
TEST_CHANNEL_01); TEST_CHANNEL_01);
request.setMapping(new Mapping()); request.setMapping(new Mapping());
request.addField(QueryField.pulseId); request.addField(EventField.pulseId);
request.addField(QueryField.globalSeconds); request.addField(EventField.globalSeconds);
request.addField(QueryField.globalMillis); request.addField(EventField.globalMillis);
request.addField(QueryField.iocSeconds); request.addField(EventField.iocSeconds);
request.addField(QueryField.iocMillis); request.addField(EventField.iocMillis);
String content = mapper.writeValueAsString(request); String content = mapper.writeValueAsString(request);
System.out.println(content); System.out.println(content);
@ -372,8 +372,8 @@ public class JsonQueryRestControllerTableTest extends AbstractDaqRestTest implem
new AggregationDescriptor().setNrOfBins(2), new AggregationDescriptor().setNrOfBins(2),
TEST_CHANNEL_NAMES); TEST_CHANNEL_NAMES);
request.setMapping(new Mapping()); request.setMapping(new Mapping());
request.addField(QueryField.pulseId); request.addField(EventField.pulseId);
request.addField(QueryField.eventCount); request.addField(EventField.eventCount);
String content = mapper.writeValueAsString(request); String content = mapper.writeValueAsString(request);
System.out.println(content); System.out.println(content);
@ -2230,11 +2230,11 @@ public class JsonQueryRestControllerTableTest extends AbstractDaqRestTest implem
100, 100,
101), 101),
channelName); channelName);
request.addField(QueryField.pulseId); request.addField(EventField.pulseId);
request.addField(QueryField.globalSeconds); request.addField(EventField.globalSeconds);
request.addField(QueryField.globalMillis); request.addField(EventField.globalMillis);
request.addField(QueryField.iocSeconds); request.addField(EventField.iocSeconds);
request.addField(QueryField.iocMillis); request.addField(EventField.iocMillis);
request.setMapping(new Mapping()); request.setMapping(new Mapping());
request.addValueTransformation( request.addValueTransformation(
new ValueTransformationSequence( new ValueTransformationSequence(
@ -2277,11 +2277,11 @@ public class JsonQueryRestControllerTableTest extends AbstractDaqRestTest implem
100, 100,
101), 101),
channelName); channelName);
request.addField(QueryField.pulseId); request.addField(EventField.pulseId);
request.addField(QueryField.globalSeconds); request.addField(EventField.globalSeconds);
request.addField(QueryField.globalMillis); request.addField(EventField.globalMillis);
request.addField(QueryField.iocSeconds); request.addField(EventField.iocSeconds);
request.addField(QueryField.iocMillis); request.addField(EventField.iocMillis);
request.setMapping(new Mapping()); request.setMapping(new Mapping());
request.addValueTransformation( request.addValueTransformation(
new ValueTransformationSequence( new ValueTransformationSequence(
@ -2321,11 +2321,11 @@ public class JsonQueryRestControllerTableTest extends AbstractDaqRestTest implem
100, 100,
101), 101),
channelName, channelName2); channelName, channelName2);
request.addField(QueryField.pulseId); request.addField(EventField.pulseId);
request.addField(QueryField.globalSeconds); request.addField(EventField.globalSeconds);
request.addField(QueryField.globalMillis); request.addField(EventField.globalMillis);
request.addField(QueryField.iocSeconds); request.addField(EventField.iocSeconds);
request.addField(QueryField.iocMillis); request.addField(EventField.iocMillis);
request.setMapping(new Mapping()); request.setMapping(new Mapping());
request.addValueTransformation( request.addValueTransformation(
new ValueTransformationSequence( new ValueTransformationSequence(

View File

@ -37,7 +37,7 @@ import ch.psi.daq.domain.query.operation.AggregationDescriptor;
import ch.psi.daq.domain.query.operation.AggregationType; import ch.psi.daq.domain.query.operation.AggregationType;
import ch.psi.daq.domain.query.operation.Compression; import ch.psi.daq.domain.query.operation.Compression;
import ch.psi.daq.domain.query.operation.Extrema; import ch.psi.daq.domain.query.operation.Extrema;
import ch.psi.daq.domain.query.operation.QueryField; import ch.psi.daq.domain.query.operation.EventField;
import ch.psi.daq.domain.query.transform.ValueTransformationSequence; import ch.psi.daq.domain.query.transform.ValueTransformationSequence;
import ch.psi.daq.domain.query.transform.image.ImageDownScaleValueTransformation; import ch.psi.daq.domain.query.transform.image.ImageDownScaleValueTransformation;
import ch.psi.daq.domain.query.transform.image.ImageEncodingValueTransformation; import ch.psi.daq.domain.query.transform.image.ImageEncodingValueTransformation;
@ -289,11 +289,11 @@ public class JsonQueryRestControllerTest extends AbstractDaqRestTest implements
100, 100,
101), 101),
TEST_CHANNEL_NAMES); TEST_CHANNEL_NAMES);
request.addField(QueryField.pulseId); request.addField(EventField.pulseId);
request.addField(QueryField.globalSeconds); request.addField(EventField.globalSeconds);
request.addField(QueryField.globalMillis); request.addField(EventField.globalMillis);
request.addField(QueryField.iocSeconds); request.addField(EventField.iocSeconds);
request.addField(QueryField.iocMillis); request.addField(EventField.iocMillis);
String content = mapper.writeValueAsString(request); String content = mapper.writeValueAsString(request);
System.out.println(content); System.out.println(content);
@ -350,8 +350,8 @@ public class JsonQueryRestControllerTest extends AbstractDaqRestTest implements
199), 199),
new AggregationDescriptor().setNrOfBins(2), new AggregationDescriptor().setNrOfBins(2),
TEST_CHANNEL_NAMES); TEST_CHANNEL_NAMES);
request.addField(QueryField.pulseId); request.addField(EventField.pulseId);
request.addField(QueryField.eventCount); request.addField(EventField.eventCount);
String content = mapper.writeValueAsString(request); String content = mapper.writeValueAsString(request);
System.out.println(content); System.out.println(content);
@ -1210,11 +1210,11 @@ public class JsonQueryRestControllerTest extends AbstractDaqRestTest implements
100, 100,
101), 101),
channelName); channelName);
request.addField(QueryField.pulseId); request.addField(EventField.pulseId);
request.addField(QueryField.globalSeconds); request.addField(EventField.globalSeconds);
request.addField(QueryField.globalMillis); request.addField(EventField.globalMillis);
request.addField(QueryField.iocSeconds); request.addField(EventField.iocSeconds);
request.addField(QueryField.iocMillis); request.addField(EventField.iocMillis);
request.addValueTransformation( request.addValueTransformation(
new ValueTransformationSequence( new ValueTransformationSequence(
ValueTransformationSequence.ALL_CHANNELS, ValueTransformationSequence.ALL_CHANNELS,
@ -1256,11 +1256,11 @@ public class JsonQueryRestControllerTest extends AbstractDaqRestTest implements
100, 100,
101), 101),
channelName); channelName);
request.addField(QueryField.pulseId); request.addField(EventField.pulseId);
request.addField(QueryField.globalSeconds); request.addField(EventField.globalSeconds);
request.addField(QueryField.globalMillis); request.addField(EventField.globalMillis);
request.addField(QueryField.iocSeconds); request.addField(EventField.iocSeconds);
request.addField(QueryField.iocMillis); request.addField(EventField.iocMillis);
request.addValueTransformation( request.addValueTransformation(
new ValueTransformationSequence( new ValueTransformationSequence(
channelName, channelName,
@ -1299,11 +1299,11 @@ public class JsonQueryRestControllerTest extends AbstractDaqRestTest implements
100, 100,
101), 101),
channelName, channelName2); channelName, channelName2);
request.addField(QueryField.pulseId); request.addField(EventField.pulseId);
request.addField(QueryField.globalSeconds); request.addField(EventField.globalSeconds);
request.addField(QueryField.globalMillis); request.addField(EventField.globalMillis);
request.addField(QueryField.iocSeconds); request.addField(EventField.iocSeconds);
request.addField(QueryField.iocMillis); request.addField(EventField.iocMillis);
request.addValueTransformation( request.addValueTransformation(
new ValueTransformationSequence( new ValueTransformationSequence(
null, null,

View File

@ -0,0 +1,473 @@
package ch.psi.daq.test.queryrest.controller;
import static org.junit.Assert.assertArrayEquals;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNull;
import static org.junit.Assert.assertTrue;
import java.util.List;
import java.util.stream.Collectors;
import javax.annotation.Resource;
import org.junit.After;
import org.junit.Test;
import org.springframework.http.MediaType;
import org.springframework.test.web.servlet.MvcResult;
import org.springframework.test.web.servlet.request.MockMvcRequestBuilders;
import org.springframework.test.web.servlet.result.MockMvcResultHandlers;
import org.springframework.test.web.servlet.result.MockMvcResultMatchers;
import com.fasterxml.jackson.core.type.TypeReference;
import com.fasterxml.jackson.databind.ObjectMapper;
import ch.psi.bsread.message.Type;
import ch.psi.daq.common.ordering.Ordering;
import ch.psi.daq.common.time.TimeUtils;
import ch.psi.daq.domain.backend.Backend;
import ch.psi.daq.domain.config.DomainConfig;
import ch.psi.daq.domain.events.ChannelConfiguration;
import ch.psi.daq.domain.events.impl.ChannelConfigurationImpl;
import ch.psi.daq.domain.json.ChannelConfigurations;
import ch.psi.daq.domain.json.ChannelConfigurationsList;
import ch.psi.daq.domain.query.DAQConfigQuery;
import ch.psi.daq.domain.query.channels.ChannelConfigurationsRequest;
import ch.psi.daq.domain.query.channels.ChannelConfigurationsResponse;
import ch.psi.daq.domain.request.range.RequestRangePulseId;
import ch.psi.daq.test.queryrest.AbstractDaqRestTest;
/**
 * Tests the channel configuration query endpoints of the {@link DaqController} implementation
 * (config-history queries and current channel-configuration listings).
 */
public class QueryRestControllerChannelConfigurationTest extends AbstractDaqRestTest {
@Resource(name = DomainConfig.BEAN_NAME_BACKEND_DEFAULT)
private Backend backend;
private ObjectMapper objectMapper = new ObjectMapper();
@After
public void tearDown() throws Exception {}
@Test
public void testChannelConfigurationQuery_01() throws Exception {
    // Query the configuration history of two test channels over pulse-ids 100..101
    // (default ascending ordering) and verify both the deserialized representation
    // and the raw JSON layout of the response.
    DAQConfigQuery query = new DAQConfigQuery(
            new RequestRangePulseId(
                    100,
                    101),
            backend.getName() + "1", backend.getName() + "2");

    String content = mapper.writeValueAsString(query);
    System.out.println(content);

    MvcResult result = this.mockMvc.perform(MockMvcRequestBuilders
            .post(DomainConfig.PATH_QUERY_CONFIG)
            .contentType(MediaType.APPLICATION_JSON)
            .content(content))
            .andDo(MockMvcResultHandlers.print())
            .andExpect(MockMvcResultMatchers.status().isOk())
            .andReturn();

    String response = result.getResponse().getContentAsString();
    System.out.println("Response: " + response);

    // test conversion used in DAQProcessing
    ChannelConfigurationsList channelConfigurations =
            objectMapper.readValue(response, ChannelConfigurationsList.class);
    assertEquals(2, channelConfigurations.size());

    final long startPulse = query.getRange().getStartPulseId();
    final long endPulse = query.getRange().getEndPulseId();

    // One entry per queried channel; ascending ordering puts the start pulse first.
    ChannelConfiguration config = null;
    for (int i = 0; i < 2; ++i) {
        String channelName = backend.getName() + (i + 1);
        ChannelConfigurations configs = channelConfigurations.get(i);
        assertEquals(channelName, configs.getChannel().getName());
        assertEquals(backend, configs.getChannel().getBackend());

        List<ChannelConfiguration> configsList = configs.getConfigs().collect(Collectors.toList());
        assertEquals(2, configsList.size());
        assertQueriedConfig(configsList.get(0), channelName, startPulse);
        // keep the last config around for the JSON-path checks below
        config = configsList.get(1);
        assertQueriedConfig(config, channelName, endPulse);
    }

    // test if backend and channel/name not set: the envelope carries channel
    // name/backend; the nested config entries must not repeat them.
    this.mockMvc.perform(MockMvcRequestBuilders
            .post(DomainConfig.PATH_QUERY_CONFIG)
            .contentType(MediaType.APPLICATION_JSON)
            .content(content))
            .andExpect(MockMvcResultMatchers.status().isOk())
            .andExpect(MockMvcResultMatchers.jsonPath("$").isArray())
            .andExpect(MockMvcResultMatchers.jsonPath("$[0]").exists())
            .andExpect(MockMvcResultMatchers.jsonPath("$[0].channel.backend").value(backend.getName()))
            .andExpect(MockMvcResultMatchers.jsonPath("$[0].channel.name").value(backend.getName() + "1"))
            .andExpect(MockMvcResultMatchers.jsonPath("$[0].configs").isArray())
            .andExpect(MockMvcResultMatchers.jsonPath("$[0].configs[0]").exists())
            .andExpect(MockMvcResultMatchers.jsonPath("$[0].configs[0].name").doesNotExist())
            .andExpect(MockMvcResultMatchers.jsonPath("$[0].configs[0].backend").doesNotExist())
            .andExpect(MockMvcResultMatchers.jsonPath("$[0].configs[0].type").value(config.getType()))
            .andExpect(MockMvcResultMatchers.jsonPath("$[0].configs[0].shape[0]").value(config.getShape()[0]))
            .andExpect(MockMvcResultMatchers.jsonPath("$[0].configs[1]").exists())
            .andExpect(MockMvcResultMatchers.jsonPath("$[0].configs[1].name").doesNotExist())
            .andExpect(MockMvcResultMatchers.jsonPath("$[0].configs[1].backend").doesNotExist())
            .andExpect(MockMvcResultMatchers.jsonPath("$[0].configs[1].type").value(config.getType()))
            .andExpect(MockMvcResultMatchers.jsonPath("$[0].configs[1].shape[0]").value(config.getShape()[0]))
            .andExpect(MockMvcResultMatchers.jsonPath("$[0].configs[2]").doesNotExist())
            .andExpect(MockMvcResultMatchers.jsonPath("$[1].configs[0]").exists())
            .andExpect(MockMvcResultMatchers.jsonPath("$[1].configs[0].name").doesNotExist())
            .andExpect(MockMvcResultMatchers.jsonPath("$[1].configs[0].backend").doesNotExist())
            .andExpect(MockMvcResultMatchers.jsonPath("$[1].configs[0].type").value(config.getType()))
            .andExpect(MockMvcResultMatchers.jsonPath("$[1].configs[0].shape[0]").value(config.getShape()[0]))
            .andExpect(MockMvcResultMatchers.jsonPath("$[1].configs[1]").exists())
            .andExpect(MockMvcResultMatchers.jsonPath("$[1].configs[1].name").doesNotExist())
            .andExpect(MockMvcResultMatchers.jsonPath("$[1].configs[1].backend").doesNotExist())
            .andExpect(MockMvcResultMatchers.jsonPath("$[1].configs[1].type").value(config.getType()))
            .andExpect(MockMvcResultMatchers.jsonPath("$[1].configs[1].shape[0]").value(config.getShape()[0]))
            .andExpect(MockMvcResultMatchers.jsonPath("$[1].configs[2]").doesNotExist())
            .andExpect(MockMvcResultMatchers.jsonPath("$[2]").doesNotExist());
}

/**
 * Asserts a single {@link ChannelConfiguration} returned by the config query against the
 * test backend's synthetic data: global time is pulse-id * 10 ms, shape {@code [1]},
 * type int32, source "unknown", modulo and offset 0.
 *
 * @param config the configuration entry to check
 * @param channelName expected channel name
 * @param pulseId expected pulse-id of the entry
 */
private void assertQueriedConfig(ChannelConfiguration config, String channelName, long pulseId) {
    assertEquals(channelName, config.getChannel());
    assertEquals(backend, config.getBackend());
    assertEquals(TimeUtils.getTimeFromMillis(pulseId * 10, 0), config.getGlobalTime());
    assertEquals(pulseId, config.getPulseId());
    assertArrayEquals(new int[] {1}, config.getShape());
    assertEquals(Type.Int32.getKey(), config.getType());
    assertEquals("unknown", config.getSource());
    assertEquals(0, config.getModulo());
    assertEquals(0, config.getOffset());
}
@Test
public void testChannelConfigurationQuery_02() throws Exception {
    // Same two-channel config query over pulse-ids 100..101, but with descending
    // ordering: for every channel the end-pulse config must be returned first.
    DAQConfigQuery daqQuery = new DAQConfigQuery(
            new RequestRangePulseId(
                    100,
                    101),
            backend.getName() + "1", backend.getName() + "2");
    daqQuery.setOrdering(Ordering.desc);

    String requestBody = mapper.writeValueAsString(daqQuery);
    System.out.println(requestBody);

    MvcResult mvcResult = this.mockMvc.perform(MockMvcRequestBuilders
            .post(DomainConfig.PATH_QUERY_CONFIG)
            .contentType(MediaType.APPLICATION_JSON)
            .content(requestBody))
            .andDo(MockMvcResultHandlers.print())
            .andExpect(MockMvcResultMatchers.status().isOk())
            .andReturn();

    String responseBody = mvcResult.getResponse().getContentAsString();
    System.out.println("Response: " + responseBody);

    // test conversion used in DAQProcessing
    ChannelConfigurationsList parsed =
            objectMapper.readValue(responseBody, ChannelConfigurationsList.class);
    assertEquals(2, parsed.size());

    // desc ordering: end pulse-id first, start pulse-id second
    final long[] expectedPulses = {
            daqQuery.getRange().getEndPulseId(),
            daqQuery.getRange().getStartPulseId()};

    for (int index = 0; index < 2; ++index) {
        String expectedChannel = backend.getName() + (index + 1);
        ChannelConfigurations entry = parsed.get(index);
        assertEquals(expectedChannel, entry.getChannel().getName());
        assertEquals(backend, entry.getChannel().getBackend());

        List<ChannelConfiguration> entryConfigs = entry.getConfigs().collect(Collectors.toList());
        assertEquals(2, entryConfigs.size());

        for (int pos = 0; pos < expectedPulses.length; ++pos) {
            ChannelConfiguration cfg = entryConfigs.get(pos);
            assertEquals(expectedChannel, cfg.getChannel());
            assertEquals(backend, cfg.getBackend());
            // test backend synthesizes global time as pulse-id * 10 ms
            assertEquals(TimeUtils.getTimeFromMillis(expectedPulses[pos] * 10, 0), cfg.getGlobalTime());
            assertEquals(expectedPulses[pos], cfg.getPulseId());
            assertArrayEquals(new int[] {1}, cfg.getShape());
            assertEquals(Type.Int32.getKey(), cfg.getType());
            assertEquals("unknown", cfg.getSource());
            assertEquals(0, cfg.getModulo());
            assertEquals(0, cfg.getOffset());
        }
    }
}
@Test
public void testChannelConfigurations_01() throws Exception {
    // POST a channel-configurations request filtered by "int32"; the test data set
    // matches four channels (int32/uint32, scalar and waveform) on the first backend.
    ChannelConfigurationsRequest request = new ChannelConfigurationsRequest(
            "int32");

    String content = mapper.writeValueAsString(request);
    System.out.println(content);

    MvcResult result = this.mockMvc.perform(MockMvcRequestBuilders
            .post(DomainConfig.PATH_CHANNELS_CONFIG)
            .contentType(MediaType.APPLICATION_JSON)
            .content(content))
            .andDo(MockMvcResultHandlers.print())
            .andExpect(MockMvcResultMatchers.status().isOk())
            .andReturn();

    String response = result.getResponse().getContentAsString();
    System.out.println("Response: " + response);

    // test conversion used in DAQProcessing
    List<ChannelConfigurationsResponse> channelConfigurations =
            objectMapper.readValue(response, new TypeReference<List<ChannelConfigurationsResponse>>() {});
    assertEquals(3, channelConfigurations.size());

    ChannelConfigurationsResponse configResponse = channelConfigurations.get(0);
    assertEquals(backend, configResponse.getBackend());
    List<ChannelConfiguration> configs = configResponse.getChannels().collect(Collectors.toList());
    assertEquals(4, configs.size());

    // Scalars have shape [1], waveforms shape [8].
    assertCurrentConfig(configs.get(0), "Int32Scalar", "int32", 1);
    assertCurrentConfig(configs.get(1), "Int32Waveform", "int32", 8);
    assertCurrentConfig(configs.get(2), "UInt32Scalar", "uint32", 1);
    assertCurrentConfig(configs.get(3), "UInt32Waveform", "uint32", 8);
}

/**
 * Asserts one "current" channel configuration entry as returned by the channels-config
 * endpoint: no global time, pulse-id 0, source "unknown", modulo and offset 0.
 *
 * @param config the configuration entry to check
 * @param channelName expected channel name
 * @param type expected data type key (e.g. "int32")
 * @param shapeDim expected single shape dimension
 */
private void assertCurrentConfig(ChannelConfiguration config, String channelName, String type,
        int shapeDim) {
    assertEquals(channelName, config.getChannel());
    assertEquals(backend, config.getBackend());
    assertEquals(type, config.getType());
    assertEquals("unknown", config.getSource());
    assertArrayEquals(new int[] {shapeDim}, config.getShape());
    assertNull(config.getGlobalTime());
    // assertEquals yields an informative failure message, unlike assertTrue(... == 0)
    assertEquals(0, config.getPulseId());
    assertEquals(0, config.getModulo());
    assertEquals(0, config.getOffset());
}
@Test
public void testChannelConfigurations_02() throws Exception {
    // Query the channel-configurations endpoint filtered by the regex "int16".
    MvcResult mvcResult = this.mockMvc.perform(MockMvcRequestBuilders
            .get(DomainConfig.PATH_CHANNELS_CONFIG + "/int16")
            .contentType(MediaType.APPLICATION_JSON))
            .andDo(MockMvcResultHandlers.print())
            .andExpect(MockMvcResultMatchers.status().isOk())
            .andReturn();
    String response = mvcResult.getResponse().getContentAsString();
    System.out.println("Response: " + response);

    // test conversion used in DAQProcessing
    List<ChannelConfigurationsResponse> channelConfigurations =
            objectMapper.readValue(response, new TypeReference<List<ChannelConfigurationsResponse>>() {});
    assertEquals(3, channelConfigurations.size());

    ChannelConfigurationsResponse configResponse = channelConfigurations.get(0);
    assertEquals(backend, configResponse.getBackend());
    List<ChannelConfiguration> configs = configResponse.getChannels().collect(Collectors.toList());

    // Expected {channel name, type, shape} triples, in response order.
    Object[][] expected = {
            {"Int16Scalar", "int16", new int[] {1}},
            {"Int16Waveform", "int16", new int[] {8}},
            {"UInt16Scalar", "uint16", new int[] {1}},
            {"UInt16Waveform", "uint16", new int[] {8}},
    };
    assertEquals(expected.length, configs.size());

    for (int i = 0; i < expected.length; ++i) {
        ChannelConfiguration config = configs.get(i);
        assertEquals(expected[i][0], config.getChannel());
        assertEquals(backend, config.getBackend());
        assertEquals(expected[i][1], config.getType());
        assertEquals("unknown", config.getSource());
        assertArrayEquals((int[]) expected[i][2], config.getShape());
        assertNull(config.getGlobalTime());
        assertTrue(config.getPulseId() == 0);
        assertEquals(0, config.getModulo());
        assertEquals(0, config.getOffset());
    }
}
@Test
public void testChannelConfigurations_03() throws Exception {
    // Fetch the configuration of a single channel addressed by its name.
    MvcResult mvcResult = this.mockMvc.perform(MockMvcRequestBuilders
            .get(DomainConfig.PATH_CHANNEL_CONFIG + "/Int16Waveform")
            .contentType(MediaType.APPLICATION_JSON))
            .andDo(MockMvcResultHandlers.print())
            .andExpect(MockMvcResultMatchers.status().isOk())
            .andReturn();
    String response = mvcResult.getResponse().getContentAsString();
    System.out.println("Response: " + response);

    // test conversion used in DAQProcessing
    ChannelConfiguration config = objectMapper.readValue(response, ChannelConfigurationImpl.class);
    assertEquals("Int16Waveform", config.getChannel());
    assertEquals(backend, config.getBackend());
    assertEquals("int16", config.getType());
    assertEquals("unknown", config.getSource());
    assertArrayEquals(new int[] {8}, config.getShape());
}
@Test
public void testChannelConfigurationsBackendOrder() throws Exception {
    // Ascending ordering: channels must come back sorted by name, and the serialized
    // form must expose "name" (not the internal "channel" property) on every entry.
    ChannelConfigurationsRequest request = new ChannelConfigurationsRequest(
            "int64", Ordering.asc, backend);
    String content = mapper.writeValueAsString(request);
    System.out.println(content);
    this.mockMvc
            .perform(MockMvcRequestBuilders
                    .post(DomainConfig.PATH_CHANNELS_CONFIG)
                    .contentType(MediaType.APPLICATION_JSON)
                    .content(content))
            .andExpect(MockMvcResultMatchers.status().isOk())
            .andExpect(MockMvcResultMatchers.jsonPath("$").isArray())
            .andExpect(MockMvcResultMatchers.jsonPath("$[0]").exists())
            .andExpect(MockMvcResultMatchers.jsonPath("$[0].backend").value(backend.getName()))
            .andExpect(MockMvcResultMatchers.jsonPath("$[0].channels").isArray())
            .andExpect(MockMvcResultMatchers.jsonPath("$[0].channels[0]").exists())
            .andExpect(MockMvcResultMatchers.jsonPath("$[0].channels[0].name").value("Int64Scalar"))
            .andExpect(MockMvcResultMatchers.jsonPath("$[0].channels[0].backend").value(backend.getName()))
            .andExpect(MockMvcResultMatchers.jsonPath("$[0].channels[0].channel").doesNotExist())
            .andExpect(MockMvcResultMatchers.jsonPath("$[0].channels[1]").exists())
            .andExpect(MockMvcResultMatchers.jsonPath("$[0].channels[1].name").value("Int64Waveform"))
            .andExpect(MockMvcResultMatchers.jsonPath("$[0].channels[1].backend").value(backend.getName()))
            .andExpect(MockMvcResultMatchers.jsonPath("$[0].channels[1].channel").doesNotExist())
            .andExpect(MockMvcResultMatchers.jsonPath("$[0].channels[2]").exists())
            .andExpect(MockMvcResultMatchers.jsonPath("$[0].channels[2].name").value("UInt64Scalar"))
            .andExpect(MockMvcResultMatchers.jsonPath("$[0].channels[2].backend").value(backend.getName()))
            // fix: previously re-checked channels[0].channel (copy-paste error), leaving
            // channels[2].channel unverified
            .andExpect(MockMvcResultMatchers.jsonPath("$[0].channels[2].channel").doesNotExist())
            .andExpect(MockMvcResultMatchers.jsonPath("$[0].channels[3]").exists())
            .andExpect(MockMvcResultMatchers.jsonPath("$[0].channels[3].name").value("UInt64Waveform"))
            .andExpect(MockMvcResultMatchers.jsonPath("$[0].channels[3].backend").value(backend.getName()))
            // fix: previously re-checked channels[0].channel (copy-paste error), leaving
            // channels[3].channel unverified
            .andExpect(MockMvcResultMatchers.jsonPath("$[0].channels[3].channel").doesNotExist())
            .andExpect(MockMvcResultMatchers.jsonPath("$[0].channels[4]").doesNotExist())
            .andExpect(MockMvcResultMatchers.jsonPath("$[1]").doesNotExist());

    // Descending ordering: same channel set in reverse name order.
    request = new ChannelConfigurationsRequest(
            "int64", Ordering.desc, backend);
    content = mapper.writeValueAsString(request);
    System.out.println(content);
    this.mockMvc
            .perform(MockMvcRequestBuilders
                    .post(DomainConfig.PATH_CHANNELS_CONFIG)
                    .contentType(MediaType.APPLICATION_JSON)
                    .content(content))
            .andExpect(MockMvcResultMatchers.status().isOk())
            .andExpect(MockMvcResultMatchers.jsonPath("$").isArray())
            .andExpect(MockMvcResultMatchers.jsonPath("$[0]").exists())
            .andExpect(MockMvcResultMatchers.jsonPath("$[0].backend").value(backend.getName()))
            .andExpect(MockMvcResultMatchers.jsonPath("$[0].channels").isArray())
            .andExpect(MockMvcResultMatchers.jsonPath("$[0].channels[0]").exists())
            .andExpect(MockMvcResultMatchers.jsonPath("$[0].channels[0].name").value("UInt64Waveform"))
            .andExpect(MockMvcResultMatchers.jsonPath("$[0].channels[0].backend").value(backend.getName()))
            .andExpect(MockMvcResultMatchers.jsonPath("$[0].channels[0].channel").doesNotExist())
            .andExpect(MockMvcResultMatchers.jsonPath("$[0].channels[1]").exists())
            .andExpect(MockMvcResultMatchers.jsonPath("$[0].channels[1].name").value("UInt64Scalar"))
            .andExpect(MockMvcResultMatchers.jsonPath("$[0].channels[1].backend").value(backend.getName()))
            .andExpect(MockMvcResultMatchers.jsonPath("$[0].channels[1].channel").doesNotExist())
            .andExpect(MockMvcResultMatchers.jsonPath("$[0].channels[2]").exists())
            .andExpect(MockMvcResultMatchers.jsonPath("$[0].channels[2].name").value("Int64Waveform"))
            .andExpect(MockMvcResultMatchers.jsonPath("$[0].channels[2].backend").value(backend.getName()))
            .andExpect(MockMvcResultMatchers.jsonPath("$[0].channels[2].channel").doesNotExist())
            .andExpect(MockMvcResultMatchers.jsonPath("$[0].channels[3]").exists())
            .andExpect(MockMvcResultMatchers.jsonPath("$[0].channels[3].name").value("Int64Scalar"))
            .andExpect(MockMvcResultMatchers.jsonPath("$[0].channels[3].backend").value(backend.getName()))
            .andExpect(MockMvcResultMatchers.jsonPath("$[0].channels[3].channel").doesNotExist())
            .andExpect(MockMvcResultMatchers.jsonPath("$[0].channels[4]").doesNotExist())
            .andExpect(MockMvcResultMatchers.jsonPath("$[1]").doesNotExist());
}
}

View File

@ -1,178 +0,0 @@
package ch.psi.daq.test.queryrest.controller;
import static org.junit.Assert.assertArrayEquals;
import static org.junit.Assert.assertEquals;
import java.util.List;
import java.util.stream.Collectors;
import javax.annotation.Resource;
import org.junit.After;
import org.junit.Test;
import org.springframework.http.MediaType;
import org.springframework.test.web.servlet.MvcResult;
import org.springframework.test.web.servlet.request.MockMvcRequestBuilders;
import org.springframework.test.web.servlet.result.MockMvcResultHandlers;
import org.springframework.test.web.servlet.result.MockMvcResultMatchers;
import com.fasterxml.jackson.databind.ObjectMapper;
import ch.psi.bsread.message.Type;
import ch.psi.daq.common.ordering.Ordering;
import ch.psi.daq.common.time.TimeUtils;
import ch.psi.daq.domain.backend.Backend;
import ch.psi.daq.domain.config.DomainConfig;
import ch.psi.daq.domain.json.channels.info.ChannelInfo;
import ch.psi.daq.domain.json.channels.info.ChannelInfos;
import ch.psi.daq.domain.json.channels.info.ChannelInfosList;
import ch.psi.daq.domain.query.ChannelNameRequest;
import ch.psi.daq.domain.request.range.RequestRangePulseId;
import ch.psi.daq.test.queryrest.AbstractDaqRestTest;
/**
 * Tests the {@link DaqController} implementation.
 */
public class QueryRestControllerChannelInfoTest extends AbstractDaqRestTest {
    @Resource(name = DomainConfig.BEAN_NAME_BACKEND_DEFAULT)
    private Backend backend;

    private ObjectMapper objectMapper = new ObjectMapper();

    @After
    public void tearDown() throws Exception {}

    @Test
    public void testChannelInfoQuery_01() throws Exception {
        ChannelNameRequest query = new ChannelNameRequest(
                new RequestRangePulseId(
                        100,
                        101),
                backend.getName() + "1", backend.getName() + "2");
        String response = executeChannelsInfoQuery(query);

        // test conversion used in DAQProcessing
        List<? extends ChannelInfos> infosList = objectMapper.readValue(response, ChannelInfosList.class);
        assertEquals(2, infosList.size());

        long startPulseId = query.getRange().getStartPulseId();
        long endPulseId = query.getRange().getEndPulseId();
        // default ordering: infos in ascending pulse-id order
        assertInfos(infosList.get(0), backend.getName() + "1", startPulseId, endPulseId);
        assertInfos(infosList.get(1), backend.getName() + "2", startPulseId, endPulseId);
    }

    @Test
    public void testChannelInfoQuery_02() throws Exception {
        ChannelNameRequest query = new ChannelNameRequest(
                new RequestRangePulseId(
                        100,
                        101),
                backend.getName() + "1", backend.getName() + "2");
        // descending ordering reverses the per-channel info sequence
        query.setOrdering(Ordering.desc);
        String response = executeChannelsInfoQuery(query);

        // test conversion used in DAQProcessing
        List<? extends ChannelInfos> infosList = objectMapper.readValue(response, ChannelInfosList.class);
        assertEquals(2, infosList.size());

        long startPulseId = query.getRange().getStartPulseId();
        long endPulseId = query.getRange().getEndPulseId();
        assertInfos(infosList.get(0), backend.getName() + "1", endPulseId, startPulseId);
        assertInfos(infosList.get(1), backend.getName() + "2", endPulseId, startPulseId);
    }

    /**
     * POSTs the given query to the channels-info endpoint, expects HTTP 200 and returns the
     * response body as a String.
     */
    private String executeChannelsInfoQuery(ChannelNameRequest query) throws Exception {
        String content = mapper.writeValueAsString(query);
        System.out.println(content);

        MvcResult result = this.mockMvc.perform(MockMvcRequestBuilders
                .post(DomainConfig.PATH_CHANNELS_INFO)
                .contentType(MediaType.APPLICATION_JSON)
                .content(content))
                .andDo(MockMvcResultHandlers.print())
                .andExpect(MockMvcResultMatchers.status().isOk())
                .andReturn();
        String response = result.getResponse().getContentAsString();
        System.out.println("Response: " + response);
        return response;
    }

    /**
     * Asserts that the ChannelInfos of one channel contain exactly two infos, in the given
     * pulse-id order.
     */
    private void assertInfos(ChannelInfos cInfos, String channelName, long firstPulseId, long secondPulseId) {
        assertEquals(channelName, cInfos.getChannel().getName());
        assertEquals(backend, cInfos.getChannel().getBackend());
        List<ChannelInfo> infos = cInfos.getChannelInfos().collect(Collectors.toList());
        assertEquals(2, infos.size());
        assertInfo(infos.get(0), channelName, firstPulseId);
        assertInfo(infos.get(1), channelName, secondPulseId);
    }

    /** Asserts one ChannelInfo against the expected channel name and pulse-id. */
    private void assertInfo(ChannelInfo info, String channelName, long pulseId) {
        assertEquals(channelName, info.getChannel());
        assertEquals(backend, info.getBackend());
        // NOTE(review): the test backend appears to derive globalTime as pulse-id * 10 ms —
        // mirrors the expectation of the original assertions
        assertEquals(TimeUtils.getTimeFromMillis(pulseId * 10, 0), info.getGlobalTime());
        assertEquals(pulseId, info.getPulseId());
        assertArrayEquals(new int[] {1}, info.getShape());
        assertEquals(Type.Int32.getKey(), info.getType());
    }
}

View File

@ -8,7 +8,7 @@ query.hazelcast.node=true
# the base for the keyspaces # the base for the keyspaces
domain.keyspace.base=daq_query_test domain.keyspace.base=daq_query_test
channelname.cache.reload.period=-1 channels.cache.reload.period=-1
query.min.time=1970-01-01T00:00:00.000000000+00:00 query.min.time=1970-01-01T00:00:00.000000000+00:00