diff --git a/Readme.md b/Readme.md index b4677d3..2751bc8 100644 --- a/Readme.md +++ b/Readme.md @@ -73,7 +73,7 @@ POST https://:/channels ##### Explanation - **regex**: Reqular expression used to filter channel names. In case this value is undefined, no filter will be applied. Filtering is done using JAVA's [Pattern](https://docs.oracle.com/javase/8/docs/api/java/util/regex/Pattern.html), more precisely [Matcher.find()](https://docs.oracle.com/javase/8/docs/api/java/util/regex/Matcher.html#find--)). -- **backends**: Array of backends to access (values: sf-databuffer|sf-archiverappliance). In case this value is undefined, all backends will be queried for their channels. +- **backends**: Array of backends to access (values: sf-databuffer|sf-imagebuffer|sf-archiverappliance). In case this value is undefined, all backends will be queried for their channels. - **ordering**: The ordering of the channel names (values: **none**|asc|desc). - **reload**: Forces the server to reload cached channel names (values: **false**|true). 
diff --git a/src/main/java/ch/psi/daq/queryrest/config/QueryRestConfig.java b/src/main/java/ch/psi/daq/queryrest/config/QueryRestConfig.java index f33da16..6dcd6b6 100644 --- a/src/main/java/ch/psi/daq/queryrest/config/QueryRestConfig.java +++ b/src/main/java/ch/psi/daq/queryrest/config/QueryRestConfig.java @@ -37,25 +37,27 @@ import ch.psi.daq.domain.events.ChannelConfiguration; import ch.psi.daq.domain.query.backend.BackendQuery; import ch.psi.daq.domain.query.backend.analyzer.BackendQueryAnalyzer; import ch.psi.daq.domain.query.operation.Aggregation; -import ch.psi.daq.domain.query.operation.QueryField; +import ch.psi.daq.domain.query.operation.ConfigField; +import ch.psi.daq.domain.query.operation.EventField; import ch.psi.daq.domain.query.operation.aggregation.extrema.AbstractExtremaMeta; import ch.psi.daq.domain.query.response.Response; import ch.psi.daq.domain.request.validate.RequestProviderValidator; import ch.psi.daq.query.analyzer.BackendQueryAnalyzerImpl; import ch.psi.daq.query.config.QueryConfig; -import ch.psi.daq.queryrest.controller.validator.QueryValidator; -import ch.psi.daq.queryrest.model.ChannelRenameFilterMixin; +import ch.psi.daq.queryrest.controller.validator.ConfigQueryValidator; +import ch.psi.daq.queryrest.controller.validator.EventQueryValidator; +import ch.psi.daq.queryrest.model.HistoricChannelConfigurationPropertyFilterMixin; import ch.psi.daq.queryrest.model.PropertyFilterMixin; import ch.psi.daq.queryrest.query.QueryManager; import ch.psi.daq.queryrest.query.QueryManagerImpl; import ch.psi.daq.queryrest.response.PolymorphicResponseMixIn; import ch.psi.daq.queryrest.response.csv.CSVResponseStreamWriter; +import ch.psi.daq.queryrest.response.formatter.AnyResponseFormatter; +import ch.psi.daq.queryrest.response.formatter.DAQConfigQueryResponseFormatter; +import ch.psi.daq.queryrest.response.formatter.DAQQueriesResponseFormatter; import ch.psi.daq.queryrest.response.json.JSONResponseStreamWriter; -import 
ch.psi.daq.queryrest.response.json.JSONTableResponseStreamWriter; import ch.psi.daq.queryrest.response.msgpack.MsgPackResponseStreamWriter; -import ch.psi.daq.queryrest.response.msgpack.MsgPackTableResponseStreamWriter; import ch.psi.daq.queryrest.response.smile.SmileResponseStreamWriter; -import ch.psi.daq.queryrest.response.smile.SmileTableResponseStreamWriter; @Configuration @Import(value = DomainConfigCORS.class) @@ -65,9 +67,12 @@ import ch.psi.daq.queryrest.response.smile.SmileTableResponseStreamWriter; "file:${user.home}/.config/daq/queryrest.properties"}, ignoreResourceNotFound = true) public class QueryRestConfig { // extends WebMvcConfigurerAdapter { - private static final String QUERYREST_DEFAULT_RESPONSE_AGGREGATIONS = "queryrest.default.response.aggregations"; + private static final String QUERYREST_RESPONSE_FIELDS_EVENT_QUERY = "queryrest.response.fields.event.query"; + private static final String QUERYREST_RESPONSE_FIELDS_EVENT_QUERY_AGGREGATIONS = + "queryrest.response.fields.event.query.aggregations"; - private static final String QUERYREST_DEFAULT_RESPONSE_FIELDS = "queryrest.default.response.fields"; + private static final String QUERYREST_RESPONSE_FIELDS_CONFIG_QUERY = "queryrest.response.fields.config.query"; + private static final String QUERYREST_RESPONSE_FIELDS_CONFIG_HISTORIC = "queryrest.response.fields.config.historic"; // a nested configuration // this guarantees that the ordering of the properties file is as expected @@ -87,13 +92,21 @@ public class QueryRestConfig { // extends WebMvcConfigurerAdapter { public static final String BEAN_NAME_QUERY_MANAGER = "queryManager"; public static final String BEAN_NAME_QUERY_ANALIZER_FACTORY = "queryAnalizerFactory"; - public static final String BEAN_NAME_QUERY_VALIDATOR = "queryValidator"; + public static final String BEAN_NAME_EVENT_QUERY_VALIDATOR = "eventQueryValidator"; + public static final String BEAN_NAME_CONFIG_QUERY_VALIDATOR = "configQueryValidator"; public static final String 
BEAN_NAME_REQUEST_PROVIDER_VALIDATOR = "requestProviderValidator"; public static final String BEAN_NAME_JSON_FACTORY = "jsonFactory"; public static final String BEAN_NAME_MSG_PACK_FACTORY = "msgPackFactory"; public static final String BEAN_NAME_SMILE_FACTORY = "smileFactory"; - public static final String BEAN_NAME_DEFAULT_RESPONSE_FIELDS = "defaultResponseFields"; - public static final String BEAN_NAME_DEFAULT_RESPONSE_AGGREGATIONS = "defaultResponseAggregations"; + public static final String BEAN_NAME_DEFAULT_EVENT_RESPONSE_FIELDS = "defaultEventResponseFields"; + public static final String BEAN_NAME_DEFAULT_EVENT_RESPONSE_AGGREGATIONS = "defaultEventResponseAggregations"; + public static final String BEAN_NAME_CONFIG_RESPONSE_FIELDS_QUERY = "configResponseFieldsQuery"; + public static final String BEAN_NAME_CONFIG_RESPONSE_FIELDS_HISTORIC = "configResponseFieldsHistoric"; + public static final String BEAN_NAME_FORMATTER_DAQ_QUERIES = "formatterDAQQueries"; + public static final String BEAN_NAME_FORMATTER_DAQ_CONFIG_QUERY = "formatterDAQConfigQuery"; + public static final String BEAN_NAME_FORMATTER_ANY = "formatterAny"; + public static final String BEAN_NAME_FORMATTER_HISTORIC_CHANNEL_CONFIGURATION = + "formatterHistoricChannelConfiguration"; @Resource private ApplicationContext context; @@ -127,8 +140,7 @@ public class QueryRestConfig { // extends WebMvcConfigurerAdapter { objectMapper.addMixIn(AbstractExtremaMeta.class, PropertyFilterMixin.class); objectMapper.addMixIn(EnumMap.class, PropertyFilterMixin.class); - objectMapper.addMixIn(ChannelConfiguration.class, PropertyFilterMixin.class); - objectMapper.addMixIn(ChannelConfiguration.class, ChannelRenameFilterMixin.class); + objectMapper.addMixIn(ChannelConfiguration.class, HistoricChannelConfigurationPropertyFilterMixin.class); objectMapper.addMixIn(Response.class, PolymorphicResponseMixIn.class); } @@ -188,36 +200,18 @@ public class QueryRestConfig { // extends WebMvcConfigurerAdapter { return new 
JSONResponseStreamWriter(); } - @Bean - @Lazy - public JSONTableResponseStreamWriter jsonTableResponseStreamWriter() { - return new JSONTableResponseStreamWriter(); - } - @Bean @Lazy public MsgPackResponseStreamWriter msgPackResponseStreamWriter() { return new MsgPackResponseStreamWriter(); } - @Bean - @Lazy - public MsgPackTableResponseStreamWriter msgPackTableResponseStreamWriter() { - return new MsgPackTableResponseStreamWriter(); - } - @Bean @Lazy public SmileResponseStreamWriter smileResponseStreamWriter() { return new SmileResponseStreamWriter(); } - @Bean - @Lazy - public SmileTableResponseStreamWriter smileTableResponseStreamWriter() { - return new SmileTableResponseStreamWriter(); - } - @Bean @Lazy public CSVResponseStreamWriter csvResponseStreamWriter() { @@ -230,32 +224,73 @@ public class QueryRestConfig { // extends WebMvcConfigurerAdapter { return new QueryManagerImpl(); } - @Bean(name = BEAN_NAME_DEFAULT_RESPONSE_FIELDS) + @Bean(name = BEAN_NAME_DEFAULT_EVENT_RESPONSE_FIELDS) @Lazy - public Set defaultResponseFields() { + public Set defaultEventResponseFields() { String[] responseFields = - StringUtils.commaDelimitedListToStringArray(env.getProperty(QUERYREST_DEFAULT_RESPONSE_FIELDS)); - LOGGER.debug("Load '{}={}'", BEAN_NAME_DEFAULT_RESPONSE_FIELDS, Arrays.toString(responseFields)); + StringUtils.commaDelimitedListToStringArray(env.getProperty(QUERYREST_RESPONSE_FIELDS_EVENT_QUERY)); + LOGGER.debug("Load '{}={}'", QUERYREST_RESPONSE_FIELDS_EVENT_QUERY, Arrays.toString(responseFields)); // preserve order - LinkedHashSet defaultResponseFields = new LinkedHashSet<>(responseFields.length); + LinkedHashSet defaultResponseFields = new LinkedHashSet<>(responseFields.length); for (String field : responseFields) { try { - defaultResponseFields.add(QueryField.valueOf(field)); + defaultResponseFields.add(EventField.valueOf(field)); } catch (Exception e) { - LOGGER.error("Field '{}' in '{}' is invalid.", field, QUERYREST_DEFAULT_RESPONSE_FIELDS, e); + 
LOGGER.error("Field '{}' in '{}' is invalid.", field, QUERYREST_RESPONSE_FIELDS_EVENT_QUERY, e); } } return defaultResponseFields; } - @Bean(name = BEAN_NAME_DEFAULT_RESPONSE_AGGREGATIONS) + @Bean(name = BEAN_NAME_CONFIG_RESPONSE_FIELDS_QUERY) + @Lazy + public Set configResponseFieldsQuery() { + String[] responseFields = + StringUtils.commaDelimitedListToStringArray(env.getProperty(QUERYREST_RESPONSE_FIELDS_CONFIG_QUERY)); + LOGGER.debug("Load '{}={}'", QUERYREST_RESPONSE_FIELDS_CONFIG_QUERY, Arrays.toString(responseFields)); + + // preserve order + LinkedHashSet extractedResponseFields = new LinkedHashSet<>(responseFields.length); + for (String field : responseFields) { + try { + extractedResponseFields.add(ConfigField.valueOf(field)); + } catch (Exception e) { + LOGGER.error("Field '{}' in '{}' is invalid.", field, QUERYREST_RESPONSE_FIELDS_CONFIG_QUERY, e); + } + } + + return extractedResponseFields; + } + + @Bean(name = BEAN_NAME_CONFIG_RESPONSE_FIELDS_HISTORIC) + @Lazy + public Set configResponseFieldsHistoric() { + String[] responseFields = + StringUtils.commaDelimitedListToStringArray(env.getProperty(QUERYREST_RESPONSE_FIELDS_CONFIG_HISTORIC)); + LOGGER.debug("Load '{}={}'", QUERYREST_RESPONSE_FIELDS_CONFIG_HISTORIC, Arrays.toString(responseFields)); + + // preserve order + LinkedHashSet extractedResponseFields = new LinkedHashSet<>(responseFields.length); + for (String field : responseFields) { + try { + extractedResponseFields.add(ConfigField.valueOf(field)); + } catch (Exception e) { + LOGGER.error("Field '{}' in '{}' is invalid.", field, QUERYREST_RESPONSE_FIELDS_CONFIG_HISTORIC, e); + } + } + + return extractedResponseFields; + } + + @Bean(name = BEAN_NAME_DEFAULT_EVENT_RESPONSE_AGGREGATIONS) @Lazy public Set defaultResponseAggregations() { String[] responseAggregations = - StringUtils.commaDelimitedListToStringArray(env.getProperty(QUERYREST_DEFAULT_RESPONSE_AGGREGATIONS)); - LOGGER.debug("Load '{}={}'", BEAN_NAME_DEFAULT_RESPONSE_AGGREGATIONS, 
Arrays.toString(responseAggregations)); + StringUtils.commaDelimitedListToStringArray(env.getProperty(QUERYREST_RESPONSE_FIELDS_EVENT_QUERY_AGGREGATIONS)); + LOGGER.debug("Load '{}={}'", QUERYREST_RESPONSE_FIELDS_EVENT_QUERY_AGGREGATIONS, + Arrays.toString(responseAggregations)); // preserve order LinkedHashSet defaultResponseAggregations = new LinkedHashSet<>(responseAggregations.length); @@ -263,7 +298,8 @@ public class QueryRestConfig { // extends WebMvcConfigurerAdapter { try { defaultResponseAggregations.add(Aggregation.valueOf(aggregation)); } catch (Exception e) { - LOGGER.error("Aggregation '{}' in '{}' is invalid.", aggregation, QUERYREST_DEFAULT_RESPONSE_AGGREGATIONS, + LOGGER.error("Aggregation '{}' in '{}' is invalid.", aggregation, + QUERYREST_RESPONSE_FIELDS_EVENT_QUERY_AGGREGATIONS, e); } } @@ -271,10 +307,16 @@ public class QueryRestConfig { // extends WebMvcConfigurerAdapter { return defaultResponseAggregations; } - @Bean(name = BEAN_NAME_QUERY_VALIDATOR) + @Bean(name = BEAN_NAME_EVENT_QUERY_VALIDATOR) @Lazy - public Validator queryValidator() { - return new QueryValidator(); + public Validator eventQueryValidator() { + return new EventQueryValidator(); + } + + @Bean(name = BEAN_NAME_CONFIG_QUERY_VALIDATOR) + @Lazy + public Validator configQueryValidator() { + return new ConfigQueryValidator(); } @Bean(name = BEAN_NAME_REQUEST_PROVIDER_VALIDATOR) @@ -282,4 +324,32 @@ public class QueryRestConfig { // extends WebMvcConfigurerAdapter { public Validator requestProviderValidator() { return new RequestProviderValidator(); } + + @Bean(name = BEAN_NAME_FORMATTER_DAQ_QUERIES) + @Lazy + public DAQQueriesResponseFormatter daqQueriesFormatter() { + return new DAQQueriesResponseFormatter(); + } + + @Bean(name = BEAN_NAME_FORMATTER_DAQ_CONFIG_QUERY) + @Lazy + public DAQConfigQueryResponseFormatter daqConfigQueryFormatter() { + return new DAQConfigQueryResponseFormatter(); + } + + @Bean(name = BEAN_NAME_FORMATTER_ANY) + @Lazy + public AnyResponseFormatter 
anyFormatter() { + return new AnyResponseFormatter( + BEAN_NAME_DEFAULT_EVENT_RESPONSE_FIELDS, + BEAN_NAME_CONFIG_RESPONSE_FIELDS_HISTORIC); + } + + @Bean(name = BEAN_NAME_FORMATTER_HISTORIC_CHANNEL_CONFIGURATION) + @Lazy + public AnyResponseFormatter historicChannelConfigurationFormatter() { + return new AnyResponseFormatter( + BEAN_NAME_DEFAULT_EVENT_RESPONSE_FIELDS, + BEAN_NAME_CONFIG_RESPONSE_FIELDS_HISTORIC); + } } diff --git a/src/main/java/ch/psi/daq/queryrest/controller/QueryRestController.java b/src/main/java/ch/psi/daq/queryrest/controller/QueryRestController.java index da04b2f..4663f4f 100644 --- a/src/main/java/ch/psi/daq/queryrest/controller/QueryRestController.java +++ b/src/main/java/ch/psi/daq/queryrest/controller/QueryRestController.java @@ -3,14 +3,16 @@ package ch.psi.daq.queryrest.controller; import java.net.URLDecoder; import java.nio.charset.StandardCharsets; import java.util.Arrays; -import java.util.Collection; import java.util.List; +import java.util.Map.Entry; import java.util.Set; import java.util.stream.Collectors; +import java.util.stream.Stream; import javax.servlet.http.HttpServletResponse; import javax.validation.Valid; +import org.apache.commons.lang3.tuple.Triple; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.springframework.beans.BeansException; @@ -37,16 +39,19 @@ import com.google.common.collect.Lists; import ch.psi.daq.common.ordering.Ordering; import ch.psi.daq.domain.backend.Backend; import ch.psi.daq.domain.config.DomainConfig; -import ch.psi.daq.domain.json.channels.info.ChannelInfos; -import ch.psi.daq.domain.query.ChannelNameRequest; +import ch.psi.daq.domain.json.ChannelName; +import ch.psi.daq.domain.query.DAQConfigQuery; +import ch.psi.daq.domain.query.DAQConfigQueryElement; import ch.psi.daq.domain.query.DAQQueries; import ch.psi.daq.domain.query.DAQQuery; +import ch.psi.daq.domain.query.DAQQueryElement; +import ch.psi.daq.domain.query.backend.BackendQuery; +import 
ch.psi.daq.domain.query.channels.ChannelConfigurationsRequest; import ch.psi.daq.domain.query.channels.ChannelsRequest; -import ch.psi.daq.domain.query.channels.ChannelsResponse; import ch.psi.daq.domain.query.operation.Aggregation; import ch.psi.daq.domain.query.operation.AggregationType; import ch.psi.daq.domain.query.operation.Compression; -import ch.psi.daq.domain.query.operation.QueryField; +import ch.psi.daq.domain.query.operation.EventField; import ch.psi.daq.domain.query.response.Response; import ch.psi.daq.domain.query.response.ResponseFormat; import ch.psi.daq.domain.query.transform.image.color.ColorModelType; @@ -55,6 +60,9 @@ import ch.psi.daq.queryrest.config.QueryRestConfig; import ch.psi.daq.queryrest.query.QueryManager; import ch.psi.daq.queryrest.response.AbstractHTTPResponse; import ch.psi.daq.queryrest.response.PolymorphicResponseMixIn; +import ch.psi.daq.queryrest.response.formatter.AnyResponseFormatter; +import ch.psi.daq.queryrest.response.formatter.DAQConfigQueryResponseFormatter; +import ch.psi.daq.queryrest.response.formatter.DAQQueriesResponseFormatter; import ch.psi.daq.queryrest.response.json.JSONHTTPResponse; @RestController @@ -66,9 +74,14 @@ public class QueryRestController implements ApplicationContextAware { private ApplicationContext context; private ObjectMapper objectMapper; private QueryManager queryManager; - private Validator queryValidator; + private Validator eventQueryValidator; + private Validator configQueryValidator; private Validator requestProviderValidator; private Response defaultResponse = new JSONHTTPResponse(); + private AnyResponseFormatter anyFormatter; + private AnyResponseFormatter historicConfigFormatter; + private DAQConfigQueryResponseFormatter configQueryFormatter; + private DAQQueriesResponseFormatter daqQueriesFormatter; @SuppressWarnings("unchecked") @Override @@ -80,8 +93,17 @@ public class QueryRestController implements ApplicationContextAware { activeBackends = 
context.getBean(DomainConfig.BEAN_NAME_BACKENDS_ACTIVE, Set.class); objectMapper = context.getBean(DomainConfig.BEAN_NAME_OBJECT_MAPPER, ObjectMapper.class); queryManager = context.getBean(QueryRestConfig.BEAN_NAME_QUERY_MANAGER, QueryManager.class); - queryValidator = context.getBean(QueryRestConfig.BEAN_NAME_QUERY_VALIDATOR, Validator.class); + eventQueryValidator = context.getBean(QueryRestConfig.BEAN_NAME_EVENT_QUERY_VALIDATOR, Validator.class); + configQueryValidator = context.getBean(QueryRestConfig.BEAN_NAME_CONFIG_QUERY_VALIDATOR, Validator.class); requestProviderValidator = context.getBean(QueryRestConfig.BEAN_NAME_REQUEST_PROVIDER_VALIDATOR, Validator.class); + + anyFormatter = context.getBean(QueryRestConfig.BEAN_NAME_FORMATTER_ANY, AnyResponseFormatter.class); + historicConfigFormatter = context.getBean(QueryRestConfig.BEAN_NAME_FORMATTER_HISTORIC_CHANNEL_CONFIGURATION, + AnyResponseFormatter.class); + configQueryFormatter = context.getBean(QueryRestConfig.BEAN_NAME_FORMATTER_DAQ_CONFIG_QUERY, + DAQConfigQueryResponseFormatter.class); + daqQueriesFormatter = + context.getBean(QueryRestConfig.BEAN_NAME_FORMATTER_DAQ_QUERIES, DAQQueriesResponseFormatter.class); } @InitBinder @@ -90,52 +112,126 @@ public class QueryRestController implements ApplicationContextAware { if (requestProviderValidator.supports(binder.getTarget().getClass())) { binder.addValidators(requestProviderValidator); } - if (queryValidator.supports(binder.getTarget().getClass())) { - binder.addValidators(queryValidator); + if (eventQueryValidator.supports(binder.getTarget().getClass())) { + binder.addValidators(eventQueryValidator); + } + if (configQueryValidator.supports(binder.getTarget().getClass())) { + binder.addValidators(configQueryValidator); } } } - @RequestMapping(value = DomainConfig.PATH_CHANNELS, method = {RequestMethod.GET, RequestMethod.POST}, - produces = {MediaType.APPLICATION_JSON_VALUE}) - public @ResponseBody List getChannels(@RequestBody(required = false) 
ChannelsRequest request) - throws Throwable { - List channels = queryManager.getChannels(request); - channels = channels.stream() - .filter(channelsResponse -> activeBackends.contains(channelsResponse.getBackend())) - .collect(Collectors.toList()); - return channels; - } - - /** - * Query specific channel names, and return only those. - * - * @param channelName part of (or full) channel name - * @return Collection of channel names matching the specified input channel name - * @throws Throwable in case something goes wrong - */ - @RequestMapping(value = DomainConfig.PATH_CHANNELS + "/{channelName}", method = {RequestMethod.GET}, - produces = {MediaType.APPLICATION_JSON_VALUE}) - public @ResponseBody Collection getChannels( - @PathVariable(value = "channelName") String channelName) - throws Throwable { - return getChannels(new ChannelsRequest(channelName)); - } - - /** - * Queries for channels info - * - * @param request the ChannelNameRequest - * @return Collection of ChannelInfos - * @throws Throwable in case something goes wrong - */ @RequestMapping( - value = DomainConfig.PATH_CHANNELS_INFO, + value = DomainConfig.PATH_CHANNELS, + method = {RequestMethod.GET, RequestMethod.POST}, + produces = {MediaType.APPLICATION_JSON_VALUE}) + public void getChannels(@RequestBody(required = false) ChannelsRequest request, + final HttpServletResponse res) + throws Throwable { + ((AbstractHTTPResponse) defaultResponse).respond( + context, + res, + null, + queryManager.getChannels(request), + anyFormatter); + } + + @RequestMapping( + value = DomainConfig.PATH_CHANNELS + "/{channel}", + method = {RequestMethod.GET}, + produces = {MediaType.APPLICATION_JSON_VALUE}) + public void getChannels( + @PathVariable(value = "channel") final String channelName, final HttpServletResponse res) + throws Throwable { + getChannels(new ChannelsRequest(channelName), res); + } + + @RequestMapping( + value = DomainConfig.PATH_CHANNELS_CONFIG, + method = {RequestMethod.GET, RequestMethod.POST}, + 
produces = {MediaType.APPLICATION_JSON_VALUE}) + public void getChannelConfigurations(@RequestBody(required = false) ChannelConfigurationsRequest request, + final HttpServletResponse res) throws Throwable { + ((AbstractHTTPResponse) defaultResponse).respond( + context, + res, + null, + queryManager.getChannelConfigurations(request), + historicConfigFormatter); + } + + @RequestMapping( + value = DomainConfig.PATH_CHANNELS_CONFIG + "/{channel}", + method = {RequestMethod.GET}, + produces = {MediaType.APPLICATION_JSON_VALUE}) + public void getChannelConfigurations( + @PathVariable(value = "channel") final String channelName, final HttpServletResponse res) + throws Throwable { + getChannelConfigurations(new ChannelConfigurationsRequest(channelName), res); + } + + @RequestMapping( + value = DomainConfig.PATH_CHANNEL_CONFIG, + method = {RequestMethod.POST}, + produces = {MediaType.APPLICATION_JSON_VALUE}) + public void getChannelConfiguration(@RequestBody final ChannelName channelName, final HttpServletResponse res) + throws Throwable { + ((AbstractHTTPResponse) defaultResponse).respond( + context, + res, + null, + queryManager.getChannelConfiguration(channelName), + historicConfigFormatter); + } + + @RequestMapping( + value = DomainConfig.PATH_CHANNEL_CONFIG + "/{channel}", + method = {RequestMethod.GET}, + produces = {MediaType.APPLICATION_JSON_VALUE}) + public void getChannelConfiguration( + @PathVariable(value = "channel") final String channelName, final HttpServletResponse res) + throws Throwable { + getChannelConfiguration(new ChannelName(channelName), res); + } + + @RequestMapping( + value = DomainConfig.PATH_QUERY_CONFIG, method = RequestMethod.POST, consumes = {MediaType.APPLICATION_JSON_VALUE}) - public @ResponseBody Collection executeChannelInfoQuery(@RequestBody ChannelNameRequest request) + public void executeDAQConfigQuery(@RequestBody @Valid final DAQConfigQuery query, final HttpServletResponse res) throws Throwable { - return 
queryManager.getChannelInfos(request); + try { + LOGGER.debug("Executing queries '{}'", query); + + final Response response = query.getResponseOrDefault(defaultResponse); + if (response instanceof AbstractHTTPResponse) { + LOGGER.debug("Executing config query '{}'", query); + final AbstractHTTPResponse httpResponse = ((AbstractHTTPResponse) response); + + httpResponse.validateQuery(query); + // execute query + final Entry>> result = + queryManager.queryConfigs(query); + + httpResponse.respond( + context, + res, + query, + result, + configQueryFormatter); + } else { + final String message = + String.format( + "Expecting Response of type '%s' but received '%s'. Check JSON deserialization defined in '%s'", + AbstractHTTPResponse.class.getName(), response.getClass().getName(), + PolymorphicResponseMixIn.class.getName()); + LOGGER.error(message); + throw new IllegalArgumentException(message); + } + } catch (Exception e) { + LOGGER.error("Failed to execute config query '{}'.", query, e); + throw e; + } } /** @@ -144,13 +240,12 @@ public class QueryRestController implements ApplicationContextAware { * @param jsonBody The {@link DAQQuery} properties sent as a JSON string, i.e. 
this is the * stringified body of the POST request method * @param res the current {@link HttpServletResponse} instance - * @throws Exception if reading the JSON string fails or if the subsequent call to - * {@link #executeQuery(DAQQuery, HttpServletResponse)} fails + * @throws Exception if reading the JSON string fails or if the subsequent call fails */ @RequestMapping( value = DomainConfig.PATH_QUERY, method = RequestMethod.GET) - public void executeQueryBodyAsString(@RequestParam String jsonBody, HttpServletResponse res) throws Exception { + public void executeDAQQueryBodyAsString(@RequestParam String jsonBody, HttpServletResponse res) throws Exception { DAQQuery query; try { query = objectMapper.readValue(jsonBody, DAQQuery.class); @@ -165,13 +260,13 @@ public class QueryRestController implements ApplicationContextAware { if (requestProviderValidator.supports(query.getClass())) { requestProviderValidator.validate(query, errors); } - if (queryValidator.supports(query.getClass())) { - queryValidator.validate(query, errors); + if (eventQueryValidator.supports(query.getClass())) { + eventQueryValidator.validate(query, errors); } final List allErrors = errors.getAllErrors(); if (allErrors.isEmpty()) { - executeQuery(query, res); + executeDAQQuery(query, res); } else { final String message = String.format("Could not parse '%s' due to '%s'.", jsonBody, errors.toString()); LOGGER.error(message); @@ -190,8 +285,8 @@ public class QueryRestController implements ApplicationContextAware { value = DomainConfig.PATH_QUERY, method = RequestMethod.POST, consumes = {MediaType.APPLICATION_JSON_VALUE}) - public void executeQuery(@RequestBody @Valid DAQQuery query, HttpServletResponse res) throws Exception { - executeQueries(new DAQQueries(query), res); + public void executeDAQQuery(@RequestBody @Valid DAQQuery query, HttpServletResponse res) throws Exception { + executeDAQQueries(new DAQQueries(query), res); } /** @@ -200,13 +295,12 @@ public class QueryRestController implements 
ApplicationContextAware { * @param jsonBody The {@link DAQQueries} properties sent as a JSON string, i.e. this is the * stringified body of the POST request method * @param res the current {@link HttpServletResponse} instance - * @throws Exception if reading the JSON string fails or if the subsequent call to - * {@link #executeQueries(DAQQueries, HttpServletResponse)} fails + * @throws Exception if reading the JSON string fails or if the subsequent call fails */ @RequestMapping( value = DomainConfig.PATH_QUERIES, method = RequestMethod.GET) - public void executeQueriesBodyAsString(@RequestParam String jsonBody, HttpServletResponse res) throws Exception { + public void executeDAQQueriesBodyAsString(@RequestParam String jsonBody, HttpServletResponse res) throws Exception { DAQQueries queries; try { queries = objectMapper.readValue(jsonBody, DAQQueries.class); @@ -221,13 +315,13 @@ public class QueryRestController implements ApplicationContextAware { if (requestProviderValidator.supports(queries.getClass())) { requestProviderValidator.validate(queries, errors); } - if (queryValidator.supports(queries.getClass())) { - queryValidator.validate(queries, errors); + if (eventQueryValidator.supports(queries.getClass())) { + eventQueryValidator.validate(queries, errors); } final List allErrors = errors.getAllErrors(); if (allErrors.isEmpty()) { - executeQueries(queries, res); + executeDAQQueries(queries, res); } else { final String message = String.format("Could not parse '%s' due to '%s'.", jsonBody, errors.toString()); LOGGER.error(message); @@ -251,15 +345,28 @@ public class QueryRestController implements ApplicationContextAware { value = DomainConfig.PATH_QUERIES, method = RequestMethod.POST, consumes = {MediaType.APPLICATION_JSON_VALUE}) - public void executeQueries(@RequestBody @Valid DAQQueries queries, HttpServletResponse res) throws Exception { + public void executeDAQQueries(@RequestBody @Valid DAQQueries queries, HttpServletResponse res) throws Exception { try { 
LOGGER.debug("Executing queries '{}'", queries); - Response response = queries.getResponseOrDefault(defaultResponse); + final Response response = queries.getResponseOrDefault(defaultResponse); if (response instanceof AbstractHTTPResponse) { - ((AbstractHTTPResponse) response).respond(context, queries, res); + LOGGER.debug("Executing query '{}'", queries); + final AbstractHTTPResponse httpResponse = ((AbstractHTTPResponse) response); + + httpResponse.validateQuery(queries); + // execute query + final List>>> result = + queryManager.queryEvents(queries); + + httpResponse.respond( + context, + res, + queries, + result, + daqQueriesFormatter); } else { - String message = + final String message = String.format( "Expecting Response of type '%s' but received '%s'. Check JSON deserialization defined in '%s'", AbstractHTTPResponse.class.getName(), response.getClass().getName(), @@ -267,8 +374,6 @@ public class QueryRestController implements ApplicationContextAware { LOGGER.error(message); throw new IllegalArgumentException(message); } - - } catch (Exception e) { LOGGER.error("Failed to execute query '{}'.", queries, e); throw e; @@ -280,7 +385,9 @@ public class QueryRestController implements ApplicationContextAware { * * @return list of {@link Ordering}s as String array */ - @RequestMapping(value = DomainConfig.PATH_PARAMETERS_ROOT + "/ordering", method = {RequestMethod.GET}, + @RequestMapping( + value = DomainConfig.PATH_PARAMETERS_ROOT + "/ordering", + method = {RequestMethod.GET}, produces = {MediaType.APPLICATION_JSON_VALUE}) public @ResponseBody List getOrderingValues() { return Lists.newArrayList(Ordering.values()); @@ -291,21 +398,25 @@ public class QueryRestController implements ApplicationContextAware { * * @return list of {@link Ordering}s as String array */ - @RequestMapping(value = DomainConfig.PATH_PARAMETERS_ROOT + "/responseformat", method = {RequestMethod.GET}, + @RequestMapping( + value = DomainConfig.PATH_PARAMETERS_ROOT + "/responseformat", + method = 
{RequestMethod.GET}, produces = {MediaType.APPLICATION_JSON_VALUE}) public @ResponseBody List getResponseFormatValues() { return Lists.newArrayList(ResponseFormat.values()); } /** - * Returns the current list of {@link QueryField}s available. + * Returns the current list of {@link EventField}s available. * - * @return list of {@link QueryField}s as String array + * @return list of {@link EventField}s as String array */ - @RequestMapping(value = DomainConfig.PATH_PARAMETERS_ROOT + "/queryfields", method = {RequestMethod.GET}, + @RequestMapping( + value = DomainConfig.PATH_PARAMETERS_ROOT + "/queryfields", + method = {RequestMethod.GET}, produces = {MediaType.APPLICATION_JSON_VALUE}) - public @ResponseBody List getQueryFieldValues() { - return Arrays.stream(QueryField.values()) + public @ResponseBody List getQueryFieldValues() { + return Arrays.stream(EventField.values()) .filter(queryField -> queryField.isPublish()) .collect(Collectors.toList()); } @@ -315,7 +426,9 @@ public class QueryRestController implements ApplicationContextAware { * * @return list of {@link Aggregation}s as String array */ - @RequestMapping(value = DomainConfig.PATH_PARAMETERS_ROOT + "/aggregations", method = {RequestMethod.GET}, + @RequestMapping( + value = DomainConfig.PATH_PARAMETERS_ROOT + "/aggregations", + method = {RequestMethod.GET}, produces = {MediaType.APPLICATION_JSON_VALUE}) public @ResponseBody List getAggregationValues() { return Lists.newArrayList(Aggregation.values()); @@ -326,7 +439,9 @@ public class QueryRestController implements ApplicationContextAware { * * @return list of {@link AggregationType}s as String array */ - @RequestMapping(value = DomainConfig.PATH_PARAMETERS_ROOT + "/aggregationtypes", method = {RequestMethod.GET}, + @RequestMapping( + value = DomainConfig.PATH_PARAMETERS_ROOT + "/aggregationtypes", + method = {RequestMethod.GET}, produces = {MediaType.APPLICATION_JSON_VALUE}) public @ResponseBody List getAggregationTypeValues() { return 
Lists.newArrayList(AggregationType.values()); @@ -337,7 +452,9 @@ public class QueryRestController implements ApplicationContextAware { * * @return list of {@link Backend}s as String array */ - @RequestMapping(value = DomainConfig.PATH_BACKENDS, method = {RequestMethod.GET}, + @RequestMapping( + value = DomainConfig.PATH_BACKENDS, + method = {RequestMethod.GET}, produces = {MediaType.APPLICATION_JSON_VALUE}) public @ResponseBody List getBackendValues() { return Backend.getBackends().stream() @@ -350,7 +467,9 @@ public class QueryRestController implements ApplicationContextAware { * * @return list of {@link Compression}s as String array */ - @RequestMapping(value = DomainConfig.PATH_PARAMETERS_ROOT + "/compression", method = {RequestMethod.GET}, + @RequestMapping( + value = DomainConfig.PATH_PARAMETERS_ROOT + "/compression", + method = {RequestMethod.GET}, produces = {MediaType.APPLICATION_JSON_VALUE}) public @ResponseBody List getCompressionValues() { return Lists.newArrayList(Compression.values()); @@ -361,7 +480,9 @@ public class QueryRestController implements ApplicationContextAware { * * @return list of {@link ValueAggregation}s as String array */ - @RequestMapping(value = DomainConfig.PATH_PARAMETERS_ROOT + "/valueaggregations", method = {RequestMethod.GET}, + @RequestMapping( + value = DomainConfig.PATH_PARAMETERS_ROOT + "/valueaggregations", + method = {RequestMethod.GET}, produces = {MediaType.APPLICATION_JSON_VALUE}) public @ResponseBody List getValueAggregations() { return Lists.newArrayList(ValueAggregation.values()); @@ -372,7 +493,9 @@ public class QueryRestController implements ApplicationContextAware { * * @return list of {@link ValueAggregation}s as String array */ - @RequestMapping(value = DomainConfig.PATH_PARAMETERS_ROOT + "/colormodeltypes", method = {RequestMethod.GET}, + @RequestMapping( + value = DomainConfig.PATH_PARAMETERS_ROOT + "/colormodeltypes", + method = {RequestMethod.GET}, produces = {MediaType.APPLICATION_JSON_VALUE}) public 
@ResponseBody List getColorModelTypes() { return Lists.newArrayList(ColorModelType.values()); diff --git a/src/main/java/ch/psi/daq/queryrest/controller/validator/ConfigQueryValidator.java b/src/main/java/ch/psi/daq/queryrest/controller/validator/ConfigQueryValidator.java new file mode 100644 index 0000000..ac33545 --- /dev/null +++ b/src/main/java/ch/psi/daq/queryrest/controller/validator/ConfigQueryValidator.java @@ -0,0 +1,48 @@ +package ch.psi.daq.queryrest.controller.validator; + +import java.util.LinkedHashSet; +import java.util.Set; + +import org.springframework.beans.BeansException; +import org.springframework.context.ApplicationContext; +import org.springframework.context.ApplicationContextAware; +import org.springframework.validation.Errors; +import org.springframework.validation.Validator; + +import ch.psi.daq.domain.backend.Backend; +import ch.psi.daq.domain.config.DomainConfig; +import ch.psi.daq.domain.query.DAQConfigQuery; +import ch.psi.daq.domain.query.operation.ConfigField; +import ch.psi.daq.queryrest.config.QueryRestConfig; + +public class ConfigQueryValidator implements Validator, ApplicationContextAware { + private Set queryResponseFields; + + @SuppressWarnings("unchecked") + @Override + public void setApplicationContext(ApplicationContext context) throws BeansException { + final Backend backend = context.getBean(DomainConfig.BEAN_NAME_BACKEND_DEFAULT, Backend.class); + context = backend.getApplicationContext(); + + queryResponseFields = context.getBean(QueryRestConfig.BEAN_NAME_CONFIG_RESPONSE_FIELDS_QUERY, Set.class); + } + + @Override + public boolean supports(final Class clazz) { + return DAQConfigQuery.class.isAssignableFrom(clazz); + } + + @Override + public void validate(final Object target, final Errors errors) { + if (target instanceof DAQConfigQuery) { + this.checkElement((DAQConfigQuery) target, errors); + } + } + + private void checkElement(final DAQConfigQuery query, final Errors errors) { + // set default values (if not set) + if 
(query.getFields() == null || query.getFields().isEmpty()) { + query.setFields(new LinkedHashSet<>(queryResponseFields)); + } + } +} diff --git a/src/main/java/ch/psi/daq/queryrest/controller/validator/QueryValidator.java b/src/main/java/ch/psi/daq/queryrest/controller/validator/EventQueryValidator.java similarity index 92% rename from src/main/java/ch/psi/daq/queryrest/controller/validator/QueryValidator.java rename to src/main/java/ch/psi/daq/queryrest/controller/validator/EventQueryValidator.java index 3228fcd..82d2346 100644 --- a/src/main/java/ch/psi/daq/queryrest/controller/validator/QueryValidator.java +++ b/src/main/java/ch/psi/daq/queryrest/controller/validator/EventQueryValidator.java @@ -16,14 +16,14 @@ import ch.psi.daq.domain.query.DAQQueries; import ch.psi.daq.domain.query.DAQQuery; import ch.psi.daq.domain.query.DAQQueryElement; import ch.psi.daq.domain.query.operation.Aggregation; -import ch.psi.daq.domain.query.operation.QueryField; +import ch.psi.daq.domain.query.operation.EventField; import ch.psi.daq.domain.query.transform.ExecutionEnvironment; import ch.psi.daq.domain.query.transform.ValueTransformationSequence; import ch.psi.daq.domain.request.Request; import ch.psi.daq.queryrest.config.QueryRestConfig; -public class QueryValidator implements Validator, ApplicationContextAware { - private Set defaultResponseFields; +public class EventQueryValidator implements Validator, ApplicationContextAware { + private Set defaultResponseFields; private Set defaultResponseAggregations; @SuppressWarnings("unchecked") @@ -32,8 +32,8 @@ public class QueryValidator implements Validator, ApplicationContextAware { final Backend backend = context.getBean(DomainConfig.BEAN_NAME_BACKEND_DEFAULT, Backend.class); context = backend.getApplicationContext(); - defaultResponseFields = context.getBean(QueryRestConfig.BEAN_NAME_DEFAULT_RESPONSE_FIELDS, Set.class); - defaultResponseAggregations = context.getBean(QueryRestConfig.BEAN_NAME_DEFAULT_RESPONSE_AGGREGATIONS, 
Set.class); + defaultResponseFields = context.getBean(QueryRestConfig.BEAN_NAME_DEFAULT_EVENT_RESPONSE_FIELDS, Set.class); + defaultResponseAggregations = context.getBean(QueryRestConfig.BEAN_NAME_DEFAULT_EVENT_RESPONSE_AGGREGATIONS, Set.class); } @Override @@ -97,7 +97,7 @@ public class QueryValidator implements Validator, ApplicationContextAware { if (query.getValueTransformations() != null && !query.getValueTransformations().isEmpty()) { // without this field, json will not contain transformedValue - query.addField(QueryField.transformedValue); + query.addField(EventField.transformedValue); for (final ValueTransformationSequence transformationSequence : query.getValueTransformations()) { transformationSequence.setExecutionEnvironment(ExecutionEnvironment.QUERYING); diff --git a/src/main/java/ch/psi/daq/queryrest/model/ChannelRenameFilterMixin.java b/src/main/java/ch/psi/daq/queryrest/model/ChannelRenameFilterMixin.java deleted file mode 100644 index 90cdd3d..0000000 --- a/src/main/java/ch/psi/daq/queryrest/model/ChannelRenameFilterMixin.java +++ /dev/null @@ -1,15 +0,0 @@ -package ch.psi.daq.queryrest.model; - -import com.fasterxml.jackson.annotation.JsonFilter; -import com.fasterxml.jackson.annotation.JsonProperty; - -/** - * Kind of marker for ObjectMapper MixIn - */ -@JsonFilter("channelRenameFilter") -public interface ChannelRenameFilterMixin { - public static final String FILTER_NAME = "channelRenameFilter"; - - @JsonProperty("name") - String getChannel(); -} diff --git a/src/main/java/ch/psi/daq/queryrest/model/HistoricChannelConfigurationPropertyFilterMixin.java b/src/main/java/ch/psi/daq/queryrest/model/HistoricChannelConfigurationPropertyFilterMixin.java new file mode 100644 index 0000000..ceac608 --- /dev/null +++ b/src/main/java/ch/psi/daq/queryrest/model/HistoricChannelConfigurationPropertyFilterMixin.java @@ -0,0 +1,12 @@ +package ch.psi.daq.queryrest.model; + +import com.fasterxml.jackson.annotation.JsonFilter; + +/** + * Kind of marker for 
ObjectMapper MixIn. Use this instead of PropertyFilterMixin to make sure we can use different + * properties in case ChannelEvents and ChannelConfigurations are mixed in a response. + */ +@JsonFilter("historicChannelConfigurationPropertyFilter") +public class HistoricChannelConfigurationPropertyFilterMixin { + public static final String FILTER_NAME = "historicChannelConfigurationPropertyFilter"; +} diff --git a/src/main/java/ch/psi/daq/queryrest/query/QueryManager.java b/src/main/java/ch/psi/daq/queryrest/query/QueryManager.java index 1ee0f60..8b2327f 100644 --- a/src/main/java/ch/psi/daq/queryrest/query/QueryManager.java +++ b/src/main/java/ch/psi/daq/queryrest/query/QueryManager.java @@ -1,27 +1,33 @@ package ch.psi.daq.queryrest.query; -import java.util.Collection; import java.util.List; import java.util.Map.Entry; import java.util.stream.Stream; import org.apache.commons.lang3.tuple.Triple; +import ch.psi.daq.domain.events.ChannelConfiguration; import ch.psi.daq.domain.json.ChannelName; -import ch.psi.daq.domain.json.channels.info.ChannelInfos; -import ch.psi.daq.domain.query.ChannelNameRequest; +import ch.psi.daq.domain.query.DAQConfigQuery; +import ch.psi.daq.domain.query.DAQConfigQueryElement; import ch.psi.daq.domain.query.DAQQueries; import ch.psi.daq.domain.query.DAQQueryElement; import ch.psi.daq.domain.query.backend.BackendQuery; +import ch.psi.daq.domain.query.channels.ChannelConfigurationsRequest; +import ch.psi.daq.domain.query.channels.ChannelConfigurationsResponse; import ch.psi.daq.domain.query.channels.ChannelsRequest; import ch.psi.daq.domain.query.channels.ChannelsResponse; public interface QueryManager { - - List getChannels(final ChannelsRequest request) throws Exception; + + Stream getChannels(final ChannelsRequest request) throws Exception; - Collection getChannelInfos(final ChannelNameRequest request) throws Exception; + Stream getChannelConfigurations(final ChannelConfigurationsRequest request) throws Exception; + + ChannelConfiguration
getChannelConfiguration(final ChannelName channel) throws Exception; - List>>> getEvents(final DAQQueries queries) + Entry>> queryConfigs(final DAQConfigQuery query) throws Exception; + + List>>> queryEvents(final DAQQueries queries) throws Exception; } diff --git a/src/main/java/ch/psi/daq/queryrest/query/QueryManagerImpl.java b/src/main/java/ch/psi/daq/queryrest/query/QueryManagerImpl.java index fcac62f..1b8f0af 100644 --- a/src/main/java/ch/psi/daq/queryrest/query/QueryManagerImpl.java +++ b/src/main/java/ch/psi/daq/queryrest/query/QueryManagerImpl.java @@ -1,8 +1,8 @@ package ch.psi.daq.queryrest.query; import java.util.ArrayList; -import java.util.Collection; import java.util.List; +import java.util.Map; import java.util.Map.Entry; import java.util.function.Function; import java.util.stream.Collectors; @@ -19,24 +19,26 @@ import org.springframework.context.ApplicationContextAware; import ch.psi.daq.domain.DataEvent; import ch.psi.daq.domain.backend.Backend; import ch.psi.daq.domain.config.DomainConfig; +import ch.psi.daq.domain.events.ChannelConfiguration; import ch.psi.daq.domain.json.ChannelName; -import ch.psi.daq.domain.json.channels.info.ChannelInfos; -import ch.psi.daq.domain.query.ChannelNameRequest; +import ch.psi.daq.domain.query.DAQConfigQuery; +import ch.psi.daq.domain.query.DAQConfigQueryElement; import ch.psi.daq.domain.query.DAQQueries; import ch.psi.daq.domain.query.DAQQueryElement; import ch.psi.daq.domain.query.backend.BackendQuery; import ch.psi.daq.domain.query.backend.BackendQueryImpl; import ch.psi.daq.domain.query.backend.analyzer.BackendQueryAnalyzer; -import ch.psi.daq.domain.query.channels.ChannelNameCache; +import ch.psi.daq.domain.query.channels.BackendsChannelConfigurationCache; +import ch.psi.daq.domain.query.channels.ChannelConfigurationsRequest; +import ch.psi.daq.domain.query.channels.ChannelConfigurationsResponse; import ch.psi.daq.domain.query.channels.ChannelsRequest; import ch.psi.daq.domain.query.channels.ChannelsResponse; 
import ch.psi.daq.domain.query.processor.QueryProcessor; import ch.psi.daq.query.config.QueryConfig; import ch.psi.daq.queryrest.config.QueryRestConfig; -import ch.psi.daq.queryrest.query.model.ChannelInfosStreamImpl; public class QueryManagerImpl implements QueryManager, ApplicationContextAware { - private ChannelNameCache channelNameCache; + private BackendsChannelConfigurationCache channelsCache; private Function queryAnalizerFactory; @SuppressWarnings("unchecked") @@ -45,7 +47,7 @@ public class QueryManagerImpl implements QueryManager, ApplicationContextAware { final Backend backend = context.getBean(DomainConfig.BEAN_NAME_BACKEND_DEFAULT, Backend.class); context = backend.getApplicationContext(); - channelNameCache = context.getBean(QueryConfig.BEAN_NAME_CHANNEL_NAME_CACHE, ChannelNameCache.class); + channelsCache = context.getBean(QueryConfig.BEAN_NAME_HISTORIC_CHANNELS_CACHE, BackendsChannelConfigurationCache.class); queryAnalizerFactory = context.getBean(QueryRestConfig.BEAN_NAME_QUERY_ANALIZER_FACTORY, Function.class); } @@ -53,41 +55,67 @@ public class QueryManagerImpl implements QueryManager, ApplicationContextAware { public void destroy() {} @Override - public List getChannels(ChannelsRequest request) { + public Stream getChannels(ChannelsRequest request) { // in case not specified use defaults (e.g. 
GET) if (request == null) { request = new ChannelsRequest(); } - return channelNameCache.getChannels(request); - } - - public Collection getChannelInfos(final ChannelNameRequest request) { - // set backends if not defined yet - channelNameCache.configureBackends(request.getChannels()); - - final Stream stream = request.getRequestsByBackend().entrySet().stream() - .filter(entry -> entry.getKey().getBackendAccess().hasDataReader() - && entry.getKey().getBackendAccess().hasChannelInfoReader()) - .flatMap(entry -> { - return entry.getValue().getChannelInfos(entry.getKey()) - .entrySet().stream() - .map(innerEntry -> { - return new ChannelInfosStreamImpl( - new ChannelName(innerEntry.getKey(), entry.getKey()), - innerEntry.getValue()); - }); - }); - - // materialize - return stream.collect(Collectors.toList()); + return channelsCache.getChannels(request); } @Override - public List>>> getEvents( + public Stream getChannelConfigurations(ChannelConfigurationsRequest request) { + // in case not specified use defaults (e.g. 
GET) + if (request == null) { + request = new ChannelConfigurationsRequest(); + } + + return channelsCache.getChannelConfigurations(request); + } + + @Override + public ChannelConfiguration getChannelConfiguration(ChannelName channel) { + return channelsCache.getChannelConfiguration(channel); + } + + @Override + public Entry>> queryConfigs( + final DAQConfigQuery daqQuery) { + // set backends if not defined yet + channelsCache.configureBackends(daqQuery.getChannels()); + + Stream> resultStreams = + BackendQueryImpl + .getBackendQueries(daqQuery) + .stream() + .filter( + query -> query.getBackend().getBackendAccess().hasStreamEventReader()) + .flatMap( + query -> { + /* all the magic happens here */ + final Map> channelToConfig = + query.getChannelConfigurations(); + + return channelToConfig.entrySet().stream() + .map(entry -> { + return Triple.of( + query, + new ChannelName(entry.getKey(), query.getBackend()), + entry.getValue()); + }); + }); + + return Pair.of(daqQuery, resultStreams); + } + + @Override + public List>>> queryEvents( final DAQQueries queries) { // set backends if not defined yet - channelNameCache.configureBackends(queries); + for (DAQQueryElement daqQuery : queries) { + channelsCache.configureBackends(daqQuery.getChannels()); + } final List>>> results = new ArrayList<>(queries.getQueries().size()); @@ -102,7 +130,8 @@ public class QueryManagerImpl implements QueryManager, ApplicationContextAware { && query.getBackend().getBackendAccess().hasQueryProcessor()) .flatMap( query -> { - final QueryProcessor processor = query.getBackend().getBackendAccess().getQueryProcessor(); + final QueryProcessor processor = + query.getBackend().getBackendAccess().getQueryProcessor(); final BackendQueryAnalyzer queryAnalizer = queryAnalizerFactory.apply(query); /* all the magic happens here */ diff --git a/src/main/java/ch/psi/daq/queryrest/query/model/ChannelInfosStreamImpl.java b/src/main/java/ch/psi/daq/queryrest/query/model/ChannelInfosStreamImpl.java deleted 
file mode 100644 index 7d5c3a5..0000000 --- a/src/main/java/ch/psi/daq/queryrest/query/model/ChannelInfosStreamImpl.java +++ /dev/null @@ -1,44 +0,0 @@ -package ch.psi.daq.queryrest.query.model; - -import java.util.Iterator; -import java.util.stream.Stream; - -import com.fasterxml.jackson.annotation.JsonIgnore; - -import ch.psi.daq.domain.json.ChannelName; -import ch.psi.daq.domain.json.channels.info.ChannelInfo; -import ch.psi.daq.domain.json.channels.info.ChannelInfos; - -public class ChannelInfosStreamImpl implements ChannelInfos { - private ChannelName channel; - private Stream infos; - - public ChannelInfosStreamImpl() {} - - public ChannelInfosStreamImpl(final ChannelName channel, final Stream infos) { - this.channel = channel; - this.infos = infos; - } - - @Override - public ChannelName getChannel() { - return channel; - } - - public Stream getInfos() { - // can only be consumed once - return infos; - } - - @JsonIgnore - @Override - public Iterator iterator() { - return getChannelInfos().iterator(); - } - - @JsonIgnore - @Override - public Stream getChannelInfos() { - return infos.map(info -> (ChannelInfo) info); - } -} diff --git a/src/main/java/ch/psi/daq/queryrest/response/AbstractHTTPResponse.java b/src/main/java/ch/psi/daq/queryrest/response/AbstractHTTPResponse.java index 5823431..aeef1a7 100644 --- a/src/main/java/ch/psi/daq/queryrest/response/AbstractHTTPResponse.java +++ b/src/main/java/ch/psi/daq/queryrest/response/AbstractHTTPResponse.java @@ -10,6 +10,7 @@ import com.fasterxml.jackson.annotation.JsonIgnore; import com.fasterxml.jackson.core.JsonEncoding; import ch.psi.daq.domain.query.DAQQueries; +import ch.psi.daq.domain.query.DAQQueryElement; import ch.psi.daq.domain.query.response.ResponseFormat; import ch.psi.daq.domain.query.response.ResponseImpl; @@ -20,8 +21,32 @@ public abstract class AbstractHTTPResponse extends ResponseImpl { } @JsonIgnore - public abstract void respond(final ApplicationContext context, final DAQQueries queries, - 
HttpServletResponse httpResponse) throws Exception; + public abstract void validateQuery(final Object queryObj); + + @JsonIgnore + public boolean useTableFormat(final Object queryObj) { + if (queryObj instanceof DAQQueries) { + final DAQQueries queries = (DAQQueries) queryObj; + + for (final DAQQueryElement query : queries) { + if (query.getMapping() != null) { + return true; + } + } + } else if (queryObj instanceof DAQQueryElement) { + return ((DAQQueryElement) queryObj).getMapping() != null; + } + + return false; + } + + @JsonIgnore + public abstract void respond( + final ApplicationContext context, + final HttpServletResponse httpResponse, + final Object query, final R result, + final ResponseFormatter formatter) + throws Exception; /** * Configures the output stream and headers according to whether compression is wanted or not. diff --git a/src/main/java/ch/psi/daq/queryrest/response/ResponseFormatter.java b/src/main/java/ch/psi/daq/queryrest/response/ResponseFormatter.java new file mode 100644 index 0000000..7aea140 --- /dev/null +++ b/src/main/java/ch/psi/daq/queryrest/response/ResponseFormatter.java @@ -0,0 +1,16 @@ +package ch.psi.daq.queryrest.response; + +import java.io.OutputStream; + +import com.fasterxml.jackson.core.JsonFactory; +import com.fasterxml.jackson.databind.ObjectMapper; + +public interface ResponseFormatter { + + void format( + final JsonFactory factory, + final ObjectMapper mapper, + final R result, + final OutputStream out, + final AbstractHTTPResponse response) throws Exception; +} diff --git a/src/main/java/ch/psi/daq/queryrest/response/ResponseStreamWriter.java b/src/main/java/ch/psi/daq/queryrest/response/ResponseStreamWriter.java index 03b1c48..2f692e5 100644 --- a/src/main/java/ch/psi/daq/queryrest/response/ResponseStreamWriter.java +++ b/src/main/java/ch/psi/daq/queryrest/response/ResponseStreamWriter.java @@ -1,30 +1,27 @@ package ch.psi.daq.queryrest.response; import java.io.OutputStream; -import java.util.List; -import 
java.util.Map.Entry; -import java.util.stream.Stream; import javax.servlet.ServletResponse; -import org.apache.commons.lang3.tuple.Triple; - -import ch.psi.daq.domain.json.ChannelName; -import ch.psi.daq.domain.query.DAQQueryElement; -import ch.psi.daq.domain.query.backend.BackendQuery; -import ch.psi.daq.domain.query.response.Response; - public interface ResponseStreamWriter { /** * Responding with the the contents of the stream by writing into the output stream of the * {@link ServletResponse}. * - * @param results The results results + * @param The JAVA result type + * @param query The query + * @param result The result * @param out The OutputStream * @param response The Response + * @param formatter The ResponseFormatter * @throws Exception thrown if writing to the output stream fails */ - public void respond(final List>>> results, - final OutputStream out, final Response response) throws Exception; + public void respond( + final Object query, + final R result, + final OutputStream out, + final AbstractHTTPResponse response, + final ResponseFormatter formatter) throws Exception; } diff --git a/src/main/java/ch/psi/daq/queryrest/response/csv/CSVHTTPResponse.java b/src/main/java/ch/psi/daq/queryrest/response/csv/CSVHTTPResponse.java index 329647c..512af11 100644 --- a/src/main/java/ch/psi/daq/queryrest/response/csv/CSVHTTPResponse.java +++ b/src/main/java/ch/psi/daq/queryrest/response/csv/CSVHTTPResponse.java @@ -1,13 +1,9 @@ package ch.psi.daq.queryrest.response.csv; import java.io.OutputStream; -import java.util.List; -import java.util.Map.Entry; -import java.util.stream.Stream; import javax.servlet.http.HttpServletResponse; -import org.apache.commons.lang3.tuple.Triple; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.springframework.context.ApplicationContext; @@ -15,16 +11,14 @@ import org.springframework.context.ApplicationContext; import com.hazelcast.util.collection.ArrayUtils; import ch.psi.daq.domain.FieldNames; -import 
ch.psi.daq.domain.json.ChannelName; import ch.psi.daq.domain.query.DAQQueries; import ch.psi.daq.domain.query.DAQQueryElement; -import ch.psi.daq.domain.query.backend.BackendQuery; import ch.psi.daq.domain.query.operation.AggregationType; import ch.psi.daq.domain.query.operation.Compression; -import ch.psi.daq.domain.query.operation.QueryField; +import ch.psi.daq.domain.query.operation.EventField; import ch.psi.daq.domain.query.response.ResponseFormat; -import ch.psi.daq.queryrest.query.QueryManager; import ch.psi.daq.queryrest.response.AbstractHTTPResponse; +import ch.psi.daq.queryrest.response.ResponseFormatter; public class CSVHTTPResponse extends AbstractHTTPResponse { private static final Logger LOGGER = LoggerFactory.getLogger(CSVHTTPResponse.class); @@ -42,46 +36,42 @@ public class CSVHTTPResponse extends AbstractHTTPResponse { } @Override - public void respond(final ApplicationContext context, final DAQQueries queries, final HttpServletResponse httpResponse) - throws Exception { - final OutputStream out = handleCompressionAndResponseHeaders(httpResponse, CONTENT_TYPE); + public void validateQuery(final Object queryObj) { + if (queryObj instanceof DAQQueries) { + final DAQQueries queries = (DAQQueries) queryObj; + for (final DAQQueryElement query : queries) { + if (!(query.getAggregation() == null || AggregationType.value.equals(query.getAggregation() + .getAggregationType()))) { + // We allow only no aggregation or value aggregation as + // extrema: nested structure and not clear how to map it to one line + // index: value is an array of Statistics whose size is not clear at initialization + // time + final String message = + "CSV export does not support '" + query.getAggregation().getAggregationType() + "'"; + LOGGER.warn(message); + throw new IllegalArgumentException(message); + } - // do csv specific validations - validateQueries(queries); - try { - LOGGER.debug("Executing query '{}'", queries); - - final QueryManager queryManager = 
context.getBean(QueryManager.class); - final CSVResponseStreamWriter streamWriter = context.getBean(CSVResponseStreamWriter.class); - - // execute query - final List>>> result = - queryManager.getEvents(queries); - // write the response back to the client using java 8 streams - streamWriter.respond(result, out, this); - } catch (Exception e) { - LOGGER.error("Failed to execute query '{}'.", queries, e); - throw e; + if (!ArrayUtils.contains(query.getColumns(), FieldNames.FIELD_GLOBAL_TIME)) { + query.addField(EventField.globalMillis); + } + } } } - protected void validateQueries(final DAQQueries queries) { - for (final DAQQueryElement query : queries) { - if (!(query.getAggregation() == null || AggregationType.value.equals(query.getAggregation() - .getAggregationType()))) { - // We allow only no aggregation or value aggregation as - // extrema: nested structure and not clear how to map it to one line - // index: value is an array of Statistics whose size is not clear at initialization time - final String message = "CSV export does not support '" + query.getAggregation().getAggregationType() + "'"; - LOGGER.warn(message); - throw new IllegalArgumentException(message); - } + @Override + public void respond( + final ApplicationContext context, + final HttpServletResponse response, + final Object query, + final R result, + final ResponseFormatter formatter) throws Exception { + final OutputStream out = handleCompressionAndResponseHeaders(response, CONTENT_TYPE); + final CSVResponseStreamWriter streamWriter = context.getBean(CSVResponseStreamWriter.class); - if (!ArrayUtils.contains(query.getColumns(), FieldNames.FIELD_GLOBAL_TIME)) { - query.addField(QueryField.globalMillis); - } - } + // write the response back to the client using java 8 streams + streamWriter.respond(query, result, out, this, formatter); } } diff --git a/src/main/java/ch/psi/daq/queryrest/response/csv/CSVResponseStreamWriter.java 
b/src/main/java/ch/psi/daq/queryrest/response/csv/CSVResponseStreamWriter.java index 20760f7..951e08c 100644 --- a/src/main/java/ch/psi/daq/queryrest/response/csv/CSVResponseStreamWriter.java +++ b/src/main/java/ch/psi/daq/queryrest/response/csv/CSVResponseStreamWriter.java @@ -40,6 +40,7 @@ import ch.psi.daq.domain.DataEvent; import ch.psi.daq.domain.backend.Backend; import ch.psi.daq.domain.config.DomainConfig; import ch.psi.daq.domain.json.ChannelName; +import ch.psi.daq.domain.query.DAQQueries; import ch.psi.daq.domain.query.DAQQueryElement; import ch.psi.daq.domain.query.backend.BackendQuery; import ch.psi.daq.domain.query.backend.analyzer.BackendQueryAnalyzer; @@ -47,9 +48,11 @@ import ch.psi.daq.domain.query.mapping.IncompleteStrategy; import ch.psi.daq.domain.query.mapping.Mapping; import ch.psi.daq.domain.query.operation.Aggregation; import ch.psi.daq.domain.query.operation.Extrema; -import ch.psi.daq.domain.query.operation.QueryField; +import ch.psi.daq.domain.query.operation.EventField; import ch.psi.daq.domain.query.response.Response; import ch.psi.daq.queryrest.config.QueryRestConfig; +import ch.psi.daq.queryrest.response.AbstractHTTPResponse; +import ch.psi.daq.queryrest.response.ResponseFormatter; import ch.psi.daq.queryrest.response.ResponseStreamWriter; /** @@ -84,6 +87,23 @@ public class CSVResponseStreamWriter implements ResponseStreamWriter, Applicatio @SuppressWarnings("unchecked") @Override + public void respond( + final Object query, + final R result, + final OutputStream out, + final AbstractHTTPResponse response, + final ResponseFormatter formatter) throws Exception { + if (query instanceof DAQQueries) { + respond((List>>>) result, + out, response); + } else { + final String message = String.format("'%s' has no response type.", query); + LOGGER.error(message); + throw new IllegalStateException(message); + } + } + + @SuppressWarnings("unchecked") public void respond(final List>>> results, final OutputStream out, final Response
response) throws Exception { if (results.size() > 1) { @@ -200,15 +220,15 @@ public class CSVResponseStreamWriter implements ResponseStreamWriter, Applicatio private void setupChannelColumns(final DAQQueryElement daqQuery, final BackendQuery backendQuery, final ChannelName channelName, final Collection header, Collection>> accessors) { - final Set queryFields = daqQuery.getFields(); + final Set queryFields = daqQuery.getFields(); final List aggregations = daqQuery.getAggregation() != null ? daqQuery.getAggregation().getAggregations() : null; final List extrema = daqQuery.getAggregation() != null ? daqQuery.getAggregation().getExtrema() : null; final BackendQueryAnalyzer queryAnalyzer = queryAnalizerFactory.apply(backendQuery); - for (final QueryField field : queryFields) { - if (!(QueryField.value.equals(field) && queryAnalyzer.isAggregationEnabled())) { + for (final EventField field : queryFields) { + if (!(EventField.value.equals(field) && queryAnalyzer.isAggregationEnabled())) { final StringBuilder buf = new StringBuilder(3) .append(channelName.getName()) .append(DELIMITER_CHANNELNAME_FIELDNAME) @@ -225,7 +245,7 @@ public class CSVResponseStreamWriter implements ResponseStreamWriter, Applicatio final StringBuilder buf = new StringBuilder(5) .append(channelName.getName()) .append(DELIMITER_CHANNELNAME_FIELDNAME) - .append(QueryField.value.name()) + .append(EventField.value.name()) .append(DELIMITER_CHANNELNAME_FIELDNAME) .append(aggregation.name()); @@ -236,8 +256,8 @@ public class CSVResponseStreamWriter implements ResponseStreamWriter, Applicatio if (extrema != null && queryAnalyzer.isAggregationEnabled()) { for (final Extrema extremum : extrema) { - for (final QueryField field : queryFields) { - final Function accessor = extremum.getAccessor(field); + for (final EventField field : queryFields) { + final Function accessor = extremum.getAccessor(field); if (accessor != null) { final StringBuilder buf = new StringBuilder(7) .append(channelName.getName()) diff 
--git a/src/main/java/ch/psi/daq/queryrest/response/csv/QueryFieldStringifyer.java b/src/main/java/ch/psi/daq/queryrest/response/csv/QueryFieldStringifyer.java index 7e78c11..1dcf659 100644 --- a/src/main/java/ch/psi/daq/queryrest/response/csv/QueryFieldStringifyer.java +++ b/src/main/java/ch/psi/daq/queryrest/response/csv/QueryFieldStringifyer.java @@ -10,11 +10,11 @@ public class QueryFieldStringifyer implements Function { public static final String OPEN_BRACKET = "["; public static final String CLOSE_BRACKET = "]"; - private Function accessor; + private Function accessor; private String nonValue; private String arraySeparator; - public QueryFieldStringifyer(Function accessor, String nonValue, String arraySeparator) { + public QueryFieldStringifyer(Function accessor, String nonValue, String arraySeparator) { this.accessor = accessor; this.nonValue = nonValue; this.arraySeparator = arraySeparator; diff --git a/src/main/java/ch/psi/daq/queryrest/response/formatter/AnyResponseFormatter.java b/src/main/java/ch/psi/daq/queryrest/response/formatter/AnyResponseFormatter.java new file mode 100644 index 0000000..b7c1c58 --- /dev/null +++ b/src/main/java/ch/psi/daq/queryrest/response/formatter/AnyResponseFormatter.java @@ -0,0 +1,88 @@ +package ch.psi.daq.queryrest.response.formatter; + +import java.io.OutputStream; +import java.util.Collections; +import java.util.LinkedHashSet; +import java.util.Set; +import java.util.stream.Collectors; + +import org.springframework.beans.BeansException; +import org.springframework.context.ApplicationContext; +import org.springframework.context.ApplicationContextAware; + +import com.fasterxml.jackson.core.JsonEncoding; +import com.fasterxml.jackson.core.JsonFactory; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.ObjectWriter; + +import ch.psi.daq.domain.query.operation.ConfigField; +import ch.psi.daq.domain.query.operation.EventField; +import 
ch.psi.daq.domain.query.operation.QueryField; +import ch.psi.daq.queryrest.response.AbstractHTTPResponse; +import ch.psi.daq.queryrest.response.ResponseFormatter; + +public class AnyResponseFormatter implements ResponseFormatter, + ApplicationContextAware { + private String eventFieldsBeanName; + private String configFieldsBeanName; + + private Set eventFields; + private Set configFields; + + public AnyResponseFormatter(final String eventFieldsBeanName, final String configFieldsBeanName) { + this.eventFieldsBeanName = eventFieldsBeanName; + this.configFieldsBeanName = configFieldsBeanName; + } + + public AnyResponseFormatter(final Set eventFields, final Set configFields) { + this.eventFields = eventFields; + this.configFields = configFields; + } + + @SuppressWarnings("unchecked") + @Override + public void setApplicationContext(ApplicationContext context) throws BeansException { + if (eventFields == null) { + final Set defaultEventFields = + context.getBean(eventFieldsBeanName, Set.class); + this.eventFields = + defaultEventFields.stream().map(QueryField::getName) + .collect(Collectors.toCollection(LinkedHashSet::new)); + } + + if (configFields == null) { + final Set defaultConfigFields = + context.getBean(configFieldsBeanName, Set.class); + this.configFields = + defaultConfigFields.stream().map(QueryField::getName) + .collect(Collectors.toCollection(LinkedHashSet::new)); + } + } + + public Set getEventFields() { + return Collections.unmodifiableSet(eventFields); + } + + public Set getConfigFields() { + return Collections.unmodifiableSet(configFields); + } + + @Override + public void format( + final JsonFactory factory, + final ObjectMapper mapper, + final Object result, + final OutputStream out, + final AbstractHTTPResponse response) throws Exception { + final JsonGenerator generator = factory.createGenerator(out, JsonEncoding.UTF8); + final ObjectWriter writer = DAQQueriesResponseFormatter.configureWriter(mapper, eventFields, configFields); + + try { + 
writer.writeValue(generator, result); + } finally { + generator.flush(); + generator.close(); + } + } +} diff --git a/src/main/java/ch/psi/daq/queryrest/response/formatter/DAQConfigQueryResponseFormatter.java b/src/main/java/ch/psi/daq/queryrest/response/formatter/DAQConfigQueryResponseFormatter.java new file mode 100644 index 0000000..1566242 --- /dev/null +++ b/src/main/java/ch/psi/daq/queryrest/response/formatter/DAQConfigQueryResponseFormatter.java @@ -0,0 +1,127 @@ +package ch.psi.daq.queryrest.response.formatter; + +import java.io.OutputStream; +import java.util.LinkedHashSet; +import java.util.Map.Entry; +import java.util.Set; +import java.util.concurrent.atomic.AtomicReference; +import java.util.stream.Stream; + +import org.apache.commons.lang3.tuple.Triple; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; +import org.springframework.beans.BeansException; +import org.springframework.context.ApplicationContext; +import org.springframework.context.ApplicationContextAware; + +import com.fasterxml.jackson.core.JsonEncoding; +import com.fasterxml.jackson.core.JsonFactory; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.ObjectWriter; + +import ch.psi.daq.domain.json.ChannelName; +import ch.psi.daq.domain.query.DAQConfigQueryElement; +import ch.psi.daq.domain.query.backend.BackendQuery; +import ch.psi.daq.domain.query.operation.ConfigField; +import ch.psi.daq.domain.query.operation.EventField; +import ch.psi.daq.domain.query.operation.QueryField; +import ch.psi.daq.queryrest.response.AbstractHTTPResponse; +import ch.psi.daq.queryrest.response.ResponseFormatter; + +public class DAQConfigQueryResponseFormatter + implements ResponseFormatter>>>, + ApplicationContextAware { + private static final Logger LOGGER = LoggerFactory.getLogger(DAQConfigQueryResponseFormatter.class); + + public static final String CONFIGS_RESP_FIELD = "configs"; + + @Override + public void 
setApplicationContext(ApplicationContext context) throws BeansException {} + + @Override + public void format( + final JsonFactory factory, + final ObjectMapper mapper, + final Entry>> result, + final OutputStream out, + final AbstractHTTPResponse response) throws Exception { + final AtomicReference exception = new AtomicReference<>(); + final JsonGenerator generator = factory.createGenerator(out, JsonEncoding.UTF8); + + final DAQConfigQueryElement daqQuery = result.getKey(); + final Set includedFields = getFields(daqQuery, true); + final ObjectWriter writer = DAQQueriesResponseFormatter.configureWriter(mapper, null, includedFields); + + try { + writeArrayFormat(generator, writer, result, exception); + } finally { + generator.flush(); + generator.close(); + } + + if (exception.get() != null) { + throw exception.get(); + } + } + + private void writeArrayFormat(final JsonGenerator generator, final ObjectWriter writer, + final Entry>> entryy, + final AtomicReference exception) { + final DAQConfigQueryElement daqQuery = entryy.getKey(); + + try { + generator.writeStartArray(); + + entryy.getValue() + /* ensure elements are sequentially written */ + .sequential() + .forEach( + triple -> { + try { + generator.writeStartObject(); + generator.writeFieldName(EventField.channel.name()); + writer.writeValue(generator, triple.getMiddle()); + generator.writeFieldName(CONFIGS_RESP_FIELD); + writer.writeValue(generator, triple.getRight()); + generator.writeEndObject(); + } catch (Exception e) { + LOGGER.error("Could not write channel name of channel configuration '{}'", + triple.getMiddle(), + e); + exception.compareAndSet(null, e); + } finally { + if (triple.getRight() instanceof Stream) { + ((Stream) (triple.getRight())).close(); + } + } + }); + + generator.writeEndArray(); + } catch (Exception e) { + LOGGER.error("Exception while writing json for '{}'", daqQuery.getChannels(), e); + exception.compareAndSet(null, e); + } + } + + private static Set getFields(final 
DAQConfigQueryElement query, + final boolean removeIdentifiers) { + final Set queryFields = query.getFields(); + + final Set includedFields = + new LinkedHashSet(queryFields.size()); + + for (final QueryField field : queryFields) { + includedFields.add(field.getName()); + } + + if (removeIdentifiers) { + // do not write channel/backend since it is already provided as key in mapping + includedFields.remove(ConfigField.channel.name()); + includedFields.remove(ConfigField.name.name()); + includedFields.remove(ConfigField.backend.name()); + } + + return includedFields; + } +} diff --git a/src/main/java/ch/psi/daq/queryrest/response/formatter/DAQQueriesResponseFormatter.java b/src/main/java/ch/psi/daq/queryrest/response/formatter/DAQQueriesResponseFormatter.java new file mode 100644 index 0000000..eebc97f --- /dev/null +++ b/src/main/java/ch/psi/daq/queryrest/response/formatter/DAQQueriesResponseFormatter.java @@ -0,0 +1,328 @@ +package ch.psi.daq.queryrest.response.formatter; + +import java.io.OutputStream; +import java.util.Iterator; +import java.util.LinkedHashMap; +import java.util.LinkedHashSet; +import java.util.List; +import java.util.Map; +import java.util.Set; +import java.util.Map.Entry; +import java.util.concurrent.atomic.AtomicReference; +import java.util.function.Function; +import java.util.function.ToLongFunction; +import java.util.stream.Collectors; +import java.util.stream.Stream; + +import org.apache.commons.lang3.tuple.Triple; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; +import org.springframework.beans.BeansException; +import org.springframework.context.ApplicationContext; +import org.springframework.context.ApplicationContextAware; + +import com.fasterxml.jackson.core.JsonEncoding; +import com.fasterxml.jackson.core.JsonFactory; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.ObjectWriter; +import 
com.fasterxml.jackson.databind.ser.impl.SimpleBeanPropertyFilter; +import com.fasterxml.jackson.databind.ser.impl.SimpleFilterProvider; + +import ch.psi.daq.common.stream.match.ListCreator; +import ch.psi.daq.common.stream.match.ListFiller; +import ch.psi.daq.common.stream.match.Padder; +import ch.psi.daq.common.stream.match.StreamMatcher; +import ch.psi.daq.common.time.TimeUtils; +import ch.psi.daq.domain.DataEvent; +import ch.psi.daq.domain.json.ChannelName; +import ch.psi.daq.domain.query.DAQQueryElement; +import ch.psi.daq.domain.query.backend.BackendQuery; +import ch.psi.daq.domain.query.bin.BinningStrategy; +import ch.psi.daq.domain.query.bin.strategy.BinningStrategyPerBinPulse; +import ch.psi.daq.domain.query.bin.strategy.BinningStrategyPerBinTime; +import ch.psi.daq.domain.query.mapping.IncompleteStrategy; +import ch.psi.daq.domain.query.mapping.Mapping; +import ch.psi.daq.domain.query.operation.Aggregation; +import ch.psi.daq.domain.query.operation.EventField; +import ch.psi.daq.domain.query.operation.Extrema; +import ch.psi.daq.domain.query.operation.QueryField; +import ch.psi.daq.domain.request.range.RequestRange; +import ch.psi.daq.query.bin.aggregate.BinnedValueCombiner; +import ch.psi.daq.queryrest.config.QueryRestConfig; +import ch.psi.daq.queryrest.model.HistoricChannelConfigurationPropertyFilterMixin; +import ch.psi.daq.queryrest.model.PropertyFilterMixin; +import ch.psi.daq.queryrest.response.AbstractHTTPResponse; +import ch.psi.daq.queryrest.response.ResponseFormatter; +import ch.psi.daq.queryrest.response.json.JSONResponseStreamWriter; + +public class DAQQueriesResponseFormatter implements ResponseFormatter>>>>, ApplicationContextAware { + private static final Logger LOGGER = LoggerFactory.getLogger(JSONResponseStreamWriter.class); + + public static final String DATA_RESP_FIELD = "data"; + + public static final Mapping DEFAULT_MAPPING = new Mapping(IncompleteStrategy.PROVIDE_AS_IS); + private static final long MILLIS_PER_PULSE = 
TimeUtils.MILLIS_PER_PULSE; + private static final Function KEY_PROVIDER = (event) -> new ChannelName(event.getChannel(), + event.getBackend()); + // try to match sync data (bsread) with non sync data (epics) based on the time using 10 millis + // buckets. + private static final ToLongFunction MATCHER_PROVIDER = (event) -> event.getGlobalMillis() + / MILLIS_PER_PULSE; + + // In case ArchiverAppliance had several events within the 10ms mapping interval, return these + // aggregations (only used for table format) + private Set defaultEventResponseAggregations; + + @SuppressWarnings("unchecked") + @Override + public void setApplicationContext(ApplicationContext context) throws BeansException { + final Set defaultEventResponseAggregations = + context.getBean(QueryRestConfig.BEAN_NAME_DEFAULT_EVENT_RESPONSE_AGGREGATIONS, Set.class);; + this.defaultEventResponseAggregations = + defaultEventResponseAggregations.stream().map(Aggregation::name) + .collect(Collectors.toCollection(LinkedHashSet::new)); + } + + @Override + public void format( + final JsonFactory factory, + final ObjectMapper mapper, + final List>>> results, + final OutputStream out, + final AbstractHTTPResponse response) throws Exception { + final AtomicReference exception = new AtomicReference<>(); + final JsonGenerator generator = factory.createGenerator(out, JsonEncoding.UTF8); + + try { + if (results.size() > 1) { + generator.writeStartArray(); + } + + results + .forEach(entryy -> { + final DAQQueryElement daqQuery = entryy.getKey(); + + if (response.useTableFormat(daqQuery)) { + final Set includedFields = getFields(daqQuery, false); + /* make sure identifiers are available */ + includedFields.add(EventField.channel.name()); + includedFields.add(EventField.backend.name()); + // issue ATEST-633 + if (!containsAggregation(includedFields)) { + includedFields.addAll(defaultEventResponseAggregations); + } + + final ObjectWriter writer = configureWriter(mapper, includedFields, null); + + 
writeTableFormat(generator, writer, entryy, exception); + } else { + final Set includedFields = getFields(daqQuery, true); + final ObjectWriter writer = configureWriter(mapper, includedFields, null); + + writeArrayFormat(generator, writer, entryy, exception); + } + }); + } finally { + if (results.size() > 1) { + generator.writeEndArray(); + } + + generator.flush(); + generator.close(); + } + + if (exception.get() != null) { + throw exception.get(); + } + } + + private static void writeArrayFormat(final JsonGenerator generator, final ObjectWriter writer, + final Entry>> entryy, + final AtomicReference exception) { + final DAQQueryElement daqQuery = entryy.getKey(); + + try { + generator.writeStartArray(); + + entryy.getValue() + /* ensure elements are sequentially written */ + .sequential() + .forEach( + triple -> { + try { + generator.writeStartObject(); + generator.writeFieldName(EventField.channel.name()); + writer.writeValue(generator, triple.getMiddle()); + generator.writeFieldName(DATA_RESP_FIELD); + writer.writeValue(generator, triple.getRight()); + generator.writeEndObject(); + } catch (Exception e) { + LOGGER.error("Could not write channel name of channel '{}'", triple.getMiddle(), + e); + exception.compareAndSet(null, e); + } finally { + if (triple.getRight() instanceof Stream) { + ((Stream) (triple.getRight())).close(); + } + } + }); + + generator.writeEndArray(); + } catch (Exception e) { + LOGGER.error("Exception while writing json for '{}'", daqQuery.getChannels(), e); + exception.compareAndSet(null, e); + } + } + + @SuppressWarnings("unchecked") + private static void writeTableFormat(JsonGenerator generator, ObjectWriter writer, + Entry>> entryy, + AtomicReference exception) { + /* get DataEvent stream of sub-queries for later match */ + final Map> streams = + new LinkedHashMap<>(); + final AtomicReference backendQueryRef = new AtomicReference<>(); + final DAQQueryElement daqQuery = entryy.getKey(); + + entryy.getValue() + .sequential() + .forEach( + 
triple -> { + backendQueryRef.compareAndSet(null, triple.getLeft()); + + if (triple.getRight() instanceof Stream) { + streams.put(triple.getMiddle(), ((Stream) triple.getRight())); + } else { + final String message = + String.format("Expect a DataEvent Stream for '%s' but got '%s'.", + triple.getMiddle(), triple.getRight().getClass().getSimpleName()); + LOGGER.warn(message); + streams.put(triple.getMiddle(), Stream.empty()); + } + }); + + final BackendQuery backendQuery = backendQueryRef.get(); + final RequestRange requestRange = backendQuery.getRequest().getRequestRange(); + BinningStrategy binningStrategy = backendQuery.getBinningStrategy(); + + final Mapping mapping = daqQuery.getMappingOrDefault(DEFAULT_MAPPING); + final Padder padder = mapping.getIncomplete().getPadder(backendQuery); + + ToLongFunction matchProvider = binningStrategy; + if (binningStrategy == null) { + matchProvider = MATCHER_PROVIDER; + if (requestRange.isPulseIdRangeDefined()) { + binningStrategy = new BinningStrategyPerBinPulse(1); + } else if (requestRange.isTimeRangeDefined()) { + binningStrategy = new BinningStrategyPerBinTime(MILLIS_PER_PULSE); + } else { + final String message = "Either time or pulseId range must be defined by the query!"; + LOGGER.error(message); + throw new IllegalStateException(message); + } + } + binningStrategy.setRequestRange(requestRange); + + /* online matching of the stream's content */ + final StreamMatcher> streamMatcher = + new StreamMatcher<>( + KEY_PROVIDER, + matchProvider, + new ListCreator(), + new ListFiller(), + new BinnedValueCombiner(binningStrategy), + padder, + streams.values()); + final Iterator> streamsMatchIter = streamMatcher.iterator(); + + try { + generator.writeStartObject(); + generator.writeFieldName(DATA_RESP_FIELD); + writer.writeValue(generator, streamsMatchIter); + generator.writeEndObject(); + } catch (Exception e) { + LOGGER.error("Exception while writing json for '{}'", daqQuery.getChannels(), e); + exception.compareAndSet(null, 
e); + } finally { + if (streamMatcher != null) { + try { + streamMatcher.close(); + } catch (Throwable t) { + LOGGER.error( + "Something went wrong while closing stream matcher for JSON table response writer.", + t); + } + } + } + } + + /** + * Configures the writer dynamically by including the fields which should be included in the + * response. + * + * @param mapper The ObjectMapper + * @param includedEventFields set of strings which correspond to the getter method names of the + * classes registered as a mixed-in + * @param includedConfigFields set of strings which correspond to the getter method names of the + * classes registered as a mixed-in + * @return the configured writer that includes the specified fields + */ + public static ObjectWriter configureWriter(final ObjectMapper mapper, final Set includedEventFields, + final Set includedConfigFields) { + final SimpleFilterProvider propertyFilter = new SimpleFilterProvider(); + if (includedEventFields != null) { + propertyFilter.addFilter(PropertyFilterMixin.FILTER_NAME, + SimpleBeanPropertyFilter.filterOutAllExcept(includedEventFields)); + } + if (includedConfigFields != null) { + propertyFilter.addFilter(HistoricChannelConfigurationPropertyFilterMixin.FILTER_NAME, + SimpleBeanPropertyFilter.filterOutAllExcept(includedConfigFields)); + } + // only write the properties not excluded in the filter + final ObjectWriter writer = mapper.writer(propertyFilter); + return writer; + } + + private static Set getFields(final DAQQueryElement query, final boolean removeIdentifiers) { + final Set queryFields = query.getFields(); + final List aggregations = + query.getAggregation() != null ? query.getAggregation().getAggregations() : null; + final List extrema = query.getAggregation() != null ? query.getAggregation().getExtrema() : null; + + final Set includedFields = + new LinkedHashSet(queryFields.size() + (aggregations != null ? aggregations.size() : 0) + + (extrema != null ? 
extrema.size() : 0)); + + for (final QueryField field : queryFields) { + includedFields.add(field.getName()); + } + if (aggregations != null) { + for (final Aggregation aggregation : aggregations) { + includedFields.add(aggregation.name()); + } + } + if (extrema != null) { + // field of ExtremaCalculator (extrema in BinnedValueCombinedDataEvent and + // BinnedIndexCombinedDataEvent) + includedFields.add("extrema"); + } + + if (removeIdentifiers) { + // do not write channel/backend since it is already provided as key in mapping + includedFields.remove(EventField.channel.name()); + includedFields.remove(EventField.backend.name()); + } + + return includedFields; + } + + private static boolean containsAggregation(final Set includedFields) { + for (final Aggregation aggregation : Aggregation.values()) { + if (includedFields.contains(aggregation.name())) { + return true; + } + } + return false; + } +} diff --git a/src/main/java/ch/psi/daq/queryrest/response/json/AbstractResponseStreamWriter.java b/src/main/java/ch/psi/daq/queryrest/response/json/AbstractResponseStreamWriter.java new file mode 100644 index 0000000..617c574 --- /dev/null +++ b/src/main/java/ch/psi/daq/queryrest/response/json/AbstractResponseStreamWriter.java @@ -0,0 +1,51 @@ +package ch.psi.daq.queryrest.response.json; + +import java.io.OutputStream; + +import javax.servlet.ServletResponse; + +import com.fasterxml.jackson.core.JsonFactory; +import com.fasterxml.jackson.databind.ObjectMapper; + +import ch.psi.daq.domain.backend.Backend; +import ch.psi.daq.queryrest.response.AbstractHTTPResponse; +import ch.psi.daq.queryrest.response.ResponseFormatter; +import ch.psi.daq.queryrest.response.ResponseStreamWriter; + +/** + * Takes a Java 8 stream and writes it to the output stream provided by the {@link ServletResponse} + * of the current request. 
+ */ +public abstract class AbstractResponseStreamWriter implements ResponseStreamWriter { + + protected void init(final Backend backend) {} + + @Override + public void respond( + final Object query, + final R result, + final OutputStream out, + final AbstractHTTPResponse response, + final ResponseFormatter formatter) throws Exception { + formatter.format(getJsonFactory(), getObjectMapper(), result, out, response); + + // if (query instanceof DAQQueries) { + // DAQQueriesResponseFormatter.respond(getJsonFactory(), getObjectMapper(), + // defaultEventResponseAggregations, + // (List>>>) result, + // out, response); + // } else if (query instanceof DAQConfigQuery) { + // DAQChannelConfigurationQueryResponseWriter.respond(getJsonFactory(), getObjectMapper(), + // (Entry>>) result, + // out, response); + // } else { + // AnyResponseWriter.respond(getJsonFactory(), getObjectMapper(), result, out, response, + // defaultEventFields, + // defaultConfigFields); + // } + } + + protected abstract JsonFactory getJsonFactory(); + + protected abstract ObjectMapper getObjectMapper(); +} diff --git a/src/main/java/ch/psi/daq/queryrest/response/json/JSONHTTPResponse.java b/src/main/java/ch/psi/daq/queryrest/response/json/JSONHTTPResponse.java index 856f4e3..8e97b02 100644 --- a/src/main/java/ch/psi/daq/queryrest/response/json/JSONHTTPResponse.java +++ b/src/main/java/ch/psi/daq/queryrest/response/json/JSONHTTPResponse.java @@ -1,35 +1,25 @@ package ch.psi.daq.queryrest.response.json; import java.io.OutputStream; -import java.util.List; -import java.util.Map.Entry; -import java.util.stream.Stream; import javax.servlet.http.HttpServletResponse; -import org.apache.commons.lang3.tuple.Triple; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; import org.springframework.context.ApplicationContext; import org.springframework.http.MediaType; import com.hazelcast.util.collection.ArrayUtils; import ch.psi.daq.domain.FieldNames; -import ch.psi.daq.domain.json.ChannelName; import 
ch.psi.daq.domain.query.DAQQueries; import ch.psi.daq.domain.query.DAQQueryElement; -import ch.psi.daq.domain.query.backend.BackendQuery; import ch.psi.daq.domain.query.operation.Compression; -import ch.psi.daq.domain.query.operation.QueryField; +import ch.psi.daq.domain.query.operation.EventField; import ch.psi.daq.domain.query.response.ResponseFormat; -import ch.psi.daq.queryrest.query.QueryManager; import ch.psi.daq.queryrest.response.AbstractHTTPResponse; +import ch.psi.daq.queryrest.response.ResponseFormatter; import ch.psi.daq.queryrest.response.ResponseStreamWriter; public class JSONHTTPResponse extends AbstractHTTPResponse { - private static final Logger LOGGER = LoggerFactory.getLogger(JSONHTTPResponse.class); - public static final String FORMAT = "json"; public static final String CONTENT_TYPE = MediaType.APPLICATION_JSON_VALUE; @@ -43,47 +33,53 @@ public class JSONHTTPResponse extends AbstractHTTPResponse { } @Override - public void respond(final ApplicationContext context, final DAQQueries queries, final HttpServletResponse response) throws Exception { - final OutputStream out = handleCompressionAndResponseHeaders(response, CONTENT_TYPE); - - final boolean hasMapping = JSONHTTPResponse.validateQueries(queries); - - try { - LOGGER.debug("Executing query '{}'", queries); - - final QueryManager queryManager = context.getBean(QueryManager.class); - final ResponseStreamWriter streamWriter; - if (hasMapping) { - streamWriter = context.getBean(JSONTableResponseStreamWriter.class); - } else { - streamWriter = context.getBean(JSONResponseStreamWriter.class); - } - - // execute query - final List>>> result = - queryManager.getEvents(queries); - // write the response back to the client using java 8 streams - streamWriter.respond(result, out, this); - } catch (Exception e) { - LOGGER.error("Failed to execute query '{}'.", queries, e); - throw e; - } + public void validateQuery(final Object query) { + JSONHTTPResponse.defaultQueryValidation(query); } + // @Override 
+ // public void respond(final ApplicationContext context, final HttpServletResponse response, + // final Object query, + // final Object result) throws Exception { + // final OutputStream out = handleCompressionAndResponseHeaders(response, CONTENT_TYPE); + // final boolean useTableFormat = JSONHTTPResponse.useTableFormat(query); + // + // final ResponseStreamWriter streamWriter; + // if (useTableFormat) { + // streamWriter = context.getBean(JSONTableResponseStreamWriter.class); + // } else { + // streamWriter = context.getBean(JSONResponseStreamWriter.class); + // } + // + // // write the response back to the client using java 8 streams + // streamWriter.respond(query, result, out, this); + // } - public static boolean validateQueries(final DAQQueries queries) { - boolean hasMapping = false; + @Override + public void respond( + final ApplicationContext context, + final HttpServletResponse response, + final Object query, + final R result, + final ResponseFormatter formatter) throws Exception { + final OutputStream out = handleCompressionAndResponseHeaders(response, CONTENT_TYPE); - for (final DAQQueryElement query : queries) { - if (query.getMapping() != null) { - hasMapping = true; + final ResponseStreamWriter streamWriter = context.getBean(JSONResponseStreamWriter.class); - if (!ArrayUtils.contains(query.getColumns(), FieldNames.FIELD_GLOBAL_TIME)) { - query.addField(QueryField.globalMillis); + // write the response back to the client using java 8 streams + streamWriter.respond(query, result, out, this, formatter); + } + + public static void defaultQueryValidation(final Object queryObj) { + if (queryObj instanceof DAQQueries) { + final DAQQueries queries = (DAQQueries) queryObj; + for (final DAQQueryElement query : queries) { + if (query.getMapping() != null) { + if (!ArrayUtils.contains(query.getColumns(), FieldNames.FIELD_GLOBAL_TIME)) { + query.addField(EventField.globalMillis); + } } } } - - return hasMapping; } } diff --git 
a/src/main/java/ch/psi/daq/queryrest/response/json/JSONResponseStreamWriter.java b/src/main/java/ch/psi/daq/queryrest/response/json/JSONResponseStreamWriter.java index 1104fef..cfc0392 100644 --- a/src/main/java/ch/psi/daq/queryrest/response/json/JSONResponseStreamWriter.java +++ b/src/main/java/ch/psi/daq/queryrest/response/json/JSONResponseStreamWriter.java @@ -1,181 +1,39 @@ package ch.psi.daq.queryrest.response.json; -import java.io.OutputStream; -import java.util.LinkedHashSet; -import java.util.List; -import java.util.Map.Entry; -import java.util.Set; -import java.util.concurrent.atomic.AtomicReference; -import java.util.stream.Stream; - -import javax.servlet.ServletResponse; - -import org.apache.commons.lang3.tuple.Triple; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; import org.springframework.beans.BeansException; import org.springframework.context.ApplicationContext; import org.springframework.context.ApplicationContextAware; -import com.fasterxml.jackson.core.JsonEncoding; import com.fasterxml.jackson.core.JsonFactory; -import com.fasterxml.jackson.core.JsonGenerator; import com.fasterxml.jackson.databind.ObjectMapper; -import com.fasterxml.jackson.databind.ObjectWriter; -import com.fasterxml.jackson.databind.ser.impl.SimpleBeanPropertyFilter; -import com.fasterxml.jackson.databind.ser.impl.SimpleFilterProvider; import ch.psi.daq.domain.backend.Backend; import ch.psi.daq.domain.config.DomainConfig; -import ch.psi.daq.domain.json.ChannelName; -import ch.psi.daq.domain.query.DAQQueryElement; -import ch.psi.daq.domain.query.backend.BackendQuery; -import ch.psi.daq.domain.query.operation.Aggregation; -import ch.psi.daq.domain.query.operation.Extrema; -import ch.psi.daq.domain.query.operation.QueryField; -import ch.psi.daq.domain.query.response.Response; import ch.psi.daq.queryrest.config.QueryRestConfig; -import ch.psi.daq.queryrest.model.PropertyFilterMixin; -import ch.psi.daq.queryrest.response.ResponseStreamWriter; - -/** - * Takes a Java 8 
stream and writes it to the output stream provided by the {@link ServletResponse} - * of the current request. - */ -public class JSONResponseStreamWriter implements ResponseStreamWriter, ApplicationContextAware { - - public static final String DATA_RESP_FIELD = "data"; - - private static final Logger LOGGER = LoggerFactory.getLogger(JSONResponseStreamWriter.class); +public class JSONResponseStreamWriter extends AbstractResponseStreamWriter implements ApplicationContextAware { private ObjectMapper mapper; private JsonFactory factory; @Override public void setApplicationContext(ApplicationContext context) throws BeansException { final Backend backend = context.getBean(DomainConfig.BEAN_NAME_BACKEND_DEFAULT, Backend.class); + context = backend.getApplicationContext(); mapper = context.getBean(DomainConfig.BEAN_NAME_OBJECT_MAPPER, ObjectMapper.class); factory = context.getBean(QueryRestConfig.BEAN_NAME_JSON_FACTORY, JsonFactory.class); + + super.init(backend); } @Override - public void respond(final List>>> results, - final OutputStream out, final Response response) throws Exception { - respond(factory, mapper, results, out, response); + protected JsonFactory getJsonFactory() { + return factory; } - public static Set getFields(final DAQQueryElement query, final boolean removeIdentifiers) { - final Set queryFields = query.getFields(); - final List aggregations = - query.getAggregation() != null ? query.getAggregation().getAggregations() : null; - final List extrema = query.getAggregation() != null ? query.getAggregation().getExtrema() : null; - - final Set includedFields = - new LinkedHashSet(queryFields.size() + (aggregations != null ? aggregations.size() : 0) - + (extrema != null ? 
extrema.size() : 0)); - - for (final QueryField field : queryFields) { - includedFields.add(field.name()); - } - if (aggregations != null) { - for (final Aggregation aggregation : aggregations) { - includedFields.add(aggregation.name()); - } - } - if (extrema != null) { - // field of ExtremaCalculator (extrema in BinnedValueCombinedDataEvent and - // BinnedIndexCombinedDataEvent) - includedFields.add("extrema"); - } - - if (removeIdentifiers) { - // do not write channel/backend since it is already provided as key in mapping - includedFields.remove(QueryField.channel.name()); - includedFields.remove(QueryField.backend.name()); - } - - return includedFields; - } - - public static void respond(final JsonFactory factory, final ObjectMapper mapper, - final List>>> results, - final OutputStream out, final Response response) throws Exception { - final AtomicReference exception = new AtomicReference<>(); - final JsonGenerator generator = factory.createGenerator(out, JsonEncoding.UTF8); - - try { - if (results.size() > 1) { - generator.writeStartArray(); - } - - results - .forEach(entryy -> { - final DAQQueryElement daqQuery = entryy.getKey(); - final Set includedFields = getFields(daqQuery, true); - final ObjectWriter writer = configureWriter(includedFields, mapper); - - try { - generator.writeStartArray(); - - entryy.getValue() - /* ensure elements are sequentially written */ - .sequential() - .forEach( - triple -> { - try { - generator.writeStartObject(); - generator.writeFieldName(QueryField.channel.name()); - writer.writeValue(generator, triple.getMiddle()); - generator.writeFieldName(DATA_RESP_FIELD); - writer.writeValue(generator, triple.getRight()); - generator.writeEndObject(); - } catch (Exception e) { - LOGGER.error("Could not write channel name of channel '{}'", triple.getMiddle(), - e); - exception.compareAndSet(null, e); - } finally { - if (triple.getRight() instanceof Stream) { - ((Stream) (triple.getRight())).close(); - } - } - }); - - 
generator.writeEndArray(); - } catch (Exception e) { - LOGGER.error("Exception while writing json for '{}'", daqQuery.getChannels(), e); - exception.compareAndSet(null, e); - } - }); - } finally { - if (results.size() > 1) { - generator.writeEndArray(); - } - - generator.flush(); - generator.close(); - } - - if (exception.get() != null) { - throw exception.get(); - } - } - - /** - * Configures the writer dynamically by including the fields which should be included in the - * response. - * - * @param includedFields set of strings which correspond to the getter method names of the - * classes registered as a mixed-in - * @param mapper The ObjectMapper - * @return the configured writer that includes the specified fields - */ - public static ObjectWriter configureWriter(final Set includedFields, final ObjectMapper mapper) { - final SimpleFilterProvider propertyFilter = new SimpleFilterProvider(); - propertyFilter.addFilter(PropertyFilterMixin.FILTER_NAME, SimpleBeanPropertyFilter.filterOutAllExcept(includedFields)); - // only write the properties not excluded in the filter - final ObjectWriter writer = mapper.writer(propertyFilter); - return writer; + @Override + protected ObjectMapper getObjectMapper() { + return mapper; } } diff --git a/src/main/java/ch/psi/daq/queryrest/response/json/JSONTableResponseStreamWriter.java b/src/main/java/ch/psi/daq/queryrest/response/json/JSONTableResponseStreamWriter.java deleted file mode 100644 index 114b856..0000000 --- a/src/main/java/ch/psi/daq/queryrest/response/json/JSONTableResponseStreamWriter.java +++ /dev/null @@ -1,232 +0,0 @@ -package ch.psi.daq.queryrest.response.json; - -import java.io.IOException; -import java.io.OutputStream; -import java.util.Iterator; -import java.util.LinkedHashMap; -import java.util.LinkedHashSet; -import java.util.List; -import java.util.Map; -import java.util.Map.Entry; -import java.util.Set; -import java.util.concurrent.atomic.AtomicReference; -import java.util.function.Function; -import 
java.util.function.ToLongFunction; -import java.util.stream.Collectors; -import java.util.stream.Stream; - -import javax.servlet.ServletResponse; - -import org.apache.commons.lang3.tuple.Triple; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; -import org.springframework.beans.BeansException; -import org.springframework.context.ApplicationContext; -import org.springframework.context.ApplicationContextAware; - -import com.fasterxml.jackson.core.JsonEncoding; -import com.fasterxml.jackson.core.JsonFactory; -import com.fasterxml.jackson.core.JsonGenerator; -import com.fasterxml.jackson.databind.ObjectMapper; -import com.fasterxml.jackson.databind.ObjectWriter; - -import ch.psi.daq.common.stream.match.ListCreator; -import ch.psi.daq.common.stream.match.ListFiller; -import ch.psi.daq.common.stream.match.Padder; -import ch.psi.daq.common.stream.match.StreamMatcher; -import ch.psi.daq.common.time.TimeUtils; -import ch.psi.daq.domain.DataEvent; -import ch.psi.daq.domain.backend.Backend; -import ch.psi.daq.domain.config.DomainConfig; -import ch.psi.daq.domain.json.ChannelName; -import ch.psi.daq.domain.query.DAQQueryElement; -import ch.psi.daq.domain.query.backend.BackendQuery; -import ch.psi.daq.domain.query.bin.BinningStrategy; -import ch.psi.daq.domain.query.bin.strategy.BinningStrategyPerBinPulse; -import ch.psi.daq.domain.query.bin.strategy.BinningStrategyPerBinTime; -import ch.psi.daq.domain.query.mapping.IncompleteStrategy; -import ch.psi.daq.domain.query.mapping.Mapping; -import ch.psi.daq.domain.query.operation.Aggregation; -import ch.psi.daq.domain.query.operation.QueryField; -import ch.psi.daq.domain.query.response.Response; -import ch.psi.daq.domain.request.range.RequestRange; -import ch.psi.daq.query.bin.aggregate.BinnedValueCombiner; -import ch.psi.daq.queryrest.config.QueryRestConfig; -import ch.psi.daq.queryrest.response.ResponseStreamWriter; - -/** - * Takes a Java 8 stream and writes it to the output stream provided by the {@link ServletResponse} 
- * of the current request. - */ -public class JSONTableResponseStreamWriter implements ResponseStreamWriter, ApplicationContextAware { - - private static final Logger LOGGER = LoggerFactory.getLogger(JSONTableResponseStreamWriter.class); - - public static final Mapping DEFAULT_MAPPING = new Mapping(IncompleteStrategy.PROVIDE_AS_IS); - private static final long MILLIS_PER_PULSE = TimeUtils.MILLIS_PER_PULSE; - private static final Function KEY_PROVIDER = (event) -> new ChannelName(event.getChannel(), - event.getBackend()); - // try to match sync data (bsread) with non sync data (epics) based on the time usin 10 millis - // buckets. - private static final ToLongFunction MATCHER_PROVIDER = (event) -> event.getGlobalMillis() - / MILLIS_PER_PULSE; - - - private ObjectMapper mapper; - private JsonFactory factory; - // In case ArchiverAppliance had several events within the 10ms mapping interval, return these - // aggregations - private Set defaultResponseAggregationsStr; - - @SuppressWarnings("unchecked") - @Override - public void setApplicationContext(ApplicationContext context) throws BeansException { - final Backend backend = context.getBean(DomainConfig.BEAN_NAME_BACKEND_DEFAULT, Backend.class); - context = backend.getApplicationContext(); - - mapper = context.getBean(DomainConfig.BEAN_NAME_OBJECT_MAPPER, ObjectMapper.class); - factory = context.getBean(QueryRestConfig.BEAN_NAME_JSON_FACTORY, JsonFactory.class); - - final Set defaultResponseAggregations = - context.getBean(QueryRestConfig.BEAN_NAME_DEFAULT_RESPONSE_AGGREGATIONS, Set.class);; - defaultResponseAggregationsStr = - defaultResponseAggregations.stream().map(Aggregation::name) - .collect(Collectors.toCollection(LinkedHashSet::new)); - } - - @Override - public void respond(final List>>> results, - final OutputStream out, final Response response) throws Exception { - respond(factory, mapper, defaultResponseAggregationsStr, results, out, response); - } - - - @SuppressWarnings("unchecked") - public static void 
respond(final JsonFactory factory, - final ObjectMapper mapper, final Set defaultResponseAggregationsStr, - final List>>> results, - final OutputStream out, final Response response) throws Exception { - final AtomicReference exception = new AtomicReference<>(); - final JsonGenerator generator = factory.createGenerator(out, JsonEncoding.UTF8); - - try { - if (results.size() > 1) { - generator.writeStartArray(); - } - - results - .forEach(entryy -> { - final DAQQueryElement daqQuery = entryy.getKey(); - final Set includedFields = JSONResponseStreamWriter.getFields(daqQuery, false); - /* make sure identifiers are available */ - includedFields.add(QueryField.channel.name()); - includedFields.add(QueryField.backend.name()); - // issue ATEST-633 - if (!containsAggregation(includedFields)) { - includedFields.addAll(defaultResponseAggregationsStr); - } - - final ObjectWriter writer = JSONResponseStreamWriter.configureWriter(includedFields, mapper); - - /* get DataEvent stream of sub-queries for later match */ - final Map> streams = - new LinkedHashMap<>(results.size()); - final AtomicReference backendQueryRef = new AtomicReference<>(); - - entryy.getValue() - .sequential() - .forEach( - triple -> { - backendQueryRef.compareAndSet(null, triple.getLeft()); - - if (triple.getRight() instanceof Stream) { - streams.put(triple.getMiddle(), ((Stream) triple.getRight())); - } else { - final String message = - String.format("Expect a DataEvent Stream for '%s' but got '%s'.", - triple.getMiddle(), triple.getRight().getClass().getSimpleName()); - LOGGER.warn(message); - streams.put(triple.getMiddle(), Stream.empty()); - } - }); - - final BackendQuery backendQuery = backendQueryRef.get(); - final RequestRange requestRange = backendQuery.getRequest().getRequestRange(); - BinningStrategy binningStrategy = backendQuery.getBinningStrategy(); - - final Mapping mapping = daqQuery.getMappingOrDefault(DEFAULT_MAPPING); - final Padder padder = mapping.getIncomplete().getPadder(backendQuery); - 
- ToLongFunction matchProvider = binningStrategy; - if (binningStrategy == null) { - matchProvider = MATCHER_PROVIDER; - if (requestRange.isPulseIdRangeDefined()) { - binningStrategy = new BinningStrategyPerBinPulse(1); - } else if (requestRange.isTimeRangeDefined()) { - binningStrategy = new BinningStrategyPerBinTime(MILLIS_PER_PULSE); - } else { - final String message = "Either time or pulseId range must be defined by the query!"; - LOGGER.error(message); - throw new IllegalStateException(message); - } - } - binningStrategy.setRequestRange(requestRange); - - /* online matching of the stream's content */ - final StreamMatcher> streamMatcher = - new StreamMatcher<>( - KEY_PROVIDER, - matchProvider, - new ListCreator(), - new ListFiller(), - new BinnedValueCombiner(binningStrategy), - padder, - streams.values()); - final Iterator> streamsMatchIter = streamMatcher.iterator(); - - try { - generator.writeStartObject(); - generator.writeFieldName(JSONResponseStreamWriter.DATA_RESP_FIELD); - writer.writeValue(generator, streamsMatchIter); - generator.writeEndObject(); - } catch (Exception e) { - LOGGER.error("Exception while writing json for '{}'", daqQuery.getChannels(), e); - exception.compareAndSet(null, e); - } finally { - if (streamMatcher != null) { - try { - streamMatcher.close(); - } catch (Throwable t) { - LOGGER.error( - "Something went wrong while closing stream matcher for JSON table response writer.", - t); - } - } - } - }); - } catch (IOException e) { - LOGGER.error("Could not write JSON.", e); - exception.compareAndSet(null, e); - } finally { - if (results.size() > 1) { - generator.writeEndArray(); - } - - generator.flush(); - generator.close(); - } - - if (exception.get() != null) { - throw exception.get(); - } - } - - private static boolean containsAggregation(final Set includedFields) { - for (final Aggregation aggregation : Aggregation.values()) { - if (includedFields.contains(aggregation.name())) { - return true; - } - } - return false; - } -} diff 
--git a/src/main/java/ch/psi/daq/queryrest/response/msgpack/MsgPackHTTPResponse.java b/src/main/java/ch/psi/daq/queryrest/response/msgpack/MsgPackHTTPResponse.java index 7fc7ae4..4b2f33f 100644 --- a/src/main/java/ch/psi/daq/queryrest/response/msgpack/MsgPackHTTPResponse.java +++ b/src/main/java/ch/psi/daq/queryrest/response/msgpack/MsgPackHTTPResponse.java @@ -1,32 +1,20 @@ package ch.psi.daq.queryrest.response.msgpack; import java.io.OutputStream; -import java.util.List; -import java.util.Map.Entry; -import java.util.stream.Stream; import javax.servlet.http.HttpServletResponse; -import org.apache.commons.lang3.tuple.Triple; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; import org.springframework.context.ApplicationContext; import org.springframework.http.MediaType; -import ch.psi.daq.domain.json.ChannelName; -import ch.psi.daq.domain.query.DAQQueries; -import ch.psi.daq.domain.query.DAQQueryElement; -import ch.psi.daq.domain.query.backend.BackendQuery; import ch.psi.daq.domain.query.operation.Compression; import ch.psi.daq.domain.query.response.ResponseFormat; -import ch.psi.daq.queryrest.query.QueryManager; import ch.psi.daq.queryrest.response.AbstractHTTPResponse; +import ch.psi.daq.queryrest.response.ResponseFormatter; import ch.psi.daq.queryrest.response.ResponseStreamWriter; import ch.psi.daq.queryrest.response.json.JSONHTTPResponse; public class MsgPackHTTPResponse extends AbstractHTTPResponse { - private static final Logger LOGGER = LoggerFactory.getLogger(MsgPackHTTPResponse.class); - public static final String FORMAT = "msgp"; public static final String CONTENT_TYPE = MediaType.APPLICATION_OCTET_STREAM_VALUE; @@ -40,30 +28,22 @@ public class MsgPackHTTPResponse extends AbstractHTTPResponse { } @Override - public void respond(final ApplicationContext context, final DAQQueries queries, final HttpServletResponse response) throws Exception { + public void validateQuery(final Object query){ + JSONHTTPResponse.defaultQueryValidation(query); + } + 
+ @Override + public void respond( + final ApplicationContext context, + final HttpServletResponse response, + final Object query, + final R result, + final ResponseFormatter formatter) throws Exception { final OutputStream out = handleCompressionAndResponseHeaders(response, CONTENT_TYPE); - final boolean hasMapping = JSONHTTPResponse.validateQueries(queries); - - try { - LOGGER.debug("Executing query '{}'", queries); - - final QueryManager queryManager = context.getBean(QueryManager.class); - final ResponseStreamWriter streamWriter; - if (hasMapping) { - streamWriter = context.getBean(MsgPackTableResponseStreamWriter.class); - } else { - streamWriter = context.getBean(MsgPackResponseStreamWriter.class); - } - - // execute query - final List>>> result = - queryManager.getEvents(queries); - // write the response back to the client using java 8 streams - streamWriter.respond(result, out, this); - } catch (Exception e) { - LOGGER.error("Failed to execute query '{}'.", queries, e); - throw e; - } + final ResponseStreamWriter streamWriter = context.getBean(MsgPackResponseStreamWriter.class); + + // write the response back to the client using java 8 streams + streamWriter.respond(query, result, out, this, formatter); } } diff --git a/src/main/java/ch/psi/daq/queryrest/response/msgpack/MsgPackResponseStreamWriter.java b/src/main/java/ch/psi/daq/queryrest/response/msgpack/MsgPackResponseStreamWriter.java index 914f9bf..2560006 100644 --- a/src/main/java/ch/psi/daq/queryrest/response/msgpack/MsgPackResponseStreamWriter.java +++ b/src/main/java/ch/psi/daq/queryrest/response/msgpack/MsgPackResponseStreamWriter.java @@ -1,35 +1,19 @@ package ch.psi.daq.queryrest.response.msgpack; -import java.io.OutputStream; -import java.util.List; -import java.util.Map.Entry; -import java.util.stream.Stream; - -import javax.servlet.ServletResponse; - -import org.apache.commons.lang3.tuple.Triple; import org.msgpack.jackson.dataformat.MessagePackFactory; import 
org.springframework.beans.BeansException; import org.springframework.context.ApplicationContext; import org.springframework.context.ApplicationContextAware; +import com.fasterxml.jackson.core.JsonFactory; import com.fasterxml.jackson.databind.ObjectMapper; import ch.psi.daq.domain.backend.Backend; import ch.psi.daq.domain.config.DomainConfig; -import ch.psi.daq.domain.json.ChannelName; -import ch.psi.daq.domain.query.DAQQueryElement; -import ch.psi.daq.domain.query.backend.BackendQuery; -import ch.psi.daq.domain.query.response.Response; import ch.psi.daq.queryrest.config.QueryRestConfig; -import ch.psi.daq.queryrest.response.ResponseStreamWriter; -import ch.psi.daq.queryrest.response.json.JSONResponseStreamWriter; +import ch.psi.daq.queryrest.response.json.AbstractResponseStreamWriter; -/** - * Takes a Java 8 stream and writes it to the output stream provided by the {@link ServletResponse} - * of the current request. - */ -public class MsgPackResponseStreamWriter implements ResponseStreamWriter, ApplicationContextAware { +public class MsgPackResponseStreamWriter extends AbstractResponseStreamWriter implements ApplicationContextAware { private ObjectMapper mapper; private MessagePackFactory factory; @@ -40,11 +24,17 @@ public class MsgPackResponseStreamWriter implements ResponseStreamWriter, Applic mapper = context.getBean(DomainConfig.BEAN_NAME_OBJECT_MAPPER, ObjectMapper.class); factory = context.getBean(QueryRestConfig.BEAN_NAME_MSG_PACK_FACTORY, MessagePackFactory.class); + + super.init(backend); } @Override - public void respond(final List>>> results, - final OutputStream out, final Response response) throws Exception { - JSONResponseStreamWriter.respond(factory, mapper, results, out, response); + protected JsonFactory getJsonFactory() { + return factory; + } + + @Override + protected ObjectMapper getObjectMapper() { + return mapper; } } diff --git a/src/main/java/ch/psi/daq/queryrest/response/msgpack/MsgPackTableResponseStreamWriter.java 
b/src/main/java/ch/psi/daq/queryrest/response/msgpack/MsgPackTableResponseStreamWriter.java deleted file mode 100644 index 4f85667..0000000 --- a/src/main/java/ch/psi/daq/queryrest/response/msgpack/MsgPackTableResponseStreamWriter.java +++ /dev/null @@ -1,64 +0,0 @@ -package ch.psi.daq.queryrest.response.msgpack; - -import java.io.OutputStream; -import java.util.LinkedHashSet; -import java.util.List; -import java.util.Map.Entry; -import java.util.Set; -import java.util.stream.Collectors; -import java.util.stream.Stream; - -import javax.servlet.ServletResponse; - -import org.apache.commons.lang3.tuple.Triple; -import org.msgpack.jackson.dataformat.MessagePackFactory; -import org.springframework.beans.BeansException; -import org.springframework.context.ApplicationContext; -import org.springframework.context.ApplicationContextAware; - -import com.fasterxml.jackson.databind.ObjectMapper; - -import ch.psi.daq.domain.backend.Backend; -import ch.psi.daq.domain.config.DomainConfig; -import ch.psi.daq.domain.json.ChannelName; -import ch.psi.daq.domain.query.DAQQueryElement; -import ch.psi.daq.domain.query.backend.BackendQuery; -import ch.psi.daq.domain.query.operation.Aggregation; -import ch.psi.daq.domain.query.response.Response; -import ch.psi.daq.queryrest.config.QueryRestConfig; -import ch.psi.daq.queryrest.response.ResponseStreamWriter; -import ch.psi.daq.queryrest.response.json.JSONTableResponseStreamWriter; - -/** - * Takes a Java 8 stream and writes it to the output stream provided by the {@link ServletResponse} - * of the current request. 
- */ -public class MsgPackTableResponseStreamWriter implements ResponseStreamWriter, ApplicationContextAware { - private ObjectMapper mapper; - private MessagePackFactory factory; - // In case ArchiverAppliance had several events within the 10ms mapping interval, return these - // aggregations - private Set defaultResponseAggregationsStr; - - @SuppressWarnings("unchecked") - @Override - public void setApplicationContext(ApplicationContext context) throws BeansException { - final Backend backend = context.getBean(DomainConfig.BEAN_NAME_BACKEND_DEFAULT, Backend.class); - context = backend.getApplicationContext(); - - mapper = context.getBean(DomainConfig.BEAN_NAME_OBJECT_MAPPER, ObjectMapper.class); - factory = context.getBean(QueryRestConfig.BEAN_NAME_MSG_PACK_FACTORY, MessagePackFactory.class); - - final Set defaultResponseAggregations = - context.getBean(QueryRestConfig.BEAN_NAME_DEFAULT_RESPONSE_AGGREGATIONS, Set.class);; - defaultResponseAggregationsStr = - defaultResponseAggregations.stream().map(Aggregation::name) - .collect(Collectors.toCollection(LinkedHashSet::new)); - } - - @Override - public void respond(final List>>> results, - final OutputStream out, final Response response) throws Exception { - JSONTableResponseStreamWriter.respond(factory, mapper, defaultResponseAggregationsStr, results, out, response); - } -} diff --git a/src/main/java/ch/psi/daq/queryrest/response/smile/SmileHTTPResponse.java b/src/main/java/ch/psi/daq/queryrest/response/smile/SmileHTTPResponse.java index 0587593..520ae0c 100644 --- a/src/main/java/ch/psi/daq/queryrest/response/smile/SmileHTTPResponse.java +++ b/src/main/java/ch/psi/daq/queryrest/response/smile/SmileHTTPResponse.java @@ -1,32 +1,20 @@ package ch.psi.daq.queryrest.response.smile; import java.io.OutputStream; -import java.util.List; -import java.util.Map.Entry; -import java.util.stream.Stream; import javax.servlet.http.HttpServletResponse; -import org.apache.commons.lang3.tuple.Triple; -import org.slf4j.Logger; 
-import org.slf4j.LoggerFactory; import org.springframework.context.ApplicationContext; import org.springframework.http.MediaType; -import ch.psi.daq.domain.json.ChannelName; -import ch.psi.daq.domain.query.DAQQueries; -import ch.psi.daq.domain.query.DAQQueryElement; -import ch.psi.daq.domain.query.backend.BackendQuery; import ch.psi.daq.domain.query.operation.Compression; import ch.psi.daq.domain.query.response.ResponseFormat; -import ch.psi.daq.queryrest.query.QueryManager; import ch.psi.daq.queryrest.response.AbstractHTTPResponse; +import ch.psi.daq.queryrest.response.ResponseFormatter; import ch.psi.daq.queryrest.response.ResponseStreamWriter; import ch.psi.daq.queryrest.response.json.JSONHTTPResponse; public class SmileHTTPResponse extends AbstractHTTPResponse { - private static final Logger LOGGER = LoggerFactory.getLogger(SmileHTTPResponse.class); - public static final String FORMAT = "smile"; public static final String CONTENT_TYPE = MediaType.APPLICATION_OCTET_STREAM_VALUE; @@ -40,30 +28,22 @@ public class SmileHTTPResponse extends AbstractHTTPResponse { } @Override - public void respond(final ApplicationContext context, final DAQQueries queries, final HttpServletResponse response) throws Exception { + public void validateQuery(final Object query) { + JSONHTTPResponse.defaultQueryValidation(query); + } + + @Override + public void respond( + final ApplicationContext context, + final HttpServletResponse response, + final Object query, + final R result, + final ResponseFormatter formatter) throws Exception { final OutputStream out = handleCompressionAndResponseHeaders(response, CONTENT_TYPE); - final boolean hasMapping = JSONHTTPResponse.validateQueries(queries); + final ResponseStreamWriter streamWriter = context.getBean(SmileResponseStreamWriter.class); - try { - LOGGER.debug("Executing query '{}'", queries); - - final QueryManager queryManager = context.getBean(QueryManager.class); - final ResponseStreamWriter streamWriter; - if (hasMapping) { - 
streamWriter = context.getBean(SmileTableResponseStreamWriter.class); - } else { - streamWriter = context.getBean(SmileResponseStreamWriter.class); - } - - // execute query - final List>>> result = - queryManager.getEvents(queries); - // write the response back to the client using java 8 streams - streamWriter.respond(result, out, this); - } catch (Exception e) { - LOGGER.error("Failed to execute query '{}'.", queries, e); - throw e; - } + // write the response back to the client using java 8 streams + streamWriter.respond(query, result, out, this, formatter); } } diff --git a/src/main/java/ch/psi/daq/queryrest/response/smile/SmileResponseStreamWriter.java b/src/main/java/ch/psi/daq/queryrest/response/smile/SmileResponseStreamWriter.java index 5ab09e0..0ffc3fe 100644 --- a/src/main/java/ch/psi/daq/queryrest/response/smile/SmileResponseStreamWriter.java +++ b/src/main/java/ch/psi/daq/queryrest/response/smile/SmileResponseStreamWriter.java @@ -1,36 +1,19 @@ package ch.psi.daq.queryrest.response.smile; -import java.io.OutputStream; -import java.util.List; -import java.util.Map.Entry; -import java.util.stream.Stream; - -import javax.servlet.ServletResponse; - -import org.apache.commons.lang3.tuple.Triple; import org.springframework.beans.BeansException; import org.springframework.context.ApplicationContext; import org.springframework.context.ApplicationContextAware; +import com.fasterxml.jackson.core.JsonFactory; import com.fasterxml.jackson.databind.ObjectMapper; import com.fasterxml.jackson.dataformat.smile.SmileFactory; import ch.psi.daq.domain.backend.Backend; import ch.psi.daq.domain.config.DomainConfig; -import ch.psi.daq.domain.json.ChannelName; -import ch.psi.daq.domain.query.DAQQueryElement; -import ch.psi.daq.domain.query.backend.BackendQuery; -import ch.psi.daq.domain.query.response.Response; import ch.psi.daq.queryrest.config.QueryRestConfig; -import ch.psi.daq.queryrest.response.ResponseStreamWriter; -import 
ch.psi.daq.queryrest.response.json.JSONResponseStreamWriter; - -/** - * Takes a Java 8 stream and writes it to the output stream provided by the {@link ServletResponse} - * of the current request. - */ -public class SmileResponseStreamWriter implements ResponseStreamWriter, ApplicationContextAware { +import ch.psi.daq.queryrest.response.json.AbstractResponseStreamWriter; +public class SmileResponseStreamWriter extends AbstractResponseStreamWriter implements ApplicationContextAware { private ObjectMapper mapper; private SmileFactory factory; @@ -41,11 +24,17 @@ public class SmileResponseStreamWriter implements ResponseStreamWriter, Applicat mapper = context.getBean(DomainConfig.BEAN_NAME_OBJECT_MAPPER, ObjectMapper.class); factory = context.getBean(QueryRestConfig.BEAN_NAME_SMILE_FACTORY, SmileFactory.class); + + super.init(backend); + } + + @Override + protected JsonFactory getJsonFactory() { + return factory; } @Override - public void respond(final List>>> results, - final OutputStream out, final Response response) throws Exception { - JSONResponseStreamWriter.respond(factory, mapper, results, out, response); + protected ObjectMapper getObjectMapper() { + return mapper; } } diff --git a/src/main/java/ch/psi/daq/queryrest/response/smile/SmileTableResponseStreamWriter.java b/src/main/java/ch/psi/daq/queryrest/response/smile/SmileTableResponseStreamWriter.java deleted file mode 100644 index d635ac6..0000000 --- a/src/main/java/ch/psi/daq/queryrest/response/smile/SmileTableResponseStreamWriter.java +++ /dev/null @@ -1,65 +0,0 @@ -package ch.psi.daq.queryrest.response.smile; - -import java.io.OutputStream; -import java.util.LinkedHashSet; -import java.util.List; -import java.util.Map.Entry; -import java.util.Set; -import java.util.stream.Collectors; -import java.util.stream.Stream; - -import javax.servlet.ServletResponse; - -import org.apache.commons.lang3.tuple.Triple; -import org.springframework.beans.BeansException; -import 
org.springframework.context.ApplicationContext; -import org.springframework.context.ApplicationContextAware; - -import com.fasterxml.jackson.databind.ObjectMapper; -import com.fasterxml.jackson.dataformat.smile.SmileFactory; - -import ch.psi.daq.domain.backend.Backend; -import ch.psi.daq.domain.config.DomainConfig; -import ch.psi.daq.domain.json.ChannelName; -import ch.psi.daq.domain.query.DAQQueryElement; -import ch.psi.daq.domain.query.backend.BackendQuery; -import ch.psi.daq.domain.query.operation.Aggregation; -import ch.psi.daq.domain.query.response.Response; -import ch.psi.daq.queryrest.config.QueryRestConfig; -import ch.psi.daq.queryrest.response.ResponseStreamWriter; -import ch.psi.daq.queryrest.response.json.JSONTableResponseStreamWriter; - -/** - * Takes a Java 8 stream and writes it to the output stream provided by the {@link ServletResponse} - * of the current request. - */ -public class SmileTableResponseStreamWriter implements ResponseStreamWriter, ApplicationContextAware { - - private ObjectMapper mapper; - private SmileFactory factory; - // In case ArchiverAppliance had several events within the 10ms mapping interval, return these - // aggregations - private Set defaultResponseAggregationsStr; - - @SuppressWarnings("unchecked") - @Override - public void setApplicationContext(ApplicationContext context) throws BeansException { - final Backend backend = context.getBean(DomainConfig.BEAN_NAME_BACKEND_DEFAULT, Backend.class); - context = backend.getApplicationContext(); - - mapper = context.getBean(DomainConfig.BEAN_NAME_OBJECT_MAPPER, ObjectMapper.class); - factory = context.getBean(QueryRestConfig.BEAN_NAME_SMILE_FACTORY, SmileFactory.class); - - final Set defaultResponseAggregations = - context.getBean(QueryRestConfig.BEAN_NAME_DEFAULT_RESPONSE_AGGREGATIONS, Set.class);; - defaultResponseAggregationsStr = - defaultResponseAggregations.stream().map(Aggregation::name) - .collect(Collectors.toCollection(LinkedHashSet::new)); - } - - @Override - public 
void respond(final List>>> results, - final OutputStream out, final Response response) throws Exception { - JSONTableResponseStreamWriter.respond(factory, mapper, defaultResponseAggregationsStr, results, out, response); - } -} diff --git a/src/main/resources/queryrest.properties b/src/main/resources/queryrest.properties index b1a24c1..ce6e060 100644 --- a/src/main/resources/queryrest.properties +++ b/src/main/resources/queryrest.properties @@ -1,9 +1,13 @@ -# defines the fields that are included in the response +# defines the fields that are included in the response of an event query # if no fields have been specified by the user -queryrest.default.response.fields=channel,backend,pulseId,globalSeconds,iocSeconds,shape,eventCount,value +queryrest.response.fields.event.query=channel,backend,pulseId,globalSeconds,iocSeconds,shape,eventCount,value # aggregation which are included in the response by default if aggregation is enabled for a given query -queryrest.default.response.aggregations=min,mean,max +queryrest.response.fields.event.query.aggregations=min,mean,max + +# defines the fields that are included in the response of a config query +queryrest.response.fields.config.query=name,backend,pulseId,globalSeconds,type,shape,source +queryrest.response.fields.config.historic=name,backend,type,shape,source,description # defines if the writer is a local writer (can write data to filesystem) filestorage.writer.local=false diff --git a/src/test/java/ch/psi/daq/test/queryrest/backend/BackendTest.java b/src/test/java/ch/psi/daq/test/queryrest/backend/BackendTest.java index c86bbb2..5f1edb1 100644 --- a/src/test/java/ch/psi/daq/test/queryrest/backend/BackendTest.java +++ b/src/test/java/ch/psi/daq/test/queryrest/backend/BackendTest.java @@ -21,9 +21,7 @@ import org.springframework.context.ApplicationContext; import ch.psi.daq.common.serialization.SerializationHelper; import ch.psi.daq.domain.backend.Backend; import ch.psi.daq.domain.config.DomainConfig; -import 
ch.psi.daq.domain.query.channels.ChannelNameCache; import ch.psi.daq.filestorage.config.FileStorageConfig; -import ch.psi.daq.query.config.QueryConfig; import ch.psi.daq.test.queryrest.AbstractDaqRestTest; public class BackendTest extends AbstractDaqRestTest { @@ -178,20 +176,4 @@ public class BackendTest extends AbstractDaqRestTest { assertSame(parentContext, context.getParent()); } } - - @Test - public void testSingleInstance_01() throws Exception { - final Set caches = new HashSet<>(); - for (final Backend backend : Backend.getBackends()) { - ChannelNameCache cache = - backend.getApplicationContext().getBean(QueryConfig.BEAN_NAME_CHANNEL_NAME_CACHE, - ChannelNameCache.class); - caches.add(cache); - } - - assertEquals(1, caches.size()); - assertSame( - context.getBean(QueryConfig.BEAN_NAME_CHANNEL_NAME_CACHE, ChannelNameCache.class), - caches.iterator().next()); - } } diff --git a/src/test/java/ch/psi/daq/test/queryrest/config/DaqWebMvcConfig.java b/src/test/java/ch/psi/daq/test/queryrest/config/DaqWebMvcConfig.java index 41a51bd..799ad1f 100644 --- a/src/test/java/ch/psi/daq/test/queryrest/config/DaqWebMvcConfig.java +++ b/src/test/java/ch/psi/daq/test/queryrest/config/DaqWebMvcConfig.java @@ -24,7 +24,6 @@ import ch.psi.daq.domain.backend.Backend; import ch.psi.daq.domain.backend.BackendType; import ch.psi.daq.domain.config.DomainConfig; import ch.psi.daq.domain.events.ChannelEvent; -import ch.psi.daq.domain.query.channels.reader.ChannelInfoReader; import ch.psi.daq.domain.query.processor.QueryProcessor; import ch.psi.daq.domain.reader.StreamEventReader; import ch.psi.daq.domain.test.reader.TestReader; @@ -68,7 +67,6 @@ public class DaqWebMvcConfig extends WebMvcConfigurerAdapter { // backendType.initBean(backend, BEAN_NAME_READER, DataReader.class, overload); backendType.initBean(backend, BEAN_NAME_READER, StreamEventReader.class, overload, backend); - backendType.initBean(backend, BEAN_NAME_READER, ChannelInfoReader.class, overload, backend); } } diff --git 
a/src/test/java/ch/psi/daq/test/queryrest/controller/AbstractQueryRestControllerTableTest.java b/src/test/java/ch/psi/daq/test/queryrest/controller/AbstractQueryRestControllerTableTest.java index 9c3503f..225766b 100644 --- a/src/test/java/ch/psi/daq/test/queryrest/controller/AbstractQueryRestControllerTableTest.java +++ b/src/test/java/ch/psi/daq/test/queryrest/controller/AbstractQueryRestControllerTableTest.java @@ -24,7 +24,7 @@ import ch.psi.daq.domain.config.DomainConfig; import ch.psi.daq.domain.json.ChannelEventTableImpl; import ch.psi.daq.domain.query.DAQQuery; import ch.psi.daq.domain.query.mapping.Mapping; -import ch.psi.daq.domain.query.operation.QueryField; +import ch.psi.daq.domain.query.operation.EventField; import ch.psi.daq.domain.query.response.Response; import ch.psi.daq.domain.request.range.RequestRangePulseId; import ch.psi.daq.test.queryrest.AbstractDaqRestTest; @@ -58,12 +58,12 @@ public abstract class AbstractQueryRestControllerTableTest extends AbstractDaqRe 101), TEST_CHANNEL_NAMES); request.setMapping(new Mapping()); - request.addField(QueryField.pulseId); - request.addField(QueryField.globalSeconds); - request.addField(QueryField.globalMillis); - request.addField(QueryField.iocSeconds); - request.addField(QueryField.iocMillis); - request.addField(QueryField.value); + request.addField(EventField.pulseId); + request.addField(EventField.globalSeconds); + request.addField(EventField.globalMillis); + request.addField(EventField.iocSeconds); + request.addField(EventField.iocMillis); + request.addField(EventField.value); request.setResponse(getResponse()); String content = mapper.writeValueAsString(request); diff --git a/src/test/java/ch/psi/daq/test/queryrest/controller/AbstractQueryRestControllerTest.java b/src/test/java/ch/psi/daq/test/queryrest/controller/AbstractQueryRestControllerTest.java index e505ee0..9796ef2 100644 --- a/src/test/java/ch/psi/daq/test/queryrest/controller/AbstractQueryRestControllerTest.java +++ 
b/src/test/java/ch/psi/daq/test/queryrest/controller/AbstractQueryRestControllerTest.java @@ -24,7 +24,7 @@ import ch.psi.daq.domain.config.DomainConfig; import ch.psi.daq.domain.json.ChannelEventsImpl; import ch.psi.daq.domain.json.ChannelEventsList; import ch.psi.daq.domain.query.DAQQuery; -import ch.psi.daq.domain.query.operation.QueryField; +import ch.psi.daq.domain.query.operation.EventField; import ch.psi.daq.domain.query.response.Response; import ch.psi.daq.domain.request.range.RequestRangePulseId; import ch.psi.daq.test.queryrest.AbstractDaqRestTest; @@ -56,12 +56,12 @@ public abstract class AbstractQueryRestControllerTest extends AbstractDaqRestTes 100, 101), TEST_CHANNEL_NAMES); - request.addField(QueryField.pulseId); - request.addField(QueryField.globalSeconds); - request.addField(QueryField.globalMillis); - request.addField(QueryField.iocSeconds); - request.addField(QueryField.iocMillis); - request.addField(QueryField.value); + request.addField(EventField.pulseId); + request.addField(EventField.globalSeconds); + request.addField(EventField.globalMillis); + request.addField(EventField.iocSeconds); + request.addField(EventField.iocMillis); + request.addField(EventField.value); request.setResponse(getResponse()); String content = mapper.writeValueAsString(request); diff --git a/src/test/java/ch/psi/daq/test/queryrest/controller/CSVQueryRestControllerTest.java b/src/test/java/ch/psi/daq/test/queryrest/controller/CSVQueryRestControllerTest.java index 3982643..f2c8188 100644 --- a/src/test/java/ch/psi/daq/test/queryrest/controller/CSVQueryRestControllerTest.java +++ b/src/test/java/ch/psi/daq/test/queryrest/controller/CSVQueryRestControllerTest.java @@ -33,7 +33,7 @@ import ch.psi.daq.domain.query.operation.AggregationDescriptor; import ch.psi.daq.domain.query.operation.AggregationType; import ch.psi.daq.domain.query.operation.Compression; import ch.psi.daq.domain.query.operation.Extrema; -import ch.psi.daq.domain.query.operation.QueryField; +import 
ch.psi.daq.domain.query.operation.EventField; import ch.psi.daq.domain.request.range.RequestRangeDate; import ch.psi.daq.domain.request.range.RequestRangePulseId; import ch.psi.daq.domain.request.range.RequestRangeTime; @@ -65,16 +65,16 @@ public class CSVQueryRestControllerTest extends AbstractDaqRestTest { channels); request.setResponse(new CSVHTTPResponse()); - LinkedHashSet queryFields = new LinkedHashSet<>(); - queryFields.add(QueryField.channel); - queryFields.add(QueryField.pulseId); - queryFields.add(QueryField.iocSeconds); - queryFields.add(QueryField.iocMillis); - queryFields.add(QueryField.globalSeconds); - queryFields.add(QueryField.globalMillis); - queryFields.add(QueryField.shape); - queryFields.add(QueryField.eventCount); - queryFields.add(QueryField.value); + LinkedHashSet queryFields = new LinkedHashSet<>(); + queryFields.add(EventField.channel); + queryFields.add(EventField.pulseId); + queryFields.add(EventField.iocSeconds); + queryFields.add(EventField.iocMillis); + queryFields.add(EventField.globalSeconds); + queryFields.add(EventField.globalMillis); + queryFields.add(EventField.shape); + queryFields.add(EventField.eventCount); + queryFields.add(EventField.value); request.setFields(queryFields); String content = mapper.writeValueAsString(request); @@ -107,7 +107,7 @@ public class CSVQueryRestControllerTest extends AbstractDaqRestTest { assertEquals(queryFields.size() * channels.size(), record.size()); int column = 0; for (String channel : channels) { - for (QueryField queryField : queryFields) { + for (EventField queryField : queryFields) { assertEquals(channel + CSVResponseStreamWriter.DELIMITER_CHANNELNAME_FIELDNAME + queryField.name(), record.get(column++)); } @@ -160,11 +160,11 @@ public class CSVQueryRestControllerTest extends AbstractDaqRestTest { channels); request.setResponse(new CSVHTTPResponse()); - LinkedHashSet queryFields = new LinkedHashSet<>(); - queryFields.add(QueryField.channel); - queryFields.add(QueryField.pulseId); - 
queryFields.add(QueryField.globalMillis); - queryFields.add(QueryField.value); + LinkedHashSet queryFields = new LinkedHashSet<>(); + queryFields.add(EventField.channel); + queryFields.add(EventField.pulseId); + queryFields.add(EventField.globalMillis); + queryFields.add(EventField.value); request.setFields(queryFields); String content = mapper.writeValueAsString(request); @@ -196,7 +196,7 @@ public class CSVQueryRestControllerTest extends AbstractDaqRestTest { assertEquals(queryFields.size() * channels.size(), record.size()); int column = 0; for (String channel : channels) { - for (QueryField queryField : queryFields) { + for (EventField queryField : queryFields) { assertEquals(channel + CSVResponseStreamWriter.DELIMITER_CHANNELNAME_FIELDNAME + queryField.name(), record.get(column++)); } @@ -252,9 +252,9 @@ public class CSVQueryRestControllerTest extends AbstractDaqRestTest { channels); request.setResponse(new CSVHTTPResponse()); - LinkedHashSet queryFields = new LinkedHashSet<>(); - queryFields.add(QueryField.channel); - queryFields.add(QueryField.value); + LinkedHashSet queryFields = new LinkedHashSet<>(); + queryFields.add(EventField.channel); + queryFields.add(EventField.value); request.setFields(queryFields); String content = mapper.writeValueAsString(request); @@ -287,12 +287,12 @@ public class CSVQueryRestControllerTest extends AbstractDaqRestTest { assertEquals((queryFields.size() + 1) * channels.size(), record.size()); int column = 0; for (String channel : channels) { - for (QueryField queryField : queryFields) { + for (EventField queryField : queryFields) { assertEquals(channel + CSVResponseStreamWriter.DELIMITER_CHANNELNAME_FIELDNAME + queryField.name(), record.get(column++)); } assertEquals( - channel + CSVResponseStreamWriter.DELIMITER_CHANNELNAME_FIELDNAME + QueryField.globalMillis.name(), + channel + CSVResponseStreamWriter.DELIMITER_CHANNELNAME_FIELDNAME + EventField.globalMillis.name(), record.get(column++)); } @@ -430,16 +430,16 @@ public class 
CSVQueryRestControllerTest extends AbstractDaqRestTest { channels); request.setResponse(new CSVHTTPResponse()); - LinkedHashSet queryFields = new LinkedHashSet<>(); - queryFields.add(QueryField.channel); - queryFields.add(QueryField.pulseId); - queryFields.add(QueryField.iocSeconds); - queryFields.add(QueryField.iocMillis); - queryFields.add(QueryField.globalSeconds); - queryFields.add(QueryField.globalMillis); - queryFields.add(QueryField.shape); - queryFields.add(QueryField.eventCount); - queryFields.add(QueryField.value); + LinkedHashSet queryFields = new LinkedHashSet<>(); + queryFields.add(EventField.channel); + queryFields.add(EventField.pulseId); + queryFields.add(EventField.iocSeconds); + queryFields.add(EventField.iocMillis); + queryFields.add(EventField.globalSeconds); + queryFields.add(EventField.globalMillis); + queryFields.add(EventField.shape); + queryFields.add(EventField.eventCount); + queryFields.add(EventField.value); request.setFields(queryFields); String content = mapper.writeValueAsString(request); @@ -472,7 +472,7 @@ public class CSVQueryRestControllerTest extends AbstractDaqRestTest { assertEquals(queryFields.size() * channels.size(), record.size()); int column = 0; for (String channel : channels) { - for (QueryField queryField : queryFields) { + for (EventField queryField : queryFields) { assertEquals(channel + CSVResponseStreamWriter.DELIMITER_CHANNELNAME_FIELDNAME + queryField.name(), record.get(column++)); } @@ -514,16 +514,16 @@ public class CSVQueryRestControllerTest extends AbstractDaqRestTest { channels); request.setResponse(new CSVHTTPResponse()); - LinkedHashSet queryFields = new LinkedHashSet<>(); - queryFields.add(QueryField.channel); - queryFields.add(QueryField.pulseId); - queryFields.add(QueryField.iocSeconds); - queryFields.add(QueryField.iocMillis); - queryFields.add(QueryField.globalSeconds); - queryFields.add(QueryField.globalMillis); - queryFields.add(QueryField.shape); - queryFields.add(QueryField.eventCount); - 
queryFields.add(QueryField.value); + LinkedHashSet queryFields = new LinkedHashSet<>(); + queryFields.add(EventField.channel); + queryFields.add(EventField.pulseId); + queryFields.add(EventField.iocSeconds); + queryFields.add(EventField.iocMillis); + queryFields.add(EventField.globalSeconds); + queryFields.add(EventField.globalMillis); + queryFields.add(EventField.shape); + queryFields.add(EventField.eventCount); + queryFields.add(EventField.value); request.setFields(queryFields); String content = mapper.writeValueAsString(request); @@ -556,7 +556,7 @@ public class CSVQueryRestControllerTest extends AbstractDaqRestTest { assertEquals(queryFields.size() * channels.size(), record.size()); int column = 0; for (String channel : channels) { - for (QueryField queryField : queryFields) { + for (EventField queryField : queryFields) { assertEquals(channel + CSVResponseStreamWriter.DELIMITER_CHANNELNAME_FIELDNAME + queryField.name(), record.get(column++)); } @@ -599,18 +599,18 @@ public class CSVQueryRestControllerTest extends AbstractDaqRestTest { channels); request.setResponse(new CSVHTTPResponse()); - LinkedHashSet queryFields = new LinkedHashSet<>(); - queryFields.add(QueryField.channel); - queryFields.add(QueryField.pulseId); - queryFields.add(QueryField.iocSeconds); - queryFields.add(QueryField.iocDate); - queryFields.add(QueryField.iocMillis); - queryFields.add(QueryField.globalSeconds); - queryFields.add(QueryField.globalDate); - queryFields.add(QueryField.globalMillis); - queryFields.add(QueryField.shape); - queryFields.add(QueryField.eventCount); - queryFields.add(QueryField.value); + LinkedHashSet queryFields = new LinkedHashSet<>(); + queryFields.add(EventField.channel); + queryFields.add(EventField.pulseId); + queryFields.add(EventField.iocSeconds); + queryFields.add(EventField.iocDate); + queryFields.add(EventField.iocMillis); + queryFields.add(EventField.globalSeconds); + queryFields.add(EventField.globalDate); + queryFields.add(EventField.globalMillis); + 
queryFields.add(EventField.shape); + queryFields.add(EventField.eventCount); + queryFields.add(EventField.value); request.setFields(queryFields); String content = mapper.writeValueAsString(request); @@ -643,7 +643,7 @@ public class CSVQueryRestControllerTest extends AbstractDaqRestTest { assertEquals(queryFields.size() * channels.size(), record.size()); int column = 0; for (String channel : channels) { - for (QueryField queryField : queryFields) { + for (EventField queryField : queryFields) { assertEquals(channel + CSVResponseStreamWriter.DELIMITER_CHANNELNAME_FIELDNAME + queryField.name(), record.get(column++)); } @@ -756,15 +756,15 @@ public class CSVQueryRestControllerTest extends AbstractDaqRestTest { request.setAggregation(new AggregationDescriptor().setNrOfBins(2).setAggregations(aggregations)); request.setResponse(new CSVHTTPResponse()); - LinkedHashSet queryFields = new LinkedHashSet<>(); - queryFields.add(QueryField.channel); - queryFields.add(QueryField.pulseId); - queryFields.add(QueryField.iocSeconds); - queryFields.add(QueryField.iocMillis); - queryFields.add(QueryField.globalSeconds); - queryFields.add(QueryField.globalMillis); - queryFields.add(QueryField.shape); - queryFields.add(QueryField.eventCount); + LinkedHashSet queryFields = new LinkedHashSet<>(); + queryFields.add(EventField.channel); + queryFields.add(EventField.pulseId); + queryFields.add(EventField.iocSeconds); + queryFields.add(EventField.iocMillis); + queryFields.add(EventField.globalSeconds); + queryFields.add(EventField.globalMillis); + queryFields.add(EventField.shape); + queryFields.add(EventField.eventCount); request.setFields(queryFields); String content = mapper.writeValueAsString(request); @@ -797,12 +797,12 @@ public class CSVQueryRestControllerTest extends AbstractDaqRestTest { assertEquals((queryFields.size() + aggregations.size()) * channels.size(), record.size()); int column = 0; for (String channel : channels) { - for (QueryField queryField : queryFields) { + for 
(EventField queryField : queryFields) { assertEquals(channel + CSVResponseStreamWriter.DELIMITER_CHANNELNAME_FIELDNAME + queryField.name(), record.get(column++)); } for (Aggregation aggregation : aggregations) { - assertEquals(channel + CSVResponseStreamWriter.DELIMITER_CHANNELNAME_FIELDNAME + QueryField.value + assertEquals(channel + CSVResponseStreamWriter.DELIMITER_CHANNELNAME_FIELDNAME + EventField.value + CSVResponseStreamWriter.DELIMITER_CHANNELNAME_FIELDNAME + aggregation.name(), record.get(column++)); } @@ -859,21 +859,21 @@ public class CSVQueryRestControllerTest extends AbstractDaqRestTest { .setExtrema(extrema)); request.setResponse(new CSVHTTPResponse()); - LinkedHashSet queryFields = new LinkedHashSet<>(); - queryFields.add(QueryField.channel); - queryFields.add(QueryField.pulseId); - queryFields.add(QueryField.iocSeconds); - queryFields.add(QueryField.iocMillis); - queryFields.add(QueryField.globalSeconds); - queryFields.add(QueryField.globalMillis); - queryFields.add(QueryField.shape); - queryFields.add(QueryField.eventCount); - queryFields.add(QueryField.value); + LinkedHashSet queryFields = new LinkedHashSet<>(); + queryFields.add(EventField.channel); + queryFields.add(EventField.pulseId); + queryFields.add(EventField.iocSeconds); + queryFields.add(EventField.iocMillis); + queryFields.add(EventField.globalSeconds); + queryFields.add(EventField.globalMillis); + queryFields.add(EventField.shape); + queryFields.add(EventField.eventCount); + queryFields.add(EventField.value); request.setFields(queryFields); - Set extremaFields = new LinkedHashSet<>(); + Set extremaFields = new LinkedHashSet<>(); for (Extrema extremum : extrema) { - for (QueryField queryField : queryFields) { + for (EventField queryField : queryFields) { if (extremum.getAccessor(queryField) != null) { extremaFields.add(queryField); } @@ -900,7 +900,7 @@ public class CSVQueryRestControllerTest extends AbstractDaqRestTest { CSVParser csvParser = new CSVParser(reader, csvFormat); // will 
not be included as it is an aggregation - queryFields.remove(QueryField.value); + queryFields.remove(EventField.value); try { long pulse = 0; int totalRows = 2; @@ -914,17 +914,17 @@ public class CSVQueryRestControllerTest extends AbstractDaqRestTest { record.size()); int column = 0; for (String channel : channels) { - for (QueryField queryField : queryFields) { + for (EventField queryField : queryFields) { assertEquals(channel + CSVResponseStreamWriter.DELIMITER_CHANNELNAME_FIELDNAME + queryField.name(), record.get(column++)); } for (Aggregation aggregation : aggregations) { - assertEquals(channel + CSVResponseStreamWriter.DELIMITER_CHANNELNAME_FIELDNAME + QueryField.value + assertEquals(channel + CSVResponseStreamWriter.DELIMITER_CHANNELNAME_FIELDNAME + EventField.value + CSVResponseStreamWriter.DELIMITER_CHANNELNAME_FIELDNAME + aggregation.name(), record.get(column++)); } for (Extrema extremum : extrema) { - for (QueryField queryField : extremaFields) { + for (EventField queryField : extremaFields) { assertEquals(channel + CSVResponseStreamWriter.DELIMITER_CHANNELNAME_FIELDNAME + CSVResponseStreamWriter.FIELDNAME_EXTREMA + CSVResponseStreamWriter.DELIMITER_CHANNELNAME_FIELDNAME + extremum.name() @@ -995,15 +995,15 @@ public class CSVQueryRestControllerTest extends AbstractDaqRestTest { request.setAggregation(new AggregationDescriptor().setDurationPerBin(100).setAggregations(aggregations)); request.setResponse(new CSVHTTPResponse()); - LinkedHashSet queryFields = new LinkedHashSet<>(); - queryFields.add(QueryField.channel); - queryFields.add(QueryField.pulseId); - queryFields.add(QueryField.iocSeconds); - queryFields.add(QueryField.iocMillis); - queryFields.add(QueryField.globalSeconds); - queryFields.add(QueryField.globalMillis); - queryFields.add(QueryField.shape); - queryFields.add(QueryField.eventCount); + LinkedHashSet queryFields = new LinkedHashSet<>(); + queryFields.add(EventField.channel); + queryFields.add(EventField.pulseId); + 
queryFields.add(EventField.iocSeconds); + queryFields.add(EventField.iocMillis); + queryFields.add(EventField.globalSeconds); + queryFields.add(EventField.globalMillis); + queryFields.add(EventField.shape); + queryFields.add(EventField.eventCount); request.setFields(queryFields); String content = mapper.writeValueAsString(request); @@ -1025,7 +1025,7 @@ public class CSVQueryRestControllerTest extends AbstractDaqRestTest { } private void checkDateRangeQueryBinSizeAggregate(final List channels, final List aggregations, - final Set queryFields, final String response) throws Exception { + final Set queryFields, final String response) throws Exception { CSVFormat csvFormat = CSVFormat.EXCEL.withDelimiter(CSVResponseStreamWriter.DELIMITER_CVS); StringReader reader = new StringReader(response); CSVParser csvParser = new CSVParser(reader, csvFormat); @@ -1041,12 +1041,12 @@ public class CSVQueryRestControllerTest extends AbstractDaqRestTest { assertEquals((queryFields.size() + aggregations.size()) * channels.size(), record.size()); int column = 0; for (String channel : channels) { - for (QueryField queryField : queryFields) { + for (EventField queryField : queryFields) { assertEquals(channel + CSVResponseStreamWriter.DELIMITER_CHANNELNAME_FIELDNAME + queryField.name(), record.get(column++)); } for (Aggregation aggregation : aggregations) { - assertEquals(channel + CSVResponseStreamWriter.DELIMITER_CHANNELNAME_FIELDNAME + QueryField.value + assertEquals(channel + CSVResponseStreamWriter.DELIMITER_CHANNELNAME_FIELDNAME + EventField.value + CSVResponseStreamWriter.DELIMITER_CHANNELNAME_FIELDNAME + aggregation.name(), record.get(column++)); } @@ -1102,15 +1102,15 @@ public class CSVQueryRestControllerTest extends AbstractDaqRestTest { request.setAggregation(new AggregationDescriptor().setDurationPerBin(100)); request.setResponse(new CSVHTTPResponse()); - LinkedHashSet queryFields = new LinkedHashSet<>(); - queryFields.add(QueryField.channel); - 
queryFields.add(QueryField.pulseId); - queryFields.add(QueryField.iocSeconds); - queryFields.add(QueryField.iocMillis); - queryFields.add(QueryField.globalSeconds); - queryFields.add(QueryField.globalMillis); - queryFields.add(QueryField.shape); - queryFields.add(QueryField.eventCount); + LinkedHashSet queryFields = new LinkedHashSet<>(); + queryFields.add(EventField.channel); + queryFields.add(EventField.pulseId); + queryFields.add(EventField.iocSeconds); + queryFields.add(EventField.iocMillis); + queryFields.add(EventField.globalSeconds); + queryFields.add(EventField.globalMillis); + queryFields.add(EventField.shape); + queryFields.add(EventField.eventCount); request.setFields(queryFields); String content = mapper.writeValueAsString(request); @@ -1144,9 +1144,9 @@ public class CSVQueryRestControllerTest extends AbstractDaqRestTest { channels); request.setResponse(new CSVHTTPResponse()); - LinkedHashSet queryFields = new LinkedHashSet<>(); - queryFields.add(QueryField.channel); - queryFields.add(QueryField.value); + LinkedHashSet queryFields = new LinkedHashSet<>(); + queryFields.add(EventField.channel); + queryFields.add(EventField.value); request.setFields(queryFields); String content = mapper.writeValueAsString(request); diff --git a/src/test/java/ch/psi/daq/test/queryrest/controller/JsonQueryRestControllerTableTest.java b/src/test/java/ch/psi/daq/test/queryrest/controller/JsonQueryRestControllerTableTest.java index 86bf2ce..4a0368f 100644 --- a/src/test/java/ch/psi/daq/test/queryrest/controller/JsonQueryRestControllerTableTest.java +++ b/src/test/java/ch/psi/daq/test/queryrest/controller/JsonQueryRestControllerTableTest.java @@ -41,7 +41,7 @@ import ch.psi.daq.domain.query.operation.Aggregation; import ch.psi.daq.domain.query.operation.AggregationDescriptor; import ch.psi.daq.domain.query.operation.AggregationType; import ch.psi.daq.domain.query.operation.Extrema; -import ch.psi.daq.domain.query.operation.QueryField; +import 
ch.psi.daq.domain.query.operation.EventField; import ch.psi.daq.domain.query.transform.ValueTransformationSequence; import ch.psi.daq.domain.query.transform.image.ImageDownScaleValueTransformation; import ch.psi.daq.domain.query.transform.image.ImageEncodingValueTransformation; @@ -94,11 +94,11 @@ public class JsonQueryRestControllerTableTest extends AbstractDaqRestTest implem 101), TEST_CHANNEL_NAMES); request.setMapping(new Mapping()); - request.addField(QueryField.pulseId); - request.addField(QueryField.globalSeconds); - request.addField(QueryField.globalMillis); - request.addField(QueryField.iocSeconds); - request.addField(QueryField.iocMillis); + request.addField(EventField.pulseId); + request.addField(EventField.globalSeconds); + request.addField(EventField.globalMillis); + request.addField(EventField.iocSeconds); + request.addField(EventField.iocMillis); String content = mapper.writeValueAsString(request); System.out.println(content); @@ -172,12 +172,12 @@ public class JsonQueryRestControllerTableTest extends AbstractDaqRestTest implem 101), TEST_CHANNEL_NAMES); request.setMapping(new Mapping()); - request.addField(QueryField.pulseId); - request.addField(QueryField.globalSeconds); - request.addField(QueryField.globalMillis); - request.addField(QueryField.iocSeconds); - request.addField(QueryField.iocMillis); - request.addField(QueryField.value); + request.addField(EventField.pulseId); + request.addField(EventField.globalSeconds); + request.addField(EventField.globalMillis); + request.addField(EventField.iocSeconds); + request.addField(EventField.iocMillis); + request.addField(EventField.value); AggregationDescriptor aggregation = new AggregationDescriptor(AggregationType.value); aggregation.setNrOfBins(1); @@ -237,11 +237,11 @@ public class JsonQueryRestControllerTableTest extends AbstractDaqRestTest implem 101), TEST_CHANNEL_02, TEST_CHANNEL_01); request.setMapping(new Mapping()); - request.addField(QueryField.pulseId); - 
request.addField(QueryField.globalSeconds); - request.addField(QueryField.globalMillis); - request.addField(QueryField.iocSeconds); - request.addField(QueryField.iocMillis); + request.addField(EventField.pulseId); + request.addField(EventField.globalSeconds); + request.addField(EventField.globalMillis); + request.addField(EventField.iocSeconds); + request.addField(EventField.iocMillis); String content = mapper.writeValueAsString(request); System.out.println(content); @@ -314,11 +314,11 @@ public class JsonQueryRestControllerTableTest extends AbstractDaqRestTest implem 101), TEST_CHANNEL_01); request.setMapping(new Mapping()); - request.addField(QueryField.pulseId); - request.addField(QueryField.globalSeconds); - request.addField(QueryField.globalMillis); - request.addField(QueryField.iocSeconds); - request.addField(QueryField.iocMillis); + request.addField(EventField.pulseId); + request.addField(EventField.globalSeconds); + request.addField(EventField.globalMillis); + request.addField(EventField.iocSeconds); + request.addField(EventField.iocMillis); String content = mapper.writeValueAsString(request); System.out.println(content); @@ -372,8 +372,8 @@ public class JsonQueryRestControllerTableTest extends AbstractDaqRestTest implem new AggregationDescriptor().setNrOfBins(2), TEST_CHANNEL_NAMES); request.setMapping(new Mapping()); - request.addField(QueryField.pulseId); - request.addField(QueryField.eventCount); + request.addField(EventField.pulseId); + request.addField(EventField.eventCount); String content = mapper.writeValueAsString(request); System.out.println(content); @@ -2230,11 +2230,11 @@ public class JsonQueryRestControllerTableTest extends AbstractDaqRestTest implem 100, 101), channelName); - request.addField(QueryField.pulseId); - request.addField(QueryField.globalSeconds); - request.addField(QueryField.globalMillis); - request.addField(QueryField.iocSeconds); - request.addField(QueryField.iocMillis); + request.addField(EventField.pulseId); + 
request.addField(EventField.globalSeconds); + request.addField(EventField.globalMillis); + request.addField(EventField.iocSeconds); + request.addField(EventField.iocMillis); request.setMapping(new Mapping()); request.addValueTransformation( new ValueTransformationSequence( @@ -2277,11 +2277,11 @@ public class JsonQueryRestControllerTableTest extends AbstractDaqRestTest implem 100, 101), channelName); - request.addField(QueryField.pulseId); - request.addField(QueryField.globalSeconds); - request.addField(QueryField.globalMillis); - request.addField(QueryField.iocSeconds); - request.addField(QueryField.iocMillis); + request.addField(EventField.pulseId); + request.addField(EventField.globalSeconds); + request.addField(EventField.globalMillis); + request.addField(EventField.iocSeconds); + request.addField(EventField.iocMillis); request.setMapping(new Mapping()); request.addValueTransformation( new ValueTransformationSequence( @@ -2321,11 +2321,11 @@ public class JsonQueryRestControllerTableTest extends AbstractDaqRestTest implem 100, 101), channelName, channelName2); - request.addField(QueryField.pulseId); - request.addField(QueryField.globalSeconds); - request.addField(QueryField.globalMillis); - request.addField(QueryField.iocSeconds); - request.addField(QueryField.iocMillis); + request.addField(EventField.pulseId); + request.addField(EventField.globalSeconds); + request.addField(EventField.globalMillis); + request.addField(EventField.iocSeconds); + request.addField(EventField.iocMillis); request.setMapping(new Mapping()); request.addValueTransformation( new ValueTransformationSequence( diff --git a/src/test/java/ch/psi/daq/test/queryrest/controller/JsonQueryRestControllerTest.java b/src/test/java/ch/psi/daq/test/queryrest/controller/JsonQueryRestControllerTest.java index 57bc6a3..268e4da 100644 --- a/src/test/java/ch/psi/daq/test/queryrest/controller/JsonQueryRestControllerTest.java +++ 
b/src/test/java/ch/psi/daq/test/queryrest/controller/JsonQueryRestControllerTest.java @@ -37,7 +37,7 @@ import ch.psi.daq.domain.query.operation.AggregationDescriptor; import ch.psi.daq.domain.query.operation.AggregationType; import ch.psi.daq.domain.query.operation.Compression; import ch.psi.daq.domain.query.operation.Extrema; -import ch.psi.daq.domain.query.operation.QueryField; +import ch.psi.daq.domain.query.operation.EventField; import ch.psi.daq.domain.query.transform.ValueTransformationSequence; import ch.psi.daq.domain.query.transform.image.ImageDownScaleValueTransformation; import ch.psi.daq.domain.query.transform.image.ImageEncodingValueTransformation; @@ -289,11 +289,11 @@ public class JsonQueryRestControllerTest extends AbstractDaqRestTest implements 100, 101), TEST_CHANNEL_NAMES); - request.addField(QueryField.pulseId); - request.addField(QueryField.globalSeconds); - request.addField(QueryField.globalMillis); - request.addField(QueryField.iocSeconds); - request.addField(QueryField.iocMillis); + request.addField(EventField.pulseId); + request.addField(EventField.globalSeconds); + request.addField(EventField.globalMillis); + request.addField(EventField.iocSeconds); + request.addField(EventField.iocMillis); String content = mapper.writeValueAsString(request); System.out.println(content); @@ -350,8 +350,8 @@ public class JsonQueryRestControllerTest extends AbstractDaqRestTest implements 199), new AggregationDescriptor().setNrOfBins(2), TEST_CHANNEL_NAMES); - request.addField(QueryField.pulseId); - request.addField(QueryField.eventCount); + request.addField(EventField.pulseId); + request.addField(EventField.eventCount); String content = mapper.writeValueAsString(request); System.out.println(content); @@ -1210,11 +1210,11 @@ public class JsonQueryRestControllerTest extends AbstractDaqRestTest implements 100, 101), channelName); - request.addField(QueryField.pulseId); - request.addField(QueryField.globalSeconds); - request.addField(QueryField.globalMillis); - 
request.addField(QueryField.iocSeconds); - request.addField(QueryField.iocMillis); + request.addField(EventField.pulseId); + request.addField(EventField.globalSeconds); + request.addField(EventField.globalMillis); + request.addField(EventField.iocSeconds); + request.addField(EventField.iocMillis); request.addValueTransformation( new ValueTransformationSequence( ValueTransformationSequence.ALL_CHANNELS, @@ -1256,11 +1256,11 @@ public class JsonQueryRestControllerTest extends AbstractDaqRestTest implements 100, 101), channelName); - request.addField(QueryField.pulseId); - request.addField(QueryField.globalSeconds); - request.addField(QueryField.globalMillis); - request.addField(QueryField.iocSeconds); - request.addField(QueryField.iocMillis); + request.addField(EventField.pulseId); + request.addField(EventField.globalSeconds); + request.addField(EventField.globalMillis); + request.addField(EventField.iocSeconds); + request.addField(EventField.iocMillis); request.addValueTransformation( new ValueTransformationSequence( channelName, @@ -1299,11 +1299,11 @@ public class JsonQueryRestControllerTest extends AbstractDaqRestTest implements 100, 101), channelName, channelName2); - request.addField(QueryField.pulseId); - request.addField(QueryField.globalSeconds); - request.addField(QueryField.globalMillis); - request.addField(QueryField.iocSeconds); - request.addField(QueryField.iocMillis); + request.addField(EventField.pulseId); + request.addField(EventField.globalSeconds); + request.addField(EventField.globalMillis); + request.addField(EventField.iocSeconds); + request.addField(EventField.iocMillis); request.addValueTransformation( new ValueTransformationSequence( null, diff --git a/src/test/java/ch/psi/daq/test/queryrest/controller/QueryRestControllerChannelConfigurationTest.java b/src/test/java/ch/psi/daq/test/queryrest/controller/QueryRestControllerChannelConfigurationTest.java new file mode 100644 index 0000000..2bc4a90 --- /dev/null +++ 
b/src/test/java/ch/psi/daq/test/queryrest/controller/QueryRestControllerChannelConfigurationTest.java @@ -0,0 +1,473 @@ +package ch.psi.daq.test.queryrest.controller; + +import static org.junit.Assert.assertArrayEquals; +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertNull; +import static org.junit.Assert.assertTrue; + +import java.util.List; +import java.util.stream.Collectors; + +import javax.annotation.Resource; + +import org.junit.After; +import org.junit.Test; +import org.springframework.http.MediaType; +import org.springframework.test.web.servlet.MvcResult; +import org.springframework.test.web.servlet.request.MockMvcRequestBuilders; +import org.springframework.test.web.servlet.result.MockMvcResultHandlers; +import org.springframework.test.web.servlet.result.MockMvcResultMatchers; + +import com.fasterxml.jackson.core.type.TypeReference; +import com.fasterxml.jackson.databind.ObjectMapper; + +import ch.psi.bsread.message.Type; +import ch.psi.daq.common.ordering.Ordering; +import ch.psi.daq.common.time.TimeUtils; +import ch.psi.daq.domain.backend.Backend; +import ch.psi.daq.domain.config.DomainConfig; +import ch.psi.daq.domain.events.ChannelConfiguration; +import ch.psi.daq.domain.events.impl.ChannelConfigurationImpl; +import ch.psi.daq.domain.json.ChannelConfigurations; +import ch.psi.daq.domain.json.ChannelConfigurationsList; +import ch.psi.daq.domain.query.DAQConfigQuery; +import ch.psi.daq.domain.query.channels.ChannelConfigurationsRequest; +import ch.psi.daq.domain.query.channels.ChannelConfigurationsResponse; +import ch.psi.daq.domain.request.range.RequestRangePulseId; +import ch.psi.daq.test.queryrest.AbstractDaqRestTest; + +/** + * Tests the {@link DaqController} implementation. 
+ */ +public class QueryRestControllerChannelConfigurationTest extends AbstractDaqRestTest { + + @Resource(name = DomainConfig.BEAN_NAME_BACKEND_DEFAULT) + private Backend backend; + + private ObjectMapper objectMapper = new ObjectMapper(); + + @After + public void tearDown() throws Exception {} + + @Test + public void testChannelConfigurationQuery_01() throws Exception { + DAQConfigQuery query = new DAQConfigQuery( + new RequestRangePulseId( + 100, + 101), + backend.getName() + "1", backend.getName() + "2"); + + String content = mapper.writeValueAsString(query); + System.out.println(content); + + MvcResult result = this.mockMvc.perform(MockMvcRequestBuilders + .post(DomainConfig.PATH_QUERY_CONFIG) + .contentType(MediaType.APPLICATION_JSON) + .content(content)) + .andDo(MockMvcResultHandlers.print()) + .andExpect(MockMvcResultMatchers.status().isOk()) + .andReturn(); + + String response = result.getResponse().getContentAsString(); + System.out.println("Response: " + response); + + // test conversion used in DAQProcessing + ChannelConfigurationsList channelConfigurations = + objectMapper.readValue(response, ChannelConfigurationsList.class); + assertEquals(2, channelConfigurations.size()); + ChannelConfigurations configs = channelConfigurations.get(0); + assertEquals(backend.getName() + "1", configs.getChannel().getName()); + assertEquals(backend, configs.getChannel().getBackend()); + List configsList = configs.getConfigs().collect(Collectors.toList()); + assertEquals(2, configsList.size()); + ChannelConfiguration config = configsList.get(0); + assertEquals(backend.getName() + "1", config.getChannel()); + assertEquals(backend, config.getBackend()); + assertEquals(TimeUtils.getTimeFromMillis(query.getRange().getStartPulseId() * 10, 0), config.getGlobalTime()); + assertEquals(query.getRange().getStartPulseId(), config.getPulseId()); + assertArrayEquals(new int[] {1}, config.getShape()); + assertEquals(Type.Int32.getKey(), config.getType()); + assertEquals("unknown", 
config.getSource()); + assertEquals(0, config.getModulo()); + assertEquals(0, config.getOffset()); + config = configsList.get(1); + assertEquals(backend.getName() + "1", config.getChannel()); + assertEquals(backend, config.getBackend()); + assertEquals(TimeUtils.getTimeFromMillis(query.getRange().getEndPulseId() * 10, 0), config.getGlobalTime()); + assertEquals(query.getRange().getEndPulseId(), config.getPulseId()); + assertArrayEquals(new int[] {1}, config.getShape()); + assertEquals(Type.Int32.getKey(), config.getType()); + assertEquals("unknown", config.getSource()); + assertEquals(0, config.getModulo()); + assertEquals(0, config.getOffset()); + + configs = channelConfigurations.get(1); + assertEquals(backend.getName() + "2", configs.getChannel().getName()); + assertEquals(backend, configs.getChannel().getBackend()); + configsList = configs.getConfigs().collect(Collectors.toList()); + assertEquals(2, configsList.size()); + config = configsList.get(0); + assertEquals(backend.getName() + "2", config.getChannel()); + assertEquals(backend, config.getBackend()); + assertEquals(TimeUtils.getTimeFromMillis(query.getRange().getStartPulseId() * 10, 0), config.getGlobalTime()); + assertEquals(query.getRange().getStartPulseId(), config.getPulseId()); + assertArrayEquals(new int[] {1}, config.getShape()); + assertEquals(Type.Int32.getKey(), config.getType()); + assertEquals("unknown", config.getSource()); + assertEquals(0, config.getModulo()); + assertEquals(0, config.getOffset()); + config = configsList.get(1); + assertEquals(backend.getName() + "2", config.getChannel()); + assertEquals(backend, config.getBackend()); + assertEquals(TimeUtils.getTimeFromMillis(query.getRange().getEndPulseId() * 10, 0), config.getGlobalTime()); + assertEquals(query.getRange().getEndPulseId(), config.getPulseId()); + assertArrayEquals(new int[] {1}, config.getShape()); + assertEquals(Type.Int32.getKey(), config.getType()); + assertEquals("unknown", config.getSource()); + assertEquals(0, 
config.getModulo()); + assertEquals(0, config.getOffset()); + + // test if backend and channel/name not set + this.mockMvc.perform(MockMvcRequestBuilders + .post(DomainConfig.PATH_QUERY_CONFIG) + .contentType(MediaType.APPLICATION_JSON) + .content(content)) + .andExpect(MockMvcResultMatchers.status().isOk()) + .andExpect(MockMvcResultMatchers.jsonPath("$").isArray()) + .andExpect(MockMvcResultMatchers.jsonPath("$[0]").exists()) + .andExpect(MockMvcResultMatchers.jsonPath("$[0].channel.backend").value(backend.getName())) + .andExpect(MockMvcResultMatchers.jsonPath("$[0].channel.name").value(backend.getName() + "1")) + .andExpect(MockMvcResultMatchers.jsonPath("$[0].configs").isArray()) + .andExpect(MockMvcResultMatchers.jsonPath("$[0].configs[0]").exists()) + .andExpect(MockMvcResultMatchers.jsonPath("$[0].configs[0].name").doesNotExist()) + .andExpect(MockMvcResultMatchers.jsonPath("$[0].configs[0].backend").doesNotExist()) + .andExpect(MockMvcResultMatchers.jsonPath("$[0].configs[0].type").value(config.getType())) + .andExpect(MockMvcResultMatchers.jsonPath("$[0].configs[0].shape[0]").value(config.getShape()[0])) + .andExpect(MockMvcResultMatchers.jsonPath("$[0].configs[1]").exists()) + .andExpect(MockMvcResultMatchers.jsonPath("$[0].configs[1].name").doesNotExist()) + .andExpect(MockMvcResultMatchers.jsonPath("$[0].configs[1].backend").doesNotExist()) + .andExpect(MockMvcResultMatchers.jsonPath("$[0].configs[1].type").value(config.getType())) + .andExpect(MockMvcResultMatchers.jsonPath("$[0].configs[1].shape[0]").value(config.getShape()[0])) .andExpect(MockMvcResultMatchers.jsonPath("$[0].configs[2]").doesNotExist()) + .andExpect(MockMvcResultMatchers.jsonPath("$[1].configs[0]").exists()) + .andExpect(MockMvcResultMatchers.jsonPath("$[1].configs[0].name").doesNotExist()) + .andExpect(MockMvcResultMatchers.jsonPath("$[1].configs[0].backend").doesNotExist()) + .andExpect(MockMvcResultMatchers.jsonPath("$[1].configs[0].type").value(config.getType())) + 
.andExpect(MockMvcResultMatchers.jsonPath("$[1].configs[0].shape[0]").value(config.getShape()[0])) .andExpect(MockMvcResultMatchers.jsonPath("$[1].configs[1]").exists()) + .andExpect(MockMvcResultMatchers.jsonPath("$[1].configs[1].name").doesNotExist()) + .andExpect(MockMvcResultMatchers.jsonPath("$[1].configs[1].backend").doesNotExist()) + .andExpect(MockMvcResultMatchers.jsonPath("$[1].configs[1].type").value(config.getType())) + .andExpect(MockMvcResultMatchers.jsonPath("$[1].configs[1].shape[0]").value(config.getShape()[0])) .andExpect(MockMvcResultMatchers.jsonPath("$[1].configs[2]").doesNotExist()) + .andExpect(MockMvcResultMatchers.jsonPath("$[2]").doesNotExist()); + } + + @Test + public void testChannelConfigurationQuery_02() throws Exception { + DAQConfigQuery query = new DAQConfigQuery( + new RequestRangePulseId( + 100, + 101), + backend.getName() + "1", backend.getName() + "2"); + query.setOrdering(Ordering.desc); + + String content = mapper.writeValueAsString(query); + System.out.println(content); + + MvcResult result = this.mockMvc.perform(MockMvcRequestBuilders + .post(DomainConfig.PATH_QUERY_CONFIG) + .contentType(MediaType.APPLICATION_JSON) + .content(content)) + .andDo(MockMvcResultHandlers.print()) + .andExpect(MockMvcResultMatchers.status().isOk()) + .andReturn(); + + String response = result.getResponse().getContentAsString(); + System.out.println("Response: " + response); + + // test conversion used in DAQProcessing + ChannelConfigurationsList channelConfigurations = + objectMapper.readValue(response, ChannelConfigurationsList.class); + assertEquals(2, channelConfigurations.size()); + ChannelConfigurations configs = channelConfigurations.get(0); + assertEquals(backend.getName() + "1", configs.getChannel().getName()); + assertEquals(backend, configs.getChannel().getBackend()); + List configsList = configs.getConfigs().collect(Collectors.toList()); + assertEquals(2, configsList.size()); + ChannelConfiguration config = configsList.get(0); + 
assertEquals(backend.getName() + "1", config.getChannel()); + assertEquals(backend, config.getBackend()); + assertEquals(TimeUtils.getTimeFromMillis(query.getRange().getEndPulseId() * 10, 0), config.getGlobalTime()); + assertEquals(query.getRange().getEndPulseId(), config.getPulseId()); + assertArrayEquals(new int[] {1}, config.getShape()); + assertEquals(Type.Int32.getKey(), config.getType()); + assertEquals("unknown", config.getSource()); + assertEquals(0, config.getModulo()); + assertEquals(0, config.getOffset()); + config = configsList.get(1); + assertEquals(backend.getName() + "1", config.getChannel()); + assertEquals(backend, config.getBackend()); + assertEquals(TimeUtils.getTimeFromMillis(query.getRange().getStartPulseId() * 10, 0), config.getGlobalTime()); + assertEquals(query.getRange().getStartPulseId(), config.getPulseId()); + assertArrayEquals(new int[] {1}, config.getShape()); + assertEquals(Type.Int32.getKey(), config.getType()); + assertEquals("unknown", config.getSource()); + assertEquals(0, config.getModulo()); + assertEquals(0, config.getOffset()); + + configs = channelConfigurations.get(1); + assertEquals(backend.getName() + "2", configs.getChannel().getName()); + assertEquals(backend, configs.getChannel().getBackend()); + configsList = configs.getConfigs().collect(Collectors.toList()); + assertEquals(2, configsList.size()); + config = configsList.get(0); + assertEquals(backend.getName() + "2", config.getChannel()); + assertEquals(backend, config.getBackend()); + assertEquals(TimeUtils.getTimeFromMillis(query.getRange().getEndPulseId() * 10, 0), config.getGlobalTime()); + assertEquals(query.getRange().getEndPulseId(), config.getPulseId()); + assertArrayEquals(new int[] {1}, config.getShape()); + assertEquals(Type.Int32.getKey(), config.getType()); + assertEquals("unknown", config.getSource()); + assertEquals(0, config.getModulo()); + assertEquals(0, config.getOffset()); + config = configsList.get(1); + assertEquals(backend.getName() + "2", 
config.getChannel()); + assertEquals(backend, config.getBackend()); + assertEquals(TimeUtils.getTimeFromMillis(query.getRange().getStartPulseId() * 10, 0), config.getGlobalTime()); + assertEquals(query.getRange().getStartPulseId(), config.getPulseId()); + assertArrayEquals(new int[] {1}, config.getShape()); + assertEquals(Type.Int32.getKey(), config.getType()); + assertEquals("unknown", config.getSource()); + assertEquals(0, config.getModulo()); + assertEquals(0, config.getOffset()); + } + + @Test + public void testChannelConfigurations_01() throws Exception { + ChannelConfigurationsRequest request = new ChannelConfigurationsRequest( + "int32"); + + String content = mapper.writeValueAsString(request); + System.out.println(content); + + MvcResult result = this.mockMvc.perform(MockMvcRequestBuilders + .post(DomainConfig.PATH_CHANNELS_CONFIG) + .contentType(MediaType.APPLICATION_JSON) + .content(content)) + .andDo(MockMvcResultHandlers.print()) + .andExpect(MockMvcResultMatchers.status().isOk()) + .andReturn(); + + String response = result.getResponse().getContentAsString(); + System.out.println("Response: " + response); + + // test conversion used in DAQProcessing + List channelConfigurations = + objectMapper.readValue(response, new TypeReference>() {}); + assertEquals(3, channelConfigurations.size()); + ChannelConfigurationsResponse configResponse = channelConfigurations.get(0); + assertEquals(backend, configResponse.getBackend()); + + List configs = configResponse.getChannels().collect(Collectors.toList()); + assertEquals(4, configs.size()); + ChannelConfiguration config = configs.get(0); + assertEquals("Int32Scalar", config.getChannel()); + assertEquals(backend, config.getBackend()); + assertEquals("int32", config.getType()); + assertEquals("unknown", config.getSource()); + assertArrayEquals(new int[] {1}, config.getShape()); + assertNull(config.getGlobalTime()); + assertTrue(config.getPulseId() == 0); + assertEquals(0, config.getModulo()); + assertEquals(0, 
config.getOffset()); + config = configs.get(1); + assertEquals("Int32Waveform", config.getChannel()); + assertEquals(backend, config.getBackend()); + assertEquals("int32", config.getType()); + assertEquals("unknown", config.getSource()); + assertArrayEquals(new int[] {8}, config.getShape()); + assertNull(config.getGlobalTime()); + assertTrue(config.getPulseId() == 0); + assertEquals(0, config.getModulo()); + assertEquals(0, config.getOffset()); + config = configs.get(2); + assertEquals("UInt32Scalar", config.getChannel()); + assertEquals(backend, config.getBackend()); + assertEquals("uint32", config.getType()); + assertEquals("unknown", config.getSource()); + assertArrayEquals(new int[] {1}, config.getShape()); + assertNull(config.getGlobalTime()); + assertTrue(config.getPulseId() == 0); + assertEquals(0, config.getModulo()); + assertEquals(0, config.getOffset()); + config = configs.get(3); + assertEquals("UInt32Waveform", config.getChannel()); + assertEquals(backend, config.getBackend()); + assertEquals("uint32", config.getType()); + assertEquals("unknown", config.getSource()); + assertArrayEquals(new int[] {8}, config.getShape()); + assertNull(config.getGlobalTime()); + assertTrue(config.getPulseId() == 0); + assertEquals(0, config.getModulo()); + assertEquals(0, config.getOffset()); + } + + @Test + public void testChannelConfigurations_02() throws Exception { + MvcResult result = this.mockMvc.perform(MockMvcRequestBuilders + .get(DomainConfig.PATH_CHANNELS_CONFIG + "/int16") + .contentType(MediaType.APPLICATION_JSON)) + .andDo(MockMvcResultHandlers.print()) + .andExpect(MockMvcResultMatchers.status().isOk()) + .andReturn(); + + String response = result.getResponse().getContentAsString(); + System.out.println("Response: " + response); + + // test conversion used in DAQProcessing + List channelConfigurations = + objectMapper.readValue(response, new TypeReference>() {}); + assertEquals(3, channelConfigurations.size()); + ChannelConfigurationsResponse configResponse 
= channelConfigurations.get(0); + assertEquals(backend, configResponse.getBackend()); + + List configs = configResponse.getChannels().collect(Collectors.toList()); + assertEquals(4, configs.size()); + ChannelConfiguration config = configs.get(0); + assertEquals("Int16Scalar", config.getChannel()); + assertEquals(backend, config.getBackend()); + assertEquals("int16", config.getType()); + assertEquals("unknown", config.getSource()); + assertArrayEquals(new int[] {1}, config.getShape()); + assertNull(config.getGlobalTime()); + assertTrue(config.getPulseId() == 0); + assertEquals(0, config.getModulo()); + assertEquals(0, config.getOffset()); + config = configs.get(1); + assertEquals("Int16Waveform", config.getChannel()); + assertEquals(backend, config.getBackend()); + assertEquals("int16", config.getType()); + assertEquals("unknown", config.getSource()); + assertArrayEquals(new int[] {8}, config.getShape()); + assertNull(config.getGlobalTime()); + assertTrue(config.getPulseId() == 0); + assertEquals(0, config.getModulo()); + assertEquals(0, config.getOffset()); + config = configs.get(2); + assertEquals("UInt16Scalar", config.getChannel()); + assertEquals(backend, config.getBackend()); + assertEquals("uint16", config.getType()); + assertEquals("unknown", config.getSource()); + assertArrayEquals(new int[] {1}, config.getShape()); + assertNull(config.getGlobalTime()); + assertTrue(config.getPulseId() == 0); + assertEquals(0, config.getModulo()); + assertEquals(0, config.getOffset()); + config = configs.get(3); + assertEquals("UInt16Waveform", config.getChannel()); + assertEquals(backend, config.getBackend()); + assertEquals("uint16", config.getType()); + assertEquals("unknown", config.getSource()); + assertArrayEquals(new int[] {8}, config.getShape()); + assertNull(config.getGlobalTime()); + assertTrue(config.getPulseId() == 0); + assertEquals(0, config.getModulo()); + assertEquals(0, config.getOffset()); + } + + @Test + public void testChannelConfigurations_03() throws 
Exception { + MvcResult result = this.mockMvc.perform(MockMvcRequestBuilders + .get(DomainConfig.PATH_CHANNEL_CONFIG + "/Int16Waveform") + .contentType(MediaType.APPLICATION_JSON)) + .andDo(MockMvcResultHandlers.print()) + .andExpect(MockMvcResultMatchers.status().isOk()) + .andReturn(); + + String response = result.getResponse().getContentAsString(); + System.out.println("Response: " + response); + + // test conversion used in DAQProcessing + ChannelConfiguration config = + objectMapper.readValue(response, ChannelConfigurationImpl.class); + assertEquals("Int16Waveform", config.getChannel()); + assertEquals(backend, config.getBackend()); + assertEquals("int16", config.getType()); + assertEquals("unknown", config.getSource()); + assertArrayEquals(new int[] {8}, config.getShape()); + } + + @Test + public void testChannelConfigurationsBackendOrder() throws Exception { + ChannelConfigurationsRequest request = new ChannelConfigurationsRequest( + "int64", Ordering.asc, backend); + + String content = mapper.writeValueAsString(request); + System.out.println(content); + + this.mockMvc + .perform(MockMvcRequestBuilders + .post(DomainConfig.PATH_CHANNELS_CONFIG) + .contentType(MediaType.APPLICATION_JSON) + .content(content)) + + .andExpect(MockMvcResultMatchers.status().isOk()) + .andExpect(MockMvcResultMatchers.jsonPath("$").isArray()) + .andExpect(MockMvcResultMatchers.jsonPath("$[0]").exists()) + .andExpect(MockMvcResultMatchers.jsonPath("$[0].backend").value(backend.getName())) + .andExpect(MockMvcResultMatchers.jsonPath("$[0].channels").isArray()) + .andExpect(MockMvcResultMatchers.jsonPath("$[0].channels[0]").exists()) + .andExpect(MockMvcResultMatchers.jsonPath("$[0].channels[0].name").value("Int64Scalar")) + .andExpect(MockMvcResultMatchers.jsonPath("$[0].channels[0].backend").value(backend.getName())) + .andExpect(MockMvcResultMatchers.jsonPath("$[0].channels[0].channel").doesNotExist()) + .andExpect(MockMvcResultMatchers.jsonPath("$[0].channels[1]").exists()) + 
.andExpect(MockMvcResultMatchers.jsonPath("$[0].channels[1].name").value("Int64Waveform")) + .andExpect(MockMvcResultMatchers.jsonPath("$[0].channels[1].backend").value(backend.getName())) + .andExpect(MockMvcResultMatchers.jsonPath("$[0].channels[1].channel").doesNotExist()) + .andExpect(MockMvcResultMatchers.jsonPath("$[0].channels[2]").exists()) + .andExpect(MockMvcResultMatchers.jsonPath("$[0].channels[2].name").value("UInt64Scalar")) + .andExpect(MockMvcResultMatchers.jsonPath("$[0].channels[2].backend").value(backend.getName())) + .andExpect(MockMvcResultMatchers.jsonPath("$[0].channels[0].channel").doesNotExist()) + .andExpect(MockMvcResultMatchers.jsonPath("$[0].channels[3]").exists()) + .andExpect(MockMvcResultMatchers.jsonPath("$[0].channels[3].name").value("UInt64Waveform")) + .andExpect(MockMvcResultMatchers.jsonPath("$[0].channels[3].backend").value(backend.getName())) + .andExpect(MockMvcResultMatchers.jsonPath("$[0].channels[0].channel").doesNotExist()) + .andExpect(MockMvcResultMatchers.jsonPath("$[0].channels[4]").doesNotExist()) + .andExpect(MockMvcResultMatchers.jsonPath("$[1]").doesNotExist()); + + + request = new ChannelConfigurationsRequest( + "int64", Ordering.desc, backend); + + content = mapper.writeValueAsString(request); + System.out.println(content); + + this.mockMvc + .perform(MockMvcRequestBuilders + .post(DomainConfig.PATH_CHANNELS_CONFIG) + .contentType(MediaType.APPLICATION_JSON) + .content(content)) + + .andExpect(MockMvcResultMatchers.status().isOk()) + .andExpect(MockMvcResultMatchers.jsonPath("$").isArray()) + .andExpect(MockMvcResultMatchers.jsonPath("$[0]").exists()) + .andExpect(MockMvcResultMatchers.jsonPath("$[0].backend").value(backend.getName())) + .andExpect(MockMvcResultMatchers.jsonPath("$[0].channels").isArray()) + .andExpect(MockMvcResultMatchers.jsonPath("$[0].channels[0]").exists()) + .andExpect(MockMvcResultMatchers.jsonPath("$[0].channels[0].name").value("UInt64Waveform")) + 
.andExpect(MockMvcResultMatchers.jsonPath("$[0].channels[0].backend").value(backend.getName())) + .andExpect(MockMvcResultMatchers.jsonPath("$[0].channels[0].channel").doesNotExist()) + .andExpect(MockMvcResultMatchers.jsonPath("$[0].channels[1]").exists()) + .andExpect(MockMvcResultMatchers.jsonPath("$[0].channels[1].name").value("UInt64Scalar")) + .andExpect(MockMvcResultMatchers.jsonPath("$[0].channels[1].backend").value(backend.getName())) + .andExpect(MockMvcResultMatchers.jsonPath("$[0].channels[1].channel").doesNotExist()) + .andExpect(MockMvcResultMatchers.jsonPath("$[0].channels[2]").exists()) + .andExpect(MockMvcResultMatchers.jsonPath("$[0].channels[2].name").value("Int64Waveform")) + .andExpect(MockMvcResultMatchers.jsonPath("$[0].channels[2].backend").value(backend.getName())) + .andExpect(MockMvcResultMatchers.jsonPath("$[0].channels[2].channel").doesNotExist()) + .andExpect(MockMvcResultMatchers.jsonPath("$[0].channels[3]").exists()) + .andExpect(MockMvcResultMatchers.jsonPath("$[0].channels[3].name").value("Int64Scalar")) + .andExpect(MockMvcResultMatchers.jsonPath("$[0].channels[3].backend").value(backend.getName())) + .andExpect(MockMvcResultMatchers.jsonPath("$[0].channels[3].channel").doesNotExist()) + .andExpect(MockMvcResultMatchers.jsonPath("$[0].channels[4]").doesNotExist()) + .andExpect(MockMvcResultMatchers.jsonPath("$[1]").doesNotExist()); + } +} diff --git a/src/test/java/ch/psi/daq/test/queryrest/controller/QueryRestControllerChannelInfoTest.java b/src/test/java/ch/psi/daq/test/queryrest/controller/QueryRestControllerChannelInfoTest.java deleted file mode 100644 index 1352374..0000000 --- a/src/test/java/ch/psi/daq/test/queryrest/controller/QueryRestControllerChannelInfoTest.java +++ /dev/null @@ -1,178 +0,0 @@ -package ch.psi.daq.test.queryrest.controller; - -import static org.junit.Assert.assertArrayEquals; -import static org.junit.Assert.assertEquals; - -import java.util.List; -import java.util.stream.Collectors; - -import 
javax.annotation.Resource; - -import org.junit.After; -import org.junit.Test; -import org.springframework.http.MediaType; -import org.springframework.test.web.servlet.MvcResult; -import org.springframework.test.web.servlet.request.MockMvcRequestBuilders; -import org.springframework.test.web.servlet.result.MockMvcResultHandlers; -import org.springframework.test.web.servlet.result.MockMvcResultMatchers; - -import com.fasterxml.jackson.databind.ObjectMapper; - -import ch.psi.bsread.message.Type; -import ch.psi.daq.common.ordering.Ordering; -import ch.psi.daq.common.time.TimeUtils; -import ch.psi.daq.domain.backend.Backend; -import ch.psi.daq.domain.config.DomainConfig; -import ch.psi.daq.domain.json.channels.info.ChannelInfo; -import ch.psi.daq.domain.json.channels.info.ChannelInfos; -import ch.psi.daq.domain.json.channels.info.ChannelInfosList; -import ch.psi.daq.domain.query.ChannelNameRequest; -import ch.psi.daq.domain.request.range.RequestRangePulseId; -import ch.psi.daq.test.queryrest.AbstractDaqRestTest; - -/** - * Tests the {@link DaqController} implementation. 
- */ -public class QueryRestControllerChannelInfoTest extends AbstractDaqRestTest { - - @Resource(name = DomainConfig.BEAN_NAME_BACKEND_DEFAULT) - private Backend backend; - - private ObjectMapper objectMapper = new ObjectMapper(); - - @After - public void tearDown() throws Exception {} - - @Test - public void testChannelInfoQuery_01() throws Exception { - ChannelNameRequest query = new ChannelNameRequest( - new RequestRangePulseId( - 100, - 101), - backend.getName() + "1", backend.getName() + "2"); - - String content = mapper.writeValueAsString(query); - System.out.println(content); - - MvcResult result = this.mockMvc.perform(MockMvcRequestBuilders - .post(DomainConfig.PATH_CHANNELS_INFO) - .contentType(MediaType.APPLICATION_JSON) - .content(content)) - .andDo(MockMvcResultHandlers.print()) - .andExpect(MockMvcResultMatchers.status().isOk()) - .andReturn(); - - String response = result.getResponse().getContentAsString(); - System.out.println("Response: " + response); - - // test conversion used in DAQProcessing - List infosList = objectMapper.readValue(response, ChannelInfosList.class); - assertEquals(2, infosList.size()); - ChannelInfos cInfos = infosList.get(0); - assertEquals(backend.getName() + "1", cInfos.getChannel().getName()); - assertEquals(backend, cInfos.getChannel().getBackend()); - List infos = cInfos.getChannelInfos().collect(Collectors.toList()); - assertEquals(2, infos.size()); - ChannelInfo info = infos.get(0); - assertEquals(backend.getName() + "1", info.getChannel()); - assertEquals(backend, info.getBackend()); - assertEquals(TimeUtils.getTimeFromMillis(query.getRange().getStartPulseId() * 10, 0), info.getGlobalTime()); - assertEquals(query.getRange().getStartPulseId(), info.getPulseId()); - assertArrayEquals(new int[] {1}, info.getShape()); - assertEquals(Type.Int32.getKey(), info.getType()); - info = infos.get(1); - assertEquals(backend.getName() + "1", info.getChannel()); - assertEquals(backend, info.getBackend()); - 
assertEquals(TimeUtils.getTimeFromMillis(query.getRange().getEndPulseId() * 10, 0), info.getGlobalTime()); - assertEquals(query.getRange().getEndPulseId(), info.getPulseId()); - assertArrayEquals(new int[] {1}, info.getShape()); - assertEquals(Type.Int32.getKey(), info.getType()); - - cInfos = infosList.get(1); - assertEquals(backend.getName() + "2", cInfos.getChannel().getName()); - assertEquals(backend, cInfos.getChannel().getBackend()); - infos = cInfos.getChannelInfos().collect(Collectors.toList()); - assertEquals(2, infos.size()); - info = infos.get(0); - assertEquals(backend.getName() + "2", info.getChannel()); - assertEquals(backend, info.getBackend()); - assertEquals(TimeUtils.getTimeFromMillis(query.getRange().getStartPulseId() * 10, 0), info.getGlobalTime()); - assertEquals(query.getRange().getStartPulseId(), info.getPulseId()); - assertArrayEquals(new int[] {1}, info.getShape()); - assertEquals(Type.Int32.getKey(), info.getType()); - info = infos.get(1); - assertEquals(backend.getName() + "2", info.getChannel()); - assertEquals(backend, info.getBackend()); - assertEquals(TimeUtils.getTimeFromMillis(query.getRange().getEndPulseId() * 10, 0), info.getGlobalTime()); - assertEquals(query.getRange().getEndPulseId(), info.getPulseId()); - assertArrayEquals(new int[] {1}, info.getShape()); - assertEquals(Type.Int32.getKey(), info.getType()); - } - - @Test - public void testChannelInfoQuery_02() throws Exception { - ChannelNameRequest query = new ChannelNameRequest( - new RequestRangePulseId( - 100, - 101), - backend.getName() + "1", backend.getName() + "2"); - query.setOrdering(Ordering.desc); - - String content = mapper.writeValueAsString(query); - System.out.println(content); - - MvcResult result = this.mockMvc.perform(MockMvcRequestBuilders - .post(DomainConfig.PATH_CHANNELS_INFO) - .contentType(MediaType.APPLICATION_JSON) - .content(content)) - .andDo(MockMvcResultHandlers.print()) - .andExpect(MockMvcResultMatchers.status().isOk()) - .andReturn(); - - 
String response = result.getResponse().getContentAsString(); - System.out.println("Response: " + response); - - // test conversion used in DAQProcessing - List infosList = objectMapper.readValue(response, ChannelInfosList.class); - assertEquals(2, infosList.size()); - ChannelInfos cInfos = infosList.get(0); - assertEquals(backend.getName() + "1", cInfos.getChannel().getName()); - assertEquals(backend, cInfos.getChannel().getBackend()); - List infos = cInfos.getChannelInfos().collect(Collectors.toList()); - assertEquals(2, infos.size()); - ChannelInfo info = infos.get(0); - assertEquals(backend.getName() + "1", info.getChannel()); - assertEquals(backend, info.getBackend()); - assertEquals(TimeUtils.getTimeFromMillis(query.getRange().getEndPulseId() * 10, 0), info.getGlobalTime()); - assertEquals(query.getRange().getEndPulseId(), info.getPulseId()); - assertArrayEquals(new int[] {1}, info.getShape()); - assertEquals(Type.Int32.getKey(), info.getType()); - info = infos.get(1); - assertEquals(backend.getName() + "1", info.getChannel()); - assertEquals(backend, info.getBackend()); - assertEquals(TimeUtils.getTimeFromMillis(query.getRange().getStartPulseId() * 10, 0), info.getGlobalTime()); - assertEquals(query.getRange().getStartPulseId(), info.getPulseId()); - assertArrayEquals(new int[] {1}, info.getShape()); - assertEquals(Type.Int32.getKey(), info.getType()); - - cInfos = infosList.get(1); - assertEquals(backend.getName() + "2", cInfos.getChannel().getName()); - assertEquals(backend, cInfos.getChannel().getBackend()); - infos = cInfos.getChannelInfos().collect(Collectors.toList()); - assertEquals(2, infos.size()); - info = infos.get(0); - assertEquals(backend.getName() + "2", info.getChannel()); - assertEquals(backend, info.getBackend()); - assertEquals(TimeUtils.getTimeFromMillis(query.getRange().getEndPulseId() * 10, 0), info.getGlobalTime()); - assertEquals(query.getRange().getEndPulseId(), info.getPulseId()); - assertArrayEquals(new int[] {1}, info.getShape()); 
- assertEquals(Type.Int32.getKey(), info.getType()); - info = infos.get(1); - assertEquals(backend.getName() + "2", info.getChannel()); - assertEquals(backend, info.getBackend()); - assertEquals(TimeUtils.getTimeFromMillis(query.getRange().getStartPulseId() * 10, 0), info.getGlobalTime()); - assertEquals(query.getRange().getStartPulseId(), info.getPulseId()); - assertArrayEquals(new int[] {1}, info.getShape()); - assertEquals(Type.Int32.getKey(), info.getType()); - } -} diff --git a/src/test/resources/queryrest-test.properties b/src/test/resources/queryrest-test.properties index a1078f1..552a4ef 100644 --- a/src/test/resources/queryrest-test.properties +++ b/src/test/resources/queryrest-test.properties @@ -8,7 +8,7 @@ query.hazelcast.node=true # the base for the keyspaces domain.keyspace.base=daq_query_test -channelname.cache.reload.period=-1 +channels.cache.reload.period=-1 query.min.time=1970-01-01T00:00:00.000000000+00:00