Configurable Backend.
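This commit makes the set of data backends configurable at runtime. The recurring change in the diff below: beans that used to receive collaborators through @Resource field injection now implement Spring's ApplicationContextAware, resolve the configurable default Backend from the root context, and look their collaborators up in that backend's own ApplicationContext. The same pattern is applied to the config, the controller, the validator, the query manager, and the response stream writers. A minimal sketch of the pattern outside the diff (the class name BackendAwareBean is illustrative only, not part of the commit):

    import org.springframework.beans.BeansException;
    import org.springframework.context.ApplicationContext;
    import org.springframework.context.ApplicationContextAware;

    import com.fasterxml.jackson.databind.ObjectMapper;

    import ch.psi.daq.domain.backend.Backend;
    import ch.psi.daq.domain.config.DomainConfig;

    public class BackendAwareBean implements ApplicationContextAware {
        private ObjectMapper objectMapper;

        @Override
        public void setApplicationContext(final ApplicationContext rootContext) throws BeansException {
            // resolve the configurable default backend from the root context ...
            final Backend backend = rootContext.getBean(DomainConfig.BEAN_NAME_BACKEND_DEFAULT, Backend.class);
            // ... then wire collaborators from that backend's own context
            final ApplicationContext backendContext = backend.getApplicationContext();
            objectMapper = backendContext.getBean(DomainConfig.BEAN_NAME_OBJECT_MAPPER, ObjectMapper.class);
        }
    }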
@@ -13,16 +13,15 @@ import javax.annotation.Resource;
import org.msgpack.jackson.dataformat.MessagePackFactory;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.context.ApplicationContext;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.context.annotation.Import;
import org.springframework.context.annotation.Lazy;
import org.springframework.context.annotation.PropertySource;
import org.springframework.core.env.Environment;
import org.springframework.http.converter.HttpMessageConverter;
import org.springframework.http.converter.json.MappingJackson2HttpMessageConverter;
import org.springframework.util.StringUtils;
import org.springframework.validation.Validator;
import org.springframework.web.servlet.config.annotation.WebMvcConfigurerAdapter;

import com.fasterxml.jackson.annotation.JsonInclude.Include;
import com.fasterxml.jackson.core.JsonFactory;
@@ -31,6 +30,8 @@ import com.fasterxml.jackson.dataformat.smile.SmileFactory;

import ch.psi.daq.common.statistic.Statistics;
import ch.psi.daq.domain.DataEvent;
import ch.psi.daq.domain.backend.Backend;
import ch.psi.daq.domain.config.DomainConfig;
import ch.psi.daq.domain.config.DomainConfigCORS;
import ch.psi.daq.domain.query.backend.BackendQuery;
import ch.psi.daq.domain.query.backend.analyzer.BackendQueryAnalyzer;
@@ -56,8 +57,10 @@ import ch.psi.daq.queryrest.response.smile.SmileTableResponseStreamWriter;

@Configuration
@Import(value = DomainConfigCORS.class)
@PropertySource(value = {"classpath:queryrest.properties"})
@PropertySource(value = {"file:${user.home}/.config/daq/queryrest.properties"}, ignoreResourceNotFound = true)
public class QueryRestConfig extends WebMvcConfigurerAdapter {
@PropertySource(value = {
    "classpath:queryrest-${daq.config.environment}.properties",
    "file:${user.home}/.config/daq/queryrest.properties"}, ignoreResourceNotFound = true)
public class QueryRestConfig { // extends WebMvcConfigurerAdapter {

    private static final String QUERYREST_DEFAULT_RESPONSE_AGGREGATIONS = "queryrest.default.response.aggregations";

@@ -79,19 +82,38 @@ public class QueryRestConfig extends WebMvcConfigurerAdapter {

    private static final Logger LOGGER = LoggerFactory.getLogger(QueryRestConfig.class);

    public static final String BEAN_NAME_QUERY_MANAGER = "queryManager";
    public static final String BEAN_NAME_QUERY_ANALIZER_FACTORY = "queryAnalizerFactory";
    public static final String BEAN_NAME_QUERY_VALIDATOR = "queryValidator";
    public static final String BEAN_NAME_JSON_FACTORY = "jsonFactory";
    public static final String BEAN_NAME_MSG_PACK_FACTORY = "msgPackFactory";
    public static final String BEAN_NAME_SMILE_FACTORY = "smileFactory";
    public static final String BEAN_NAME_DEFAULT_RESPONSE_FIELDS = "defaultResponseFields";
    public static final String BEAN_NAME_DEFAULT_RESPONSE_AGGREGATIONS = "defaultResponseAggregations";
    // public static final String BEAN_NAME_CORS_ALLOWEDORIGINS = "corsAllowedorigins";
    // public static final String BEAN_NAME_CORS_FORCEALLHEADERS = "corsForceallheaders";

    @Resource
    private ApplicationContext context;
    private Environment env;

    @Resource
    private ObjectMapper objectMapper;

    @SuppressWarnings("unchecked")
    @PostConstruct
    public void afterPropertiesSet() {
        env = context.getEnvironment();
        objectMapper = context.getBean(DomainConfig.BEAN_NAME_OBJECT_MAPPER, ObjectMapper.class);

        initObjectMapper(objectMapper);

        // init BackendAccesses
        final List<Backend> backends = context.getBean(DomainConfig.BEAN_NAME_BACKENDS, List.class);
        for (final Backend backend : backends) {
            // the backends' instance is different from the objectMapper above
            initObjectMapper(
                backend.getApplicationContext().getBean(DomainConfig.BEAN_NAME_OBJECT_MAPPER, ObjectMapper.class));
        }
    }

    private void initObjectMapper(ObjectMapper objectMapper) {
        // only include non-null values
        objectMapper.setSerializationInclusion(Include.NON_NULL);
        // Mixin which is used dynamically to filter out which properties get serialised and which
@@ -104,83 +126,103 @@ public class QueryRestConfig extends WebMvcConfigurerAdapter {
        objectMapper.addMixIn(Response.class, PolymorphicResponseMixIn.class);
    }

    // @Override
    // public void configureMessageConverters(List<HttpMessageConverter<?>> converters) {
    // final MappingJackson2HttpMessageConverter converter = new MappingJackson2HttpMessageConverter();
    // /**
    // * This is necessary so that the message conversion uses the configured object mapper.
    // * Otherwise, a separate object mapper is instantiated for Spring's message conversion.
    // */
    // converter.setObjectMapper(objectMapper);
    // converters.add(converter);
    // super.configureMessageConverters(converters);
    // }

    // does this work for json coming from web-requests
    // -> use WebMvcConfigurationSupport?
    // https://stackoverflow.com/questions/26639475/how-to-set-context-param-in-spring-boot
    // @Bean
    // @Lazy
    // public MappingJackson2HttpMessageConverter mappingJackson2HttpMessageConverter() {
    // final MappingJackson2HttpMessageConverter jsonConverter = new MappingJackson2HttpMessageConverter();
    // jsonConverter.setObjectMapper(objectMapper);
    // return jsonConverter;
    // }

    /**
     * {@inheritDoc}
     */
    @Override
    public void configureMessageConverters(List<HttpMessageConverter<?>> converters) {
        final MappingJackson2HttpMessageConverter converter = new MappingJackson2HttpMessageConverter();
        /**
         * This is necessary so that the message conversion uses the configured object mapper.
         * Otherwise, a separate object mapper is instantiated for Spring's message conversion.
         */
        converter.setObjectMapper(objectMapper);
        converters.add(converter);
        super.configureMessageConverters(converters);
    }

    @Bean
    @Bean(name = BEAN_NAME_JSON_FACTORY)
    @Lazy
    public JsonFactory jsonFactory() {
        return new JsonFactory();
    }

    @Bean
    @Bean(name = BEAN_NAME_MSG_PACK_FACTORY)
    @Lazy
    public MessagePackFactory messagePackFactory() {
        return new MessagePackFactory();
    }

    @Bean
    @Bean(name = BEAN_NAME_SMILE_FACTORY)
    @Lazy
    public SmileFactory smileFactory() {
        return new SmileFactory();
    }

    @Bean
    @Bean(name = BEAN_NAME_QUERY_ANALIZER_FACTORY)
    @Lazy
    public Function<BackendQuery, BackendQueryAnalyzer> queryAnalizerFactory() {
        return (query) -> new BackendQueryAnalyzerImpl(query);
    }

    @Bean
    @Lazy
    public JSONResponseStreamWriter jsonResponseStreamWriter() {
        return new JSONResponseStreamWriter();
    }

    @Bean
    @Lazy
    public JSONTableResponseStreamWriter jsonTableResponseStreamWriter() {
        return new JSONTableResponseStreamWriter();
    }

    @Bean
    @Lazy
    public MsgPackResponseStreamWriter msgPackResponseStreamWriter() {
        return new MsgPackResponseStreamWriter();
    }

    @Bean
    @Lazy
    public MsgPackTableResponseStreamWriter msgPackTableResponseStreamWriter() {
        return new MsgPackTableResponseStreamWriter();
    }

    @Bean
    @Lazy
    public SmileResponseStreamWriter smileResponseStreamWriter() {
        return new SmileResponseStreamWriter();
    }

    @Bean
    @Lazy
    public SmileTableResponseStreamWriter smileTableResponseStreamWriter() {
        return new SmileTableResponseStreamWriter();
    }

    @Bean
    @Lazy
    public CSVResponseStreamWriter csvResponseStreamWriter() {
        return new CSVResponseStreamWriter();
    }

    @Bean
    @Bean(name = BEAN_NAME_QUERY_MANAGER)
    @Lazy
    public QueryManager queryManager() {
        return new QueryManagerImpl();
    }

    @Bean(name = BEAN_NAME_DEFAULT_RESPONSE_FIELDS)
    @Lazy
    public Set<QueryField> defaultResponseFields() {
        String[] responseFields =
            StringUtils.commaDelimitedListToStringArray(env.getProperty(QUERYREST_DEFAULT_RESPONSE_FIELDS));
@@ -200,6 +242,7 @@ public class QueryRestConfig extends WebMvcConfigurerAdapter {
    }

    @Bean(name = BEAN_NAME_DEFAULT_RESPONSE_AGGREGATIONS)
    @Lazy
    public Set<Aggregation> defaultResponseAggregations() {
        String[] responseAggregations =
            StringUtils.commaDelimitedListToStringArray(env.getProperty(QUERYREST_DEFAULT_RESPONSE_AGGREGATIONS));
@@ -219,7 +262,8 @@ public class QueryRestConfig extends WebMvcConfigurerAdapter {
        return defaultResponseAggregations;
    }

    @Bean
    @Bean(name = BEAN_NAME_QUERY_VALIDATOR)
    @Lazy
    public Validator queryValidator() {
        return new QueryValidator();
    }
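The configuration above now registers beans under explicit names (BEAN_NAME_JSON_FACTORY, BEAN_NAME_QUERY_MANAGER, and so on) instead of relying purely on autowiring by type, so the ApplicationContextAware classes in the following hunks can fetch them by name from a backend's application context. A minimal sketch of such a lookup, assuming a resolved backend context is at hand:

    final JsonFactory factory =
        backendContext.getBean(QueryRestConfig.BEAN_NAME_JSON_FACTORY, JsonFactory.class);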
@@ -5,16 +5,15 @@ import java.util.Collection;
import java.util.List;
import java.util.Set;
import java.util.stream.Collectors;
import java.util.stream.Stream;

import javax.annotation.PostConstruct;
import javax.annotation.Resource;
import javax.servlet.http.HttpServletResponse;
import javax.validation.Valid;

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.BeansException;
import org.springframework.context.ApplicationContext;
import org.springframework.context.ApplicationContextAware;
import org.springframework.http.MediaType;
import org.springframework.validation.Validator;
import org.springframework.web.bind.WebDataBinder;
@@ -48,37 +47,37 @@ import ch.psi.daq.domain.query.response.ResponseFormat;
import ch.psi.daq.domain.query.transform.image.color.ColorModelType;
import ch.psi.daq.domain.query.transform.image.resize.ValueAggregation;
import ch.psi.daq.domain.request.validate.RequestProviderValidator;
import ch.psi.daq.queryrest.config.QueryRestConfig;
import ch.psi.daq.queryrest.query.QueryManager;
import ch.psi.daq.queryrest.response.AbstractHTTPResponse;
import ch.psi.daq.queryrest.response.PolymorphicResponseMixIn;
import ch.psi.daq.queryrest.response.json.JSONHTTPResponse;

@RestController
public class QueryRestController {
public class QueryRestController implements ApplicationContextAware {

    private static final Logger LOGGER = LoggerFactory.getLogger(QueryRestController.class);
    public static final String PARAMETERS_ROOT_PATH = "params";

    @Resource
    private ApplicationContext appContext;

    @Resource
    private Set<Backend> activeBackends;
    private ApplicationContext context;
    private ObjectMapper objectMapper;
    private QueryManager queryManager;
    private Validator queryValidator;
    private Validator requestProviderValidator = new RequestProviderValidator();

    @Resource
    private QueryManager queryManager;

    @Resource
    private ObjectMapper objectMapper;

    @Resource(name = DomainConfig.BEAN_NAME_BACKENDS_ACTIVE)
    private Set<Backend> activeBackends;

    private Response defaultResponse = new JSONHTTPResponse();

    @PostConstruct
    public void afterPropertiesSet() {}
    @SuppressWarnings("unchecked")
    @Override
    public void setApplicationContext(ApplicationContext context) throws BeansException {
        final Backend backend = context.getBean(DomainConfig.BEAN_NAME_BACKEND_DEFAULT, Backend.class);
        context = backend.getApplicationContext();
        this.context = context;

        activeBackends = context.getBean(DomainConfig.BEAN_NAME_BACKENDS_ACTIVE, Set.class);
        objectMapper = context.getBean(DomainConfig.BEAN_NAME_OBJECT_MAPPER, ObjectMapper.class);
        queryManager = context.getBean(QueryRestConfig.BEAN_NAME_QUERY_MANAGER, QueryManager.class);
        queryValidator = context.getBean(QueryRestConfig.BEAN_NAME_QUERY_VALIDATOR, Validator.class);
    }

    @InitBinder
    protected void initBinder(WebDataBinder binder) {
@@ -205,7 +204,7 @@ public class QueryRestController {

        Response response = queries.getResponseOrDefault(defaultResponse);
        if (response instanceof AbstractHTTPResponse) {
            ((AbstractHTTPResponse) response).respond(appContext, queries, res);
            ((AbstractHTTPResponse) response).respond(context, queries, res);
        } else {
            String message =
                String.format(
@@ -228,7 +227,7 @@ public class QueryRestController {
     *
     * @return list of {@link Ordering}s as String array
     */
    @RequestMapping(value = PARAMETERS_ROOT_PATH + "/ordering", method = {RequestMethod.GET},
    @RequestMapping(value = DomainConfig.PATH_PARAMETERS_ROOT + "/ordering", method = {RequestMethod.GET},
        produces = {MediaType.APPLICATION_JSON_VALUE})
    public @ResponseBody List<Ordering> getOrderingValues() {
        return Lists.newArrayList(Ordering.values());
@@ -239,7 +238,7 @@ public class QueryRestController {
     *
     * @return list of {@link ResponseFormat}s as String array
     */
    @RequestMapping(value = PARAMETERS_ROOT_PATH + "/responseformat", method = {RequestMethod.GET},
    @RequestMapping(value = DomainConfig.PATH_PARAMETERS_ROOT + "/responseformat", method = {RequestMethod.GET},
        produces = {MediaType.APPLICATION_JSON_VALUE})
    public @ResponseBody List<ResponseFormat> getResponseFormatValues() {
        return Lists.newArrayList(ResponseFormat.values());
@@ -250,7 +249,7 @@ public class QueryRestController {
     *
     * @return list of {@link QueryField}s as String array
     */
    @RequestMapping(value = PARAMETERS_ROOT_PATH + "/queryfields", method = {RequestMethod.GET},
    @RequestMapping(value = DomainConfig.PATH_PARAMETERS_ROOT + "/queryfields", method = {RequestMethod.GET},
        produces = {MediaType.APPLICATION_JSON_VALUE})
    public @ResponseBody List<QueryField> getQueryFieldValues() {
        return Arrays.stream(QueryField.values())
@@ -263,7 +262,7 @@ public class QueryRestController {
     *
     * @return list of {@link Aggregation}s as String array
     */
    @RequestMapping(value = PARAMETERS_ROOT_PATH + "/aggregations", method = {RequestMethod.GET},
    @RequestMapping(value = DomainConfig.PATH_PARAMETERS_ROOT + "/aggregations", method = {RequestMethod.GET},
        produces = {MediaType.APPLICATION_JSON_VALUE})
    public @ResponseBody List<Aggregation> getAggregationValues() {
        return Lists.newArrayList(Aggregation.values());
@@ -274,7 +273,7 @@ public class QueryRestController {
     *
     * @return list of {@link AggregationType}s as String array
     */
    @RequestMapping(value = PARAMETERS_ROOT_PATH + "/aggregationtypes", method = {RequestMethod.GET},
    @RequestMapping(value = DomainConfig.PATH_PARAMETERS_ROOT + "/aggregationtypes", method = {RequestMethod.GET},
        produces = {MediaType.APPLICATION_JSON_VALUE})
    public @ResponseBody List<AggregationType> getAggregationTypeValues() {
        return Lists.newArrayList(AggregationType.values());
@@ -285,10 +284,10 @@ public class QueryRestController {
     *
     * @return list of {@link Backend}s as String array
     */
    @RequestMapping(value = PARAMETERS_ROOT_PATH + "/backends", method = {RequestMethod.GET},
    @RequestMapping(value = DomainConfig.PATH_BACKENDS, method = {RequestMethod.GET},
        produces = {MediaType.APPLICATION_JSON_VALUE})
    public @ResponseBody List<Backend> getBackendValues() {
        return Stream.of(Backend.values())
        return Backend.getBackends().stream()
            .filter(backend -> activeBackends.contains(backend))
            .collect(Collectors.toList());
    }
@@ -298,7 +297,7 @@ public class QueryRestController {
     *
     * @return list of {@link Compression}s as String array
     */
    @RequestMapping(value = PARAMETERS_ROOT_PATH + "/compression", method = {RequestMethod.GET},
    @RequestMapping(value = DomainConfig.PATH_PARAMETERS_ROOT + "/compression", method = {RequestMethod.GET},
        produces = {MediaType.APPLICATION_JSON_VALUE})
    public @ResponseBody List<Compression> getCompressionValues() {
        return Lists.newArrayList(Compression.values());
@@ -309,7 +308,7 @@ public class QueryRestController {
     *
     * @return list of {@link ValueAggregation}s as String array
     */
    @RequestMapping(value = PARAMETERS_ROOT_PATH + "/valueaggregations", method = {RequestMethod.GET},
    @RequestMapping(value = DomainConfig.PATH_PARAMETERS_ROOT + "/valueaggregations", method = {RequestMethod.GET},
        produces = {MediaType.APPLICATION_JSON_VALUE})
    public @ResponseBody List<ValueAggregation> getValueAggregations() {
        return Lists.newArrayList(ValueAggregation.values());
@@ -320,7 +319,7 @@ public class QueryRestController {
     *
     * @return list of {@link ColorModelType}s as String array
     */
    @RequestMapping(value = PARAMETERS_ROOT_PATH + "/colormodeltypes", method = {RequestMethod.GET},
    @RequestMapping(value = DomainConfig.PATH_PARAMETERS_ROOT + "/colormodeltypes", method = {RequestMethod.GET},
        produces = {MediaType.APPLICATION_JSON_VALUE})
    public @ResponseBody List<ColorModelType> getColorModelTypes() {
        return Lists.newArrayList(ColorModelType.values());
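The getBackendValues() change above is the core of the commit title: Stream.of(Backend.values()) implies Backend used to be an enum with a fixed set of constants, whereas Backend.getBackends() reads the set of registered backends at runtime, so backends become configuration rather than compiled-in constants. The endpoint then filters that set against the active backends, roughly (assumed semantics of the new API):

    final List<Backend> visible = Backend.getBackends().stream()
        .filter(activeBackends::contains)
        .collect(Collectors.toList());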
@@ -4,11 +4,14 @@ import java.util.ArrayList;
import java.util.LinkedHashSet;
import java.util.Set;

import javax.annotation.Resource;

import org.springframework.beans.BeansException;
import org.springframework.context.ApplicationContext;
import org.springframework.context.ApplicationContextAware;
import org.springframework.validation.Errors;
import org.springframework.validation.Validator;

import ch.psi.daq.domain.backend.Backend;
import ch.psi.daq.domain.config.DomainConfig;
import ch.psi.daq.domain.query.DAQQueries;
import ch.psi.daq.domain.query.DAQQuery;
import ch.psi.daq.domain.query.DAQQueryElement;
@@ -19,38 +22,38 @@ import ch.psi.daq.domain.query.transform.ValueTransformationSequence;
import ch.psi.daq.domain.request.Request;
import ch.psi.daq.queryrest.config.QueryRestConfig;

public class QueryValidator implements Validator {

    @Resource(name = QueryRestConfig.BEAN_NAME_DEFAULT_RESPONSE_FIELDS)
public class QueryValidator implements Validator, ApplicationContextAware {
    private Set<QueryField> defaultResponseFields;

    @Resource(name = QueryRestConfig.BEAN_NAME_DEFAULT_RESPONSE_AGGREGATIONS)
    private Set<Aggregation> defaultResponseAggregations;

    /**
     * {@inheritDoc}
     */
    @SuppressWarnings("unchecked")
    @Override
    public boolean supports(Class<?> clazz) {
    public void setApplicationContext(ApplicationContext context) throws BeansException {
        final Backend backend = context.getBean(DomainConfig.BEAN_NAME_BACKEND_DEFAULT, Backend.class);
        context = backend.getApplicationContext();

        defaultResponseFields = context.getBean(QueryRestConfig.BEAN_NAME_DEFAULT_RESPONSE_FIELDS, Set.class);
        defaultResponseAggregations = context.getBean(QueryRestConfig.BEAN_NAME_DEFAULT_RESPONSE_AGGREGATIONS, Set.class);
    }

    @Override
    public boolean supports(final Class<?> clazz) {
        return DAQQuery.class.isAssignableFrom(clazz) || DAQQueries.class.isAssignableFrom(clazz);
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public void validate(Object target, Errors errors) {
    public void validate(final Object target, final Errors errors) {
        if (target instanceof DAQQuery) {
            this.checkElement((DAQQuery) target, errors);
        } else if (target instanceof DAQQueries) {
            DAQQueries queries = (DAQQueries) target;
            for (DAQQueryElement daqQueryElement : queries) {
            final DAQQueries queries = (DAQQueries) target;
            for (final DAQQueryElement daqQueryElement : queries) {
                this.checkElement(daqQueryElement, errors);
            }
        }
    }

    private void checkElement(DAQQueryElement query, Errors errors) {
    private void checkElement(final DAQQueryElement query, final Errors errors) {
        // set default values (if not set)
        if (query.getFields() == null || query.getFields().isEmpty()) {
            query.setFields(new LinkedHashSet<>(defaultResponseFields));
@@ -58,9 +61,9 @@ public class QueryValidator implements Validator {

        if (query.getAggregation() != null) {
            // check if only one binning element is defined
            long durationPerBin = query.getAggregation().getDurationPerBin();
            long pulsesPerBin = query.getAggregation().getPulsesPerBin();
            int nrOfBins = query.getAggregation().getNrOfBins();
            final long durationPerBin = query.getAggregation().getDurationPerBin();
            final long pulsesPerBin = query.getAggregation().getPulsesPerBin();
            final int nrOfBins = query.getAggregation().getNrOfBins();
            if ((durationPerBin != Request.NOT_SET && (pulsesPerBin != Request.NOT_SET || nrOfBins != Request.NOT_SET))
                || (pulsesPerBin != Request.NOT_SET
                    && (durationPerBin != Request.NOT_SET || nrOfBins != Request.NOT_SET))
@@ -96,7 +99,7 @@ public class QueryValidator implements Validator {
            // without this field, json will not contain transformedValue
            query.addField(QueryField.transformedValue);

            for (ValueTransformationSequence transformationSequence : query.getValueTransformations()) {
            for (final ValueTransformationSequence transformationSequence : query.getValueTransformations()) {
                transformationSequence.setExecutionEnvironment(ExecutionEnvironment.QUERYING);
            }
        }
@@ -18,10 +18,10 @@ import ch.psi.daq.domain.query.channels.ChannelsResponse;

public interface QueryManager {

    List<ChannelsResponse> getChannels(ChannelsRequest request) throws Exception;
    List<ChannelsResponse> getChannels(final ChannelsRequest request) throws Exception;

    Collection<ChannelInfos> getChannelInfos(ChannelNameRequest request) throws Exception;
    Collection<ChannelInfos> getChannelInfos(final ChannelNameRequest request) throws Exception;

    List<Entry<DAQQueryElement, Stream<Triple<BackendQuery, ChannelName, ?>>>> getEvents(DAQQueries queries)
    List<Entry<DAQQueryElement, Stream<Triple<BackendQuery, ChannelName, ?>>>> getEvents(final DAQQueries queries)
        throws Exception;
}
@@ -8,18 +8,16 @@ import java.util.function.Function;
import java.util.stream.Collectors;
import java.util.stream.Stream;

import javax.annotation.PostConstruct;
import javax.annotation.PreDestroy;
import javax.annotation.Resource;

import org.apache.commons.lang3.tuple.Pair;
import org.apache.commons.lang3.tuple.Triple;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.BeansException;
import org.springframework.context.ApplicationContext;
import org.springframework.context.ApplicationContextAware;

import ch.psi.daq.domain.DataEvent;
import ch.psi.daq.domain.backend.BackendAccess;
import ch.psi.daq.domain.backend.Backend;
import ch.psi.daq.domain.config.DomainConfig;
import ch.psi.daq.domain.json.ChannelName;
import ch.psi.daq.domain.json.channels.info.ChannelInfos;
@@ -33,26 +31,23 @@ import ch.psi.daq.domain.query.channels.ChannelNameCache;
import ch.psi.daq.domain.query.channels.ChannelsRequest;
import ch.psi.daq.domain.query.channels.ChannelsResponse;
import ch.psi.daq.domain.query.processor.QueryProcessor;
import ch.psi.daq.query.config.QueryConfig;
import ch.psi.daq.queryrest.config.QueryRestConfig;
import ch.psi.daq.queryrest.query.model.ChannelInfosStreamImpl;

public class QueryManagerImpl implements QueryManager {
    @SuppressWarnings("unused")
    private static final Logger LOGGER = LoggerFactory.getLogger(QueryManagerImpl.class);

    @Resource
    private ApplicationContext appContext;

    @Resource
public class QueryManagerImpl implements QueryManager, ApplicationContextAware {
    private ChannelNameCache channelNameCache;
    private Function<BackendQuery, BackendQueryAnalyzer> queryAnalizerFactory;

    @Resource(name = DomainConfig.BEAN_NAME_BACKEND_ACCESS)
    private BackendAccess backendAccess;
    @SuppressWarnings("unchecked")
    @Override
    public void setApplicationContext(ApplicationContext context) throws BeansException {
        final Backend backend = context.getBean(DomainConfig.BEAN_NAME_BACKEND_DEFAULT, Backend.class);
        context = backend.getApplicationContext();

    @Resource(name = DomainConfig.BEAN_NAME_CHANNEL_NAME_CACHE)
    private ChannelNameCache channelNameCache;

    @PostConstruct
    public void afterPropertiesSet() {}
        channelNameCache = context.getBean(QueryConfig.BEAN_NAME_CHANNEL_NAME_CACHE, ChannelNameCache.class);
        queryAnalizerFactory = context.getBean(QueryRestConfig.BEAN_NAME_QUERY_ANALIZER_FACTORY, Function.class);
    }

    @PreDestroy
    public void destroy() {}
@@ -67,24 +62,21 @@ public class QueryManagerImpl implements QueryManager {
        return channelNameCache.getChannels(request);
    }

    public Collection<ChannelInfos> getChannelInfos(ChannelNameRequest request) {
    public Collection<ChannelInfos> getChannelInfos(final ChannelNameRequest request) {
        // set backends if not defined yet
        channelNameCache.configureBackends(request.getChannels());

        Stream<ChannelInfos> stream = request.getRequestsByBackend().entrySet().stream()
            .filter(entry ->
                backendAccess.hasDataReader(entry.getKey())
                    && backendAccess.hasChannelInfoReader(entry.getKey()))
        final Stream<ChannelInfos> stream = request.getRequestsByBackend().entrySet().stream()
            .filter(entry -> entry.getKey().getBackendAccess().hasDataReader()
                && entry.getKey().getBackendAccess().hasChannelInfoReader())
            .flatMap(entry -> {
                return entry.getValue().getChannelInfos(entry.getKey(), backendAccess)
                return entry.getValue().getChannelInfos(entry.getKey())
                    .entrySet().stream()
                    .map(innerEntry -> {
                        return new ChannelInfosStreamImpl(
                            new ChannelName(innerEntry.getKey(), entry.getKey()),
                            innerEntry.getValue()
                        );
                    }
                );
                            innerEntry.getValue());
                    });
            });

        // materialize
@@ -92,33 +84,32 @@ public class QueryManagerImpl implements QueryManager {
    }

    @Override
    public List<Entry<DAQQueryElement, Stream<Triple<BackendQuery, ChannelName, ?>>>> getEvents(DAQQueries queries) {
    public List<Entry<DAQQueryElement, Stream<Triple<BackendQuery, ChannelName, ?>>>> getEvents(
        final DAQQueries queries) {
        // set backends if not defined yet
        channelNameCache.configureBackends(queries);

        List<Entry<DAQQueryElement, Stream<Triple<BackendQuery, ChannelName, ?>>>> results =
        final List<Entry<DAQQueryElement, Stream<Triple<BackendQuery, ChannelName, ?>>>> results =
            new ArrayList<>(queries.getQueries().size());

        for (DAQQueryElement queryElement : queries) {
        for (final DAQQueryElement queryElement : queries) {
            Stream<Triple<BackendQuery, ChannelName, ?>> resultStreams =
                BackendQueryImpl
                    .getBackendQueries(queryElement)
                    .stream()
                    .filter(
                        query ->
                            backendAccess.hasDataReader(query.getBackend())
                                && backendAccess.hasQueryProcessor(query.getBackend())
                    )
                        query -> query.getBackend().getBackendAccess().hasDataReader()
                            && query.getBackend().getBackendAccess().hasQueryProcessor())
                    .flatMap(
                        query -> {
                            QueryProcessor processor = backendAccess.getQueryProcessor(query.getBackend());
                            BackendQueryAnalyzer queryAnalizer = queryAnalizerFactory.apply(query);
                            final QueryProcessor processor = query.getBackend().getBackendAccess().getQueryProcessor();
                            final BackendQueryAnalyzer queryAnalizer = queryAnalizerFactory.apply(query);

                            /* all the magic happens here */
                            Stream<Entry<ChannelName, Stream<? extends DataEvent>>> channelToDataEvents =
                            final Stream<Entry<ChannelName, Stream<? extends DataEvent>>> channelToDataEvents =
                                processor.process(queryAnalizer);
                            /* do post-process */
                            Stream<Entry<ChannelName, ?>> channelToData =
                            final Stream<Entry<ChannelName, ?>> channelToData =
                                queryAnalizer.postProcess(channelToDataEvents);

                            return channelToData.map(entry -> {
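Throughout QueryManagerImpl, capability checks move from a centrally injected BackendAccess onto the Backend itself, consistent with each backend carrying its own context and services:

    // before: central registry, keyed by backend
    backendAccess.hasDataReader(query.getBackend());
    // after: the backend exposes its own access object
    query.getBackend().getBackendAccess().hasDataReader();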
@@ -15,7 +15,7 @@ public class ChannelInfosStreamImpl implements ChannelInfos {

    public ChannelInfosStreamImpl() {}

    public ChannelInfosStreamImpl(ChannelName channel, Stream<? extends ChannelInfo> infos) {
    public ChannelInfosStreamImpl(final ChannelName channel, final Stream<? extends ChannelInfo> infos) {
        this.channel = channel;
        this.infos = infos;
    }
@@ -15,12 +15,13 @@ import ch.psi.daq.domain.query.response.ResponseImpl;

public abstract class AbstractHTTPResponse extends ResponseImpl {

    public AbstractHTTPResponse(ResponseFormat format) {
    public AbstractHTTPResponse(final ResponseFormat format) {
        super(format);
    }

    @JsonIgnore
    public abstract void respond(ApplicationContext context, DAQQueries queries, HttpServletResponse httpResponse) throws Exception;
    public abstract void respond(final ApplicationContext context, final DAQQueries queries,
        HttpServletResponse httpResponse) throws Exception;

    /**
     * Configures the output stream and headers according to whether compression is wanted or not.
@@ -38,15 +39,15 @@ public abstract class AbstractHTTPResponse extends ResponseImpl {
     * @throws Exception Something goes wrong
     */
    @JsonIgnore
    protected OutputStream handleCompressionAndResponseHeaders(HttpServletResponse httpResponse,
        String contentType) throws Exception {
    protected OutputStream handleCompressionAndResponseHeaders(final HttpServletResponse httpResponse,
        final String contentType) throws Exception {
        OutputStream out = httpResponse.getOutputStream();

        httpResponse.setCharacterEncoding(JsonEncoding.UTF8.getJavaName());
        httpResponse.setContentType(contentType);

        httpResponse.addHeader("Content-Type", contentType);
        String filename = "data." + this.getFileSuffix();
        final String filename = "data." + this.getFileSuffix();
        httpResponse.addHeader("Content-Disposition", "attachment; filename=" + filename);

        if (this.isCompressed()) {
@@ -22,8 +22,9 @@ public interface ResponseStreamWriter {
     *
     * @param results The results
     * @param out The OutputStream
     * @param response The Response
     * @param response The Response
     * @throws Exception thrown if writing to the output stream fails
     */
    public void respond(List<Entry<DAQQueryElement, Stream<Triple<BackendQuery, ChannelName, ?>>>> results, OutputStream out, Response response) throws Exception;
    public void respond(final List<Entry<DAQQueryElement, Stream<Triple<BackendQuery, ChannelName, ?>>>> results,
        final OutputStream out, final Response response) throws Exception;
}
@@ -6,19 +6,19 @@ import java.util.function.ToDoubleFunction;
import ch.psi.daq.domain.DataEvent;

public class AggregationStringifyer implements Function<DataEvent, String> {
    private ToDoubleFunction<DataEvent> accessor;
    private String nonValue;
    private final ToDoubleFunction<DataEvent> accessor;
    private final String nonValue;

    public AggregationStringifyer(ToDoubleFunction<DataEvent> accessor, String nonValue) {
    public AggregationStringifyer(final ToDoubleFunction<DataEvent> accessor, final String nonValue) {
        this.accessor = accessor;
        this.nonValue = nonValue;
    }

    @Override
    public String apply(DataEvent event) {
    public String apply(final DataEvent event) {
        if (event == null) {
            return nonValue;
        }else{
        } else {
            return Double.toString(accessor.applyAsDouble(event));
        }
    }
@@ -36,15 +36,15 @@ public class CSVHTTPResponse extends AbstractHTTPResponse {
        super(ResponseFormat.CSV);
    }

    public CSVHTTPResponse(Compression compression) {
    public CSVHTTPResponse(final Compression compression) {
        this();
        setCompression(compression);
    }

    @Override
    public void respond(ApplicationContext context, DAQQueries queries, HttpServletResponse httpResponse)
    public void respond(final ApplicationContext context, final DAQQueries queries, final HttpServletResponse httpResponse)
        throws Exception {
        OutputStream out = handleCompressionAndResponseHeaders(httpResponse, CONTENT_TYPE);
        final OutputStream out = handleCompressionAndResponseHeaders(httpResponse, CONTENT_TYPE);

        // do csv specific validations
        validateQueries(queries);
@@ -52,11 +52,11 @@ public class CSVHTTPResponse extends AbstractHTTPResponse {
        try {
            LOGGER.debug("Executing query '{}'", queries);

            QueryManager queryManager = context.getBean(QueryManager.class);
            CSVResponseStreamWriter streamWriter = context.getBean(CSVResponseStreamWriter.class);
            final QueryManager queryManager = context.getBean(QueryManager.class);
            final CSVResponseStreamWriter streamWriter = context.getBean(CSVResponseStreamWriter.class);

            // execute query
            List<Entry<DAQQueryElement, Stream<Triple<BackendQuery, ChannelName, ?>>>> result =
            final List<Entry<DAQQueryElement, Stream<Triple<BackendQuery, ChannelName, ?>>>> result =
                queryManager.getEvents(queries);
            // write the response back to the client using java 8 streams
            streamWriter.respond(result, out, this);
@@ -66,14 +66,14 @@ public class CSVHTTPResponse extends AbstractHTTPResponse {
        }
    }

    protected void validateQueries(DAQQueries queries) {
        for (DAQQueryElement query : queries) {
    protected void validateQueries(final DAQQueries queries) {
        for (final DAQQueryElement query : queries) {
            if (!(query.getAggregation() == null || AggregationType.value.equals(query.getAggregation()
                .getAggregationType()))) {
                // We allow only no aggregation or value aggregation as
                // extrema: nested structure and not clear how to map it to one line
                // index: value is an array of Statistics whose size is not clear at initialization time
                String message = "CSV export does not support '" + query.getAggregation().getAggregationType() + "'";
                final String message = "CSV export does not support '" + query.getAggregation().getAggregationType() + "'";
                LOGGER.warn(message);
                throw new IllegalArgumentException(message);
            }
@@ -19,7 +19,6 @@ import java.util.function.Function;
import java.util.function.ToLongFunction;
import java.util.stream.Stream;

import javax.annotation.Resource;
import javax.servlet.ServletResponse;

import org.apache.commons.csv.CSVFormat;
@@ -28,7 +27,9 @@ import org.apache.commons.lang3.tuple.Pair;
import org.apache.commons.lang3.tuple.Triple;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.BeansException;
import org.springframework.context.ApplicationContext;
import org.springframework.context.ApplicationContextAware;

import ch.psi.daq.common.stream.StreamIterable;
import ch.psi.daq.common.stream.match.MapCreator;
@@ -36,23 +37,26 @@ import ch.psi.daq.common.stream.match.MapFiller;
import ch.psi.daq.common.stream.match.Padder;
import ch.psi.daq.common.stream.match.StreamMatcher;
import ch.psi.daq.domain.DataEvent;
import ch.psi.daq.domain.backend.Backend;
import ch.psi.daq.domain.config.DomainConfig;
import ch.psi.daq.domain.json.ChannelName;
import ch.psi.daq.domain.query.DAQQueryElement;
import ch.psi.daq.domain.query.backend.BackendQuery;
import ch.psi.daq.domain.query.backend.analyzer.BackendQueryAnalyzer;
import ch.psi.daq.domain.query.mapping.Mapping;
import ch.psi.daq.domain.query.mapping.IncompleteStrategy;
import ch.psi.daq.domain.query.mapping.Mapping;
import ch.psi.daq.domain.query.operation.Aggregation;
import ch.psi.daq.domain.query.operation.Extrema;
import ch.psi.daq.domain.query.operation.QueryField;
import ch.psi.daq.domain.query.response.Response;
import ch.psi.daq.queryrest.config.QueryRestConfig;
import ch.psi.daq.queryrest.response.ResponseStreamWriter;

/**
 * Takes a Java 8 stream and writes it to the output stream provided by the {@link ServletResponse}
 * of the current request.
 */
public class CSVResponseStreamWriter implements ResponseStreamWriter {
public class CSVResponseStreamWriter implements ResponseStreamWriter, ApplicationContextAware {
    private static final Logger LOGGER = LoggerFactory.getLogger(CSVResponseStreamWriter.class);

    public static final Mapping DEFAULT_MAPPING = new Mapping(IncompleteStrategy.FILL_NULL);
@@ -67,12 +71,18 @@ public class CSVResponseStreamWriter implements ResponseStreamWriter {
    // buckets.
    private static final ToLongFunction<DataEvent> MATCHER_PROVIDER = (event) -> event.getGlobalMillis() / 10L;

    @Resource
    private ApplicationContext context;

    @Resource
    private Function<BackendQuery, BackendQueryAnalyzer> queryAnalizerFactory;

    @SuppressWarnings("unchecked")
    @Override
    public void setApplicationContext(ApplicationContext context) throws BeansException {
        final Backend backend = context.getBean(DomainConfig.BEAN_NAME_BACKEND_DEFAULT, Backend.class);
        context = backend.getApplicationContext();

        queryAnalizerFactory = context.getBean(QueryRestConfig.BEAN_NAME_QUERY_ANALIZER_FACTORY, Function.class);
    }

    @SuppressWarnings("unchecked")
    @Override
    public void respond(final List<Entry<DAQQueryElement, Stream<Triple<BackendQuery, ChannelName, ?>>>> results,
        final OutputStream out, final Response response) throws Exception {
@@ -80,7 +90,7 @@ public class CSVResponseStreamWriter implements ResponseStreamWriter {
            throw new IllegalStateException("CSV format does not allow for multiple queries.");
        }

        AtomicReference<Exception> exception = new AtomicReference<>();
        final AtomicReference<Exception> exception = new AtomicReference<>();

        final Map<ChannelName, Stream<DataEvent>> streams = new LinkedHashMap<>(results.size());
        final List<String> header = new ArrayList<>();
@@ -110,11 +120,11 @@ public class CSVResponseStreamWriter implements ResponseStreamWriter {
            });
        });

        Mapping mapping = daqQueryRef.get().getMappingOrDefault(DEFAULT_MAPPING);
        Padder<ChannelName, DataEvent> padder = mapping.getIncomplete().getPadder(context, backendQueryRef.get());
        final Mapping mapping = daqQueryRef.get().getMappingOrDefault(DEFAULT_MAPPING);
        final Padder<ChannelName, DataEvent> padder = mapping.getIncomplete().getPadder(backendQueryRef.get());

        // online matching of the stream's content
        StreamMatcher<ChannelName, DataEvent, Map<ChannelName, DataEvent>> streamMatcher =
        final StreamMatcher<ChannelName, DataEvent, Map<ChannelName, DataEvent>> streamMatcher =
            new StreamMatcher<>(
                KEY_PROVIDER,
                MATCHER_PROVIDER,
@@ -123,10 +133,10 @@ public class CSVResponseStreamWriter implements ResponseStreamWriter {
                null,
                padder,
                streams.values());
        Iterator<Map<ChannelName, DataEvent>> streamsMatchIter = streamMatcher.iterator();
        final Iterator<Map<ChannelName, DataEvent>> streamsMatchIter = streamMatcher.iterator();

        // prepare csv output
        CSVFormat csvFormat = CSVFormat.EXCEL.withDelimiter(DELIMITER_CVS);
        final CSVFormat csvFormat = CSVFormat.EXCEL.withDelimiter(DELIMITER_CVS);
        Writer writer = null;
        CSVPrinter csvFilePrinter = null;

@@ -142,7 +152,7 @@ public class CSVResponseStreamWriter implements ResponseStreamWriter {
            final Map<ChannelName, DataEvent> match = streamsMatchIter.next();

            // ensure correct order
            Stream<String> rowStream = accessors.stream().sequential()
            final Stream<String> rowStream = accessors.stream().sequential()
                .map(accessorPair -> {
                    DataEvent event = match.get(accessorPair.getKey());
                    if (event != null) {
@@ -187,18 +197,19 @@ public class CSVResponseStreamWriter implements ResponseStreamWriter {
    }

    private void setupChannelColumns(DAQQueryElement daqQuery, BackendQuery backendQuery, ChannelName channelName,
        Collection<String> header, Collection<Pair<ChannelName, Function<DataEvent, String>>> accessors) {
        Set<QueryField> queryFields = daqQuery.getFields();
        List<Aggregation> aggregations =
    private void setupChannelColumns(final DAQQueryElement daqQuery, final BackendQuery backendQuery,
        final ChannelName channelName,
        final Collection<String> header, Collection<Pair<ChannelName, Function<DataEvent, String>>> accessors) {
        final Set<QueryField> queryFields = daqQuery.getFields();
        final List<Aggregation> aggregations =
            daqQuery.getAggregation() != null ? daqQuery.getAggregation().getAggregations() : null;
        List<Extrema> extrema = daqQuery.getAggregation() != null ? daqQuery.getAggregation().getExtrema() : null;
        final List<Extrema> extrema = daqQuery.getAggregation() != null ? daqQuery.getAggregation().getExtrema() : null;

        BackendQueryAnalyzer queryAnalyzer = queryAnalizerFactory.apply(backendQuery);
        final BackendQueryAnalyzer queryAnalyzer = queryAnalizerFactory.apply(backendQuery);

        for (QueryField field : queryFields) {
        for (final QueryField field : queryFields) {
            if (!(QueryField.value.equals(field) && queryAnalyzer.isAggregationEnabled())) {
                StringBuilder buf = new StringBuilder(3)
                final StringBuilder buf = new StringBuilder(3)
                    .append(channelName.getName())
                    .append(DELIMITER_CHANNELNAME_FIELDNAME)
                    .append(field.name());
@@ -210,8 +221,8 @@ public class CSVResponseStreamWriter implements ResponseStreamWriter {
        }

        if (aggregations != null && queryAnalyzer.isAggregationEnabled()) {
            for (Aggregation aggregation : aggregations) {
                StringBuilder buf = new StringBuilder(5)
            for (final Aggregation aggregation : aggregations) {
                final StringBuilder buf = new StringBuilder(5)
                    .append(channelName.getName())
                    .append(DELIMITER_CHANNELNAME_FIELDNAME)
                    .append(QueryField.value.name())
@@ -224,11 +235,11 @@ public class CSVResponseStreamWriter implements ResponseStreamWriter {
        }

        if (extrema != null && queryAnalyzer.isAggregationEnabled()) {
            for (Extrema extremum : extrema) {
                for (QueryField field : queryFields) {
                    Function<DataEvent, Object> accessor = extremum.getAccessor(field);
            for (final Extrema extremum : extrema) {
                for (final QueryField field : queryFields) {
                    final Function<DataEvent, Object> accessor = extremum.getAccessor(field);
                    if (accessor != null) {
                        StringBuilder buf = new StringBuilder(7)
                        final StringBuilder buf = new StringBuilder(7)
                            .append(channelName.getName())
                            .append(DELIMITER_CHANNELNAME_FIELDNAME)
                            .append(FIELDNAME_EXTREMA)
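In the same spirit, the padder lookup in the CSV writer drops its ApplicationContext argument; the BackendQuery alone now suffices, since the backend reachable from it carries its own context:

    // before
    mapping.getIncomplete().getPadder(context, backendQueryRef.get());
    // after
    mapping.getIncomplete().getPadder(backendQueryRef.get());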
@@ -37,22 +37,22 @@ public class JSONHTTPResponse extends AbstractHTTPResponse {
        super(ResponseFormat.JSON);
    }

    public JSONHTTPResponse(Compression compression) {
    public JSONHTTPResponse(final Compression compression) {
        this();
        setCompression(compression);
    }

    @Override
    public void respond(ApplicationContext context, DAQQueries queries, HttpServletResponse response) throws Exception {
        OutputStream out = handleCompressionAndResponseHeaders(response, CONTENT_TYPE);
    public void respond(final ApplicationContext context, final DAQQueries queries, final HttpServletResponse response) throws Exception {
        final OutputStream out = handleCompressionAndResponseHeaders(response, CONTENT_TYPE);

        boolean hasMapping = JSONHTTPResponse.validateQueries(queries);
        final boolean hasMapping = JSONHTTPResponse.validateQueries(queries);

        try {
            LOGGER.debug("Executing query '{}'", queries);

            QueryManager queryManager = context.getBean(QueryManager.class);
            ResponseStreamWriter streamWriter;
            final QueryManager queryManager = context.getBean(QueryManager.class);
            final ResponseStreamWriter streamWriter;
            if (hasMapping) {
                streamWriter = context.getBean(JSONTableResponseStreamWriter.class);
            } else {
@@ -60,7 +60,7 @@ public class JSONHTTPResponse extends AbstractHTTPResponse {
            }

            // execute query
            List<Entry<DAQQueryElement, Stream<Triple<BackendQuery, ChannelName, ?>>>> result =
            final List<Entry<DAQQueryElement, Stream<Triple<BackendQuery, ChannelName, ?>>>> result =
                queryManager.getEvents(queries);
            // write the response back to the client using java 8 streams
            streamWriter.respond(result, out, this);
@@ -71,10 +71,10 @@ public class JSONHTTPResponse extends AbstractHTTPResponse {
    }

    public static boolean validateQueries(DAQQueries queries) {
    public static boolean validateQueries(final DAQQueries queries) {
        boolean hasMapping = false;

        for (DAQQueryElement query : queries) {
        for (final DAQQueryElement query : queries) {
            if (query.getMapping() != null) {
                hasMapping = true;
@@ -8,12 +8,14 @@ import java.util.Set;
import java.util.concurrent.atomic.AtomicReference;
import java.util.stream.Stream;

import javax.annotation.Resource;
import javax.servlet.ServletResponse;

import org.apache.commons.lang3.tuple.Triple;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.BeansException;
import org.springframework.context.ApplicationContext;
import org.springframework.context.ApplicationContextAware;

import com.fasterxml.jackson.core.JsonEncoding;
import com.fasterxml.jackson.core.JsonFactory;
@@ -23,6 +25,8 @@ import com.fasterxml.jackson.databind.ObjectWriter;
import com.fasterxml.jackson.databind.ser.impl.SimpleBeanPropertyFilter;
import com.fasterxml.jackson.databind.ser.impl.SimpleFilterProvider;

import ch.psi.daq.domain.backend.Backend;
import ch.psi.daq.domain.config.DomainConfig;
import ch.psi.daq.domain.json.ChannelName;
import ch.psi.daq.domain.query.DAQQueryElement;
import ch.psi.daq.domain.query.backend.BackendQuery;
@@ -30,44 +34,52 @@ import ch.psi.daq.domain.query.operation.Aggregation;
import ch.psi.daq.domain.query.operation.Extrema;
import ch.psi.daq.domain.query.operation.QueryField;
import ch.psi.daq.domain.query.response.Response;
import ch.psi.daq.queryrest.config.QueryRestConfig;
import ch.psi.daq.queryrest.response.ResponseStreamWriter;

/**
 * Takes a Java 8 stream and writes it to the output stream provided by the {@link ServletResponse}
 * of the current request.
 */
public class JSONResponseStreamWriter implements ResponseStreamWriter {
public class JSONResponseStreamWriter implements ResponseStreamWriter, ApplicationContextAware {

    public static final String DATA_RESP_FIELD = "data";

    private static final Logger LOGGER = LoggerFactory.getLogger(JSONResponseStreamWriter.class);

    @Resource
    private JsonFactory jsonFactory;

    @Resource
    private ObjectMapper mapper;
    private JsonFactory factory;

    @Override
    public void setApplicationContext(ApplicationContext context) throws BeansException {
        final Backend backend = context.getBean(DomainConfig.BEAN_NAME_BACKEND_DEFAULT, Backend.class);
        context = backend.getApplicationContext();

        mapper = context.getBean(DomainConfig.BEAN_NAME_OBJECT_MAPPER, ObjectMapper.class);
        factory = context.getBean(QueryRestConfig.BEAN_NAME_JSON_FACTORY, JsonFactory.class);
    }

    @Override
    public void respond(final List<Entry<DAQQueryElement, Stream<Triple<BackendQuery, ChannelName, ?>>>> results,
        final OutputStream out, final Response response) throws Exception {
        respond(jsonFactory, mapper, results, out, response);
        respond(factory, mapper, results, out, response);
    }

    public static Set<String> getFields(DAQQueryElement query, boolean removeIdentifiers) {
        Set<QueryField> queryFields = query.getFields();
        List<Aggregation> aggregations = query.getAggregation() != null ? query.getAggregation().getAggregations() : null;
        List<Extrema> extrema = query.getAggregation() != null ? query.getAggregation().getExtrema() : null;
    public static Set<String> getFields(final DAQQueryElement query, final boolean removeIdentifiers) {
        final Set<QueryField> queryFields = query.getFields();
        final List<Aggregation> aggregations =
            query.getAggregation() != null ? query.getAggregation().getAggregations() : null;
        final List<Extrema> extrema = query.getAggregation() != null ? query.getAggregation().getExtrema() : null;

        Set<String> includedFields =
        final Set<String> includedFields =
            new LinkedHashSet<String>(queryFields.size() + (aggregations != null ? aggregations.size() : 0)
                + (extrema != null ? extrema.size() : 0));

        for (QueryField field : queryFields) {
        for (final QueryField field : queryFields) {
            includedFields.add(field.name());
        }
        if (aggregations != null) {
            for (Aggregation aggregation : aggregations) {
            for (final Aggregation aggregation : aggregations) {
                includedFields.add(aggregation.name());
            }
        }
@@ -85,11 +97,12 @@ public class JSONResponseStreamWriter implements ResponseStreamWriter {

        return includedFields;
    }

    public static void respond(final JsonFactory factory, final ObjectMapper mapper, final List<Entry<DAQQueryElement, Stream<Triple<BackendQuery, ChannelName, ?>>>> results,

    public static void respond(final JsonFactory factory, final ObjectMapper mapper,
        final List<Entry<DAQQueryElement, Stream<Triple<BackendQuery, ChannelName, ?>>>> results,
        final OutputStream out, final Response response) throws Exception {
        AtomicReference<Exception> exception = new AtomicReference<>();
        JsonGenerator generator = factory.createGenerator(out, JsonEncoding.UTF8);
        final AtomicReference<Exception> exception = new AtomicReference<>();
        final JsonGenerator generator = factory.createGenerator(out, JsonEncoding.UTF8);

        try {
            if (results.size() > 1) {
@@ -98,9 +111,9 @@ public class JSONResponseStreamWriter implements ResponseStreamWriter {

            results
                .forEach(entryy -> {
                    DAQQueryElement daqQuery = entryy.getKey();
                    Set<String> includedFields = getFields(daqQuery, true);
                    ObjectWriter writer = configureWriter(includedFields, mapper);
                    final DAQQueryElement daqQuery = entryy.getKey();
                    final Set<String> includedFields = getFields(daqQuery, true);
                    final ObjectWriter writer = configureWriter(includedFields, mapper);

                    try {
                        generator.writeStartArray();
@@ -121,9 +134,9 @@ public class JSONResponseStreamWriter implements ResponseStreamWriter {
                        LOGGER.error("Could not write channel name of channel '{}'", triple.getMiddle(),
                            e);
                        exception.compareAndSet(null, e);
                    } finally{
                        if(triple.getRight() instanceof Stream){
                            ((Stream<?>)(triple.getRight())).close();
                    } finally {
                        if (triple.getRight() instanceof Stream) {
                            ((Stream<?>) (triple.getRight())).close();
                        }
                    }
                });
@@ -132,7 +145,7 @@ public class JSONResponseStreamWriter implements ResponseStreamWriter {
                    } catch (Exception e) {
                        LOGGER.error("Exception while writing json for '{}'", daqQuery.getChannels(), e);
                        exception.compareAndSet(null, e);
                    }
                }
            });
        } finally {
            if (results.size() > 1) {
@@ -157,11 +170,11 @@ public class JSONResponseStreamWriter implements ResponseStreamWriter {
     * @param mapper The ObjectMapper
     * @return the configured writer that includes the specified fields
     */
    public static ObjectWriter configureWriter(Set<String> includedFields, ObjectMapper mapper) {
        SimpleFilterProvider propertyFilter = new SimpleFilterProvider();
    public static ObjectWriter configureWriter(final Set<String> includedFields, final ObjectMapper mapper) {
        final SimpleFilterProvider propertyFilter = new SimpleFilterProvider();
        propertyFilter.addFilter("namedPropertyFilter", SimpleBeanPropertyFilter.filterOutAllExcept(includedFields));
        // only write the properties not excluded in the filter
        ObjectWriter writer = mapper.writer(propertyFilter);
        final ObjectWriter writer = mapper.writer(propertyFilter);
        return writer;
    }
}
@@ -15,14 +15,14 @@ import java.util.function.ToLongFunction;
import java.util.stream.Collectors;
import java.util.stream.Stream;

import javax.annotation.PostConstruct;
import javax.annotation.Resource;
import javax.servlet.ServletResponse;

import org.apache.commons.lang3.tuple.Triple;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.BeansException;
import org.springframework.context.ApplicationContext;
import org.springframework.context.ApplicationContextAware;

import com.fasterxml.jackson.core.JsonEncoding;
import com.fasterxml.jackson.core.JsonFactory;
@@ -36,14 +36,16 @@ import ch.psi.daq.common.stream.match.Padder;
import ch.psi.daq.common.stream.match.StreamMatcher;
import ch.psi.daq.common.time.TimeUtils;
import ch.psi.daq.domain.DataEvent;
import ch.psi.daq.domain.backend.Backend;
import ch.psi.daq.domain.config.DomainConfig;
import ch.psi.daq.domain.json.ChannelName;
import ch.psi.daq.domain.query.DAQQueryElement;
import ch.psi.daq.domain.query.backend.BackendQuery;
import ch.psi.daq.domain.query.bin.BinningStrategy;
import ch.psi.daq.domain.query.bin.strategy.BinningStrategyPerBinPulse;
import ch.psi.daq.domain.query.bin.strategy.BinningStrategyPerBinTime;
import ch.psi.daq.domain.query.mapping.Mapping;
import ch.psi.daq.domain.query.mapping.IncompleteStrategy;
import ch.psi.daq.domain.query.mapping.Mapping;
import ch.psi.daq.domain.query.operation.Aggregation;
import ch.psi.daq.domain.query.operation.QueryField;
import ch.psi.daq.domain.query.response.Response;
@@ -56,7 +58,7 @@ import ch.psi.daq.queryrest.response.ResponseStreamWriter;
 * Takes a Java 8 stream and writes it to the output stream provided by the {@link ServletResponse}
 * of the current request.
 */
public class JSONTableResponseStreamWriter implements ResponseStreamWriter {
public class JSONTableResponseStreamWriter implements ResponseStreamWriter, ApplicationContextAware {

    private static final Logger LOGGER = LoggerFactory.getLogger(JSONTableResponseStreamWriter.class);

@@ -69,24 +71,24 @@ public class JSONTableResponseStreamWriter implements ResponseStreamWriter {
    private static final ToLongFunction<DataEvent> MATCHER_PROVIDER = (event) -> event.getGlobalMillis()
        / MILLIS_PER_PULSE;

    @Resource
    private ApplicationContext context;

    @Resource
    private JsonFactory jsonFactory;

    @Resource
    private ObjectMapper mapper;

    @Resource(name = QueryRestConfig.BEAN_NAME_DEFAULT_RESPONSE_AGGREGATIONS)
    private Set<Aggregation> defaultResponseAggregations;

    private JsonFactory factory;
    // In case ArchiverAppliance had several events within the 10ms mapping interval, return these
    // aggregations
    private Set<String> defaultResponseAggregationsStr;

    @PostConstruct
    public void afterPropertiesSet() {
    @SuppressWarnings("unchecked")
    @Override
    public void setApplicationContext(ApplicationContext context) throws BeansException {
        final Backend backend = context.getBean(DomainConfig.BEAN_NAME_BACKEND_DEFAULT, Backend.class);
        context = backend.getApplicationContext();

        mapper = context.getBean(DomainConfig.BEAN_NAME_OBJECT_MAPPER, ObjectMapper.class);
        factory = context.getBean(QueryRestConfig.BEAN_NAME_JSON_FACTORY, JsonFactory.class);

        final Set<Aggregation> defaultResponseAggregations =
            context.getBean(QueryRestConfig.BEAN_NAME_DEFAULT_RESPONSE_AGGREGATIONS, Set.class);
        defaultResponseAggregationsStr =
            defaultResponseAggregations.stream().map(Aggregation::name)
                .collect(Collectors.toCollection(LinkedHashSet::new));
@@ -95,16 +97,17 @@ public class JSONTableResponseStreamWriter implements ResponseStreamWriter {
    @Override
    public void respond(final List<Entry<DAQQueryElement, Stream<Triple<BackendQuery, ChannelName, ?>>>> results,
        final OutputStream out, final Response response) throws Exception {
        respond(context, jsonFactory, mapper, defaultResponseAggregationsStr, results, out, response);
        respond(factory, mapper, defaultResponseAggregationsStr, results, out, response);
    }

    public static void respond(final ApplicationContext context, final JsonFactory factory,
|
||||
@SuppressWarnings("unchecked")
|
||||
public static void respond(final JsonFactory factory,
|
||||
final ObjectMapper mapper, final Set<String> defaultResponseAggregationsStr,
|
||||
final List<Entry<DAQQueryElement, Stream<Triple<BackendQuery, ChannelName, ?>>>> results,
|
||||
final OutputStream out, final Response response) throws Exception {
|
||||
AtomicReference<Exception> exception = new AtomicReference<>();
|
||||
JsonGenerator generator = factory.createGenerator(out, JsonEncoding.UTF8);
|
||||
final AtomicReference<Exception> exception = new AtomicReference<>();
|
||||
final JsonGenerator generator = factory.createGenerator(out, JsonEncoding.UTF8);
|
||||
|
||||
try {
|
||||
if (results.size() > 1) {
|
||||
@ -113,8 +116,8 @@ public class JSONTableResponseStreamWriter implements ResponseStreamWriter {
|
||||
|
||||
results
|
||||
.forEach(entryy -> {
|
||||
DAQQueryElement daqQuery = entryy.getKey();
|
||||
Set<String> includedFields = JSONResponseStreamWriter.getFields(daqQuery, false);
|
||||
final DAQQueryElement daqQuery = entryy.getKey();
|
||||
final Set<String> includedFields = JSONResponseStreamWriter.getFields(daqQuery, false);
|
||||
/* make sure identifiers are available */
|
||||
includedFields.add(QueryField.channel.name());
|
||||
includedFields.add(QueryField.backend.name());
|
||||
@ -123,7 +126,7 @@ public class JSONTableResponseStreamWriter implements ResponseStreamWriter {
|
||||
includedFields.addAll(defaultResponseAggregationsStr);
|
||||
}
|
||||
|
||||
ObjectWriter writer = JSONResponseStreamWriter.configureWriter(includedFields, mapper);
|
||||
final ObjectWriter writer = JSONResponseStreamWriter.configureWriter(includedFields, mapper);
|
||||
|
||||
/* get DataEvent stream of sub-queries for later match */
|
||||
final Map<ChannelName, Stream<DataEvent>> streams =
|
||||
@ -147,12 +150,12 @@ public class JSONTableResponseStreamWriter implements ResponseStreamWriter {
|
||||
}
|
||||
});
|
||||
|
||||
BackendQuery backendQuery = backendQueryRef.get();
|
||||
RequestRange requestRange = backendQuery.getRequest().getRequestRange();
|
||||
final BackendQuery backendQuery = backendQueryRef.get();
|
||||
final RequestRange requestRange = backendQuery.getRequest().getRequestRange();
|
||||
BinningStrategy binningStrategy = backendQuery.getBinningStrategy();
|
||||
|
||||
Mapping mapping = daqQuery.getMappingOrDefault(DEFAULT_MAPPING);
|
||||
Padder<ChannelName, DataEvent> padder = mapping.getIncomplete().getPadder(context, backendQuery);
|
||||
final Mapping mapping = daqQuery.getMappingOrDefault(DEFAULT_MAPPING);
|
||||
final Padder<ChannelName, DataEvent> padder = mapping.getIncomplete().getPadder(backendQuery);
|
||||
|
||||
ToLongFunction<DataEvent> matchProvider = binningStrategy;
|
||||
if (binningStrategy == null) {
|
||||
@ -162,7 +165,7 @@ public class JSONTableResponseStreamWriter implements ResponseStreamWriter {
|
||||
} else if (requestRange.isTimeRangeDefined()) {
|
||||
binningStrategy = new BinningStrategyPerBinTime(MILLIS_PER_PULSE);
|
||||
} else {
|
||||
String message = "Either time or pulseId range must be defined by the query!";
|
||||
final String message = "Either time or pulseId range must be defined by the query!";
|
||||
LOGGER.error(message);
|
||||
throw new IllegalStateException(message);
|
||||
}
|
||||
@ -170,7 +173,7 @@ public class JSONTableResponseStreamWriter implements ResponseStreamWriter {
|
||||
binningStrategy.setRequestRange(requestRange);
|
||||
|
||||
/* online matching of the stream's content */
|
||||
StreamMatcher<ChannelName, DataEvent, List<DataEvent>> streamMatcher =
|
||||
final StreamMatcher<ChannelName, DataEvent, List<DataEvent>> streamMatcher =
|
||||
new StreamMatcher<>(
|
||||
KEY_PROVIDER,
|
||||
matchProvider,
|
||||
@ -179,7 +182,7 @@ public class JSONTableResponseStreamWriter implements ResponseStreamWriter {
|
||||
new BinnedValueCombiner(binningStrategy),
|
||||
padder,
|
||||
streams.values());
|
||||
Iterator<List<DataEvent>> streamsMatchIter = streamMatcher.iterator();
|
||||
final Iterator<List<DataEvent>> streamsMatchIter = streamMatcher.iterator();
|
||||
|
||||
try {
|
||||
generator.writeStartObject();
|
||||
@ -195,7 +198,8 @@ public class JSONTableResponseStreamWriter implements ResponseStreamWriter {
|
||||
streamMatcher.close();
|
||||
} catch (Throwable t) {
|
||||
LOGGER.error(
|
||||
"Something went wrong while closing stream matcher for JSON table response writer.", t);
|
||||
"Something went wrong while closing stream matcher for JSON table response writer.",
|
||||
t);
|
||||
}
|
||||
}
|
||||
}
|
||||
@ -217,8 +221,8 @@ public class JSONTableResponseStreamWriter implements ResponseStreamWriter {
|
||||
}
|
||||
}
|
||||
|
||||
private static boolean containsAggregation(Set<String> includedFields) {
|
||||
for (Aggregation aggregation : Aggregation.values()) {
|
||||
private static boolean containsAggregation(final Set<String> includedFields) {
|
||||
for (final Aggregation aggregation : Aggregation.values()) {
|
||||
if (includedFields.contains(aggregation.name())) {
|
||||
return true;
|
||||
}
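Editor's note on the binning above: the ToLongFunction match provider reduces each event to a long bin key, so events whose keys collide are combined into the same table row. A minimal sketch of the arithmetic, assuming MILLIS_PER_PULSE = 10 from the "10ms mapping interval" comment (the constant's value is not shown in this commit):

import java.util.function.ToLongFunction;

public class BinKeySketch {
    // Assumed from the '10ms mapping interval' comment; not confirmed by this commit.
    private static final long MILLIS_PER_PULSE = 10;

    public static void main(String[] args) {
        // Same shape as MATCHER_PROVIDER above: timestamp -> bin index.
        final ToLongFunction<Long> matcher = (millis) -> millis / MILLIS_PER_PULSE;
        System.out.println(matcher.applyAsLong(1003L)); // 100
        System.out.println(matcher.applyAsLong(1009L)); // 100 -> same bin as 1003
        System.out.println(matcher.applyAsLong(1010L)); // 101 -> next bin
    }
}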
@ -34,22 +34,22 @@ public class MsgPackHTTPResponse extends AbstractHTTPResponse {
super(ResponseFormat.MSGP);
}

public MsgPackHTTPResponse(Compression compression) {
public MsgPackHTTPResponse(final Compression compression) {
this();
setCompression(compression);
}

@Override
public void respond(ApplicationContext context, DAQQueries queries, HttpServletResponse response) throws Exception {
OutputStream out = handleCompressionAndResponseHeaders(response, CONTENT_TYPE);
public void respond(final ApplicationContext context, final DAQQueries queries, final HttpServletResponse response) throws Exception {
final OutputStream out = handleCompressionAndResponseHeaders(response, CONTENT_TYPE);

boolean hasMapping = JSONHTTPResponse.validateQueries(queries);
final boolean hasMapping = JSONHTTPResponse.validateQueries(queries);

try {
LOGGER.debug("Executing query '{}'", queries);

QueryManager queryManager = context.getBean(QueryManager.class);
ResponseStreamWriter streamWriter;
final QueryManager queryManager = context.getBean(QueryManager.class);
final ResponseStreamWriter streamWriter;
if (hasMapping) {
streamWriter = context.getBean(MsgPackTableResponseStreamWriter.class);
} else {
@ -57,7 +57,7 @@ public class MsgPackHTTPResponse extends AbstractHTTPResponse {
}

// execute query
List<Entry<DAQQueryElement, Stream<Triple<BackendQuery, ChannelName, ?>>>> result =
final List<Entry<DAQQueryElement, Stream<Triple<BackendQuery, ChannelName, ?>>>> result =
queryManager.getEvents(queries);
// write the response back to the client using java 8 streams
streamWriter.respond(result, out, this);
@ -5,18 +5,23 @@ import java.util.List;
import java.util.Map.Entry;
import java.util.stream.Stream;

import javax.annotation.Resource;
import javax.servlet.ServletResponse;

import org.apache.commons.lang3.tuple.Triple;
import org.msgpack.jackson.dataformat.MessagePackFactory;
import org.springframework.beans.BeansException;
import org.springframework.context.ApplicationContext;
import org.springframework.context.ApplicationContextAware;

import com.fasterxml.jackson.databind.ObjectMapper;

import ch.psi.daq.domain.backend.Backend;
import ch.psi.daq.domain.config.DomainConfig;
import ch.psi.daq.domain.json.ChannelName;
import ch.psi.daq.domain.query.DAQQueryElement;
import ch.psi.daq.domain.query.backend.BackendQuery;
import ch.psi.daq.domain.query.response.Response;
import ch.psi.daq.queryrest.config.QueryRestConfig;
import ch.psi.daq.queryrest.response.ResponseStreamWriter;
import ch.psi.daq.queryrest.response.json.JSONResponseStreamWriter;

@ -24,17 +29,22 @@ import ch.psi.daq.queryrest.response.json.JSONResponseStreamWriter;
* Takes a Java 8 stream and writes it to the output stream provided by the {@link ServletResponse}
* of the current request.
*/
public class MsgPackResponseStreamWriter implements ResponseStreamWriter {

@Resource
private MessagePackFactory msgPackFactory;

@Resource
public class MsgPackResponseStreamWriter implements ResponseStreamWriter, ApplicationContextAware {
private ObjectMapper mapper;
private MessagePackFactory factory;

@Override
public void setApplicationContext(ApplicationContext context) throws BeansException {
final Backend backend = context.getBean(DomainConfig.BEAN_NAME_BACKEND_DEFAULT, Backend.class);
context = backend.getApplicationContext();

mapper = context.getBean(DomainConfig.BEAN_NAME_OBJECT_MAPPER, ObjectMapper.class);
factory = context.getBean(QueryRestConfig.BEAN_NAME_MSG_PACK_FACTORY, MessagePackFactory.class);
}

@Override
public void respond(final List<Entry<DAQQueryElement, Stream<Triple<BackendQuery, ChannelName, ?>>>> results,
final OutputStream out, final Response response) throws Exception {
JSONResponseStreamWriter.respond(msgPackFactory, mapper, results, out, response);
JSONResponseStreamWriter.respond(factory, mapper, results, out, response);
}
}
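The same wiring recurs in every writer this commit touches: instead of field injection via @Resource, the writer implements ApplicationContextAware, resolves the configured default Backend, switches to that backend's own child context, and looks up its beans there. A condensed sketch of the pattern, using the types exactly as they appear in the diffs above:

import org.springframework.beans.BeansException;
import org.springframework.context.ApplicationContext;
import org.springframework.context.ApplicationContextAware;

import com.fasterxml.jackson.databind.ObjectMapper;

import ch.psi.daq.domain.backend.Backend;
import ch.psi.daq.domain.config.DomainConfig;

public abstract class BackendContextAwareSketch implements ApplicationContextAware {

    protected ObjectMapper mapper;

    @Override
    public void setApplicationContext(ApplicationContext context) throws BeansException {
        // Resolve the configured default backend from the injected (parent) context...
        final Backend backend = context.getBean(DomainConfig.BEAN_NAME_BACKEND_DEFAULT, Backend.class);
        // ...then continue all further lookups in that backend's own context,
        // so each backend can supply its own mapper/factory beans.
        context = backend.getApplicationContext();
        mapper = context.getBean(DomainConfig.BEAN_NAME_OBJECT_MAPPER, ObjectMapper.class);
    }
}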
@ -8,16 +8,18 @@ import java.util.Set;
import java.util.stream.Collectors;
import java.util.stream.Stream;

import javax.annotation.PostConstruct;
import javax.annotation.Resource;
import javax.servlet.ServletResponse;

import org.apache.commons.lang3.tuple.Triple;
import org.msgpack.jackson.dataformat.MessagePackFactory;
import org.springframework.beans.BeansException;
import org.springframework.context.ApplicationContext;
import org.springframework.context.ApplicationContextAware;

import com.fasterxml.jackson.databind.ObjectMapper;

import ch.psi.daq.domain.backend.Backend;
import ch.psi.daq.domain.config.DomainConfig;
import ch.psi.daq.domain.json.ChannelName;
import ch.psi.daq.domain.query.DAQQueryElement;
import ch.psi.daq.domain.query.backend.BackendQuery;
@ -31,24 +33,24 @@ import ch.psi.daq.queryrest.response.json.JSONTableResponseStreamWriter;
* Takes a Java 8 stream and writes it to the output stream provided by the {@link ServletResponse}
* of the current request.
*/
public class MsgPackTableResponseStreamWriter implements ResponseStreamWriter {

@Resource
private ApplicationContext context;

@Resource
private MessagePackFactory msgPackFactory;

@Resource
public class MsgPackTableResponseStreamWriter implements ResponseStreamWriter, ApplicationContextAware {
private ObjectMapper mapper;

@Resource(name = QueryRestConfig.BEAN_NAME_DEFAULT_RESPONSE_AGGREGATIONS)
private Set<Aggregation> defaultResponseAggregations;

private MessagePackFactory factory;
// In case ArchiverAppliance had several events within the 10ms mapping interval, return these
// aggregations
private Set<String> defaultResponseAggregationsStr;

@PostConstruct
public void afterPropertiesSet() {
@SuppressWarnings("unchecked")
@Override
public void setApplicationContext(ApplicationContext context) throws BeansException {
final Backend backend = context.getBean(DomainConfig.BEAN_NAME_BACKEND_DEFAULT, Backend.class);
context = backend.getApplicationContext();

mapper = context.getBean(DomainConfig.BEAN_NAME_OBJECT_MAPPER, ObjectMapper.class);
factory = context.getBean(QueryRestConfig.BEAN_NAME_MSG_PACK_FACTORY, MessagePackFactory.class);

final Set<Aggregation> defaultResponseAggregations =
context.getBean(QueryRestConfig.BEAN_NAME_DEFAULT_RESPONSE_AGGREGATIONS, Set.class);;
defaultResponseAggregationsStr =
defaultResponseAggregations.stream().map(Aggregation::name)
.collect(Collectors.toCollection(LinkedHashSet::new));
@ -56,7 +58,7 @@ public class MsgPackTableResponseStreamWriter implements ResponseStreamWriter {

@Override
public void respond(final List<Entry<DAQQueryElement, Stream<Triple<BackendQuery, ChannelName, ?>>>> results,
final OutputStream out, final Response response) throws Exception {
JSONTableResponseStreamWriter.respond(context, msgPackFactory, mapper, defaultResponseAggregationsStr, results, out, response);
final OutputStream out, final Response response) throws Exception {
JSONTableResponseStreamWriter.respond(factory, mapper, defaultResponseAggregationsStr, results, out, response);
}
}
@ -34,22 +34,22 @@ public class SmileHTTPResponse extends AbstractHTTPResponse {
super(ResponseFormat.SMILE);
}

public SmileHTTPResponse(Compression compression) {
public SmileHTTPResponse(final Compression compression) {
this();
setCompression(compression);
}

@Override
public void respond(ApplicationContext context, DAQQueries queries, HttpServletResponse response) throws Exception {
OutputStream out = handleCompressionAndResponseHeaders(response, CONTENT_TYPE);
public void respond(final ApplicationContext context, final DAQQueries queries, final HttpServletResponse response) throws Exception {
final OutputStream out = handleCompressionAndResponseHeaders(response, CONTENT_TYPE);

boolean hasMapping = JSONHTTPResponse.validateQueries(queries);
final boolean hasMapping = JSONHTTPResponse.validateQueries(queries);

try {
LOGGER.debug("Executing query '{}'", queries);

QueryManager queryManager = context.getBean(QueryManager.class);
ResponseStreamWriter streamWriter;
final QueryManager queryManager = context.getBean(QueryManager.class);
final ResponseStreamWriter streamWriter;
if (hasMapping) {
streamWriter = context.getBean(SmileTableResponseStreamWriter.class);
} else {
@ -57,7 +57,7 @@ public class SmileHTTPResponse extends AbstractHTTPResponse {
}

// execute query
List<Entry<DAQQueryElement, Stream<Triple<BackendQuery, ChannelName, ?>>>> result =
final List<Entry<DAQQueryElement, Stream<Triple<BackendQuery, ChannelName, ?>>>> result =
queryManager.getEvents(queries);
// write the response back to the client using java 8 streams
streamWriter.respond(result, out, this);
@ -5,18 +5,23 @@ import java.util.List;
import java.util.Map.Entry;
import java.util.stream.Stream;

import javax.annotation.Resource;
import javax.servlet.ServletResponse;

import org.apache.commons.lang3.tuple.Triple;
import org.springframework.beans.BeansException;
import org.springframework.context.ApplicationContext;
import org.springframework.context.ApplicationContextAware;

import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.dataformat.smile.SmileFactory;

import ch.psi.daq.domain.backend.Backend;
import ch.psi.daq.domain.config.DomainConfig;
import ch.psi.daq.domain.json.ChannelName;
import ch.psi.daq.domain.query.DAQQueryElement;
import ch.psi.daq.domain.query.backend.BackendQuery;
import ch.psi.daq.domain.query.response.Response;
import ch.psi.daq.queryrest.config.QueryRestConfig;
import ch.psi.daq.queryrest.response.ResponseStreamWriter;
import ch.psi.daq.queryrest.response.json.JSONResponseStreamWriter;

@ -24,17 +29,23 @@ import ch.psi.daq.queryrest.response.json.JSONResponseStreamWriter;
* Takes a Java 8 stream and writes it to the output stream provided by the {@link ServletResponse}
* of the current request.
*/
public class SmileResponseStreamWriter implements ResponseStreamWriter {
public class SmileResponseStreamWriter implements ResponseStreamWriter, ApplicationContextAware {

@Resource
private SmileFactory smileFactory;

@Resource
private ObjectMapper mapper;
private SmileFactory factory;

@Override
public void setApplicationContext(ApplicationContext context) throws BeansException {
final Backend backend = context.getBean(DomainConfig.BEAN_NAME_BACKEND_DEFAULT, Backend.class);
context = backend.getApplicationContext();

mapper = context.getBean(DomainConfig.BEAN_NAME_OBJECT_MAPPER, ObjectMapper.class);
factory = context.getBean(QueryRestConfig.BEAN_NAME_SMILE_FACTORY, SmileFactory.class);
}

@Override
public void respond(final List<Entry<DAQQueryElement, Stream<Triple<BackendQuery, ChannelName, ?>>>> results,
final OutputStream out, final Response response) throws Exception {
JSONResponseStreamWriter.respond(smileFactory, mapper, results, out, response);
JSONResponseStreamWriter.respond(factory, mapper, results, out, response);
}
}
@ -8,16 +8,18 @@ import java.util.Set;
import java.util.stream.Collectors;
import java.util.stream.Stream;

import javax.annotation.PostConstruct;
import javax.annotation.Resource;
import javax.servlet.ServletResponse;

import org.apache.commons.lang3.tuple.Triple;
import org.springframework.beans.BeansException;
import org.springframework.context.ApplicationContext;
import org.springframework.context.ApplicationContextAware;

import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.dataformat.smile.SmileFactory;

import ch.psi.daq.domain.backend.Backend;
import ch.psi.daq.domain.config.DomainConfig;
import ch.psi.daq.domain.json.ChannelName;
import ch.psi.daq.domain.query.DAQQueryElement;
import ch.psi.daq.domain.query.backend.BackendQuery;
@ -31,24 +33,25 @@ import ch.psi.daq.queryrest.response.json.JSONTableResponseStreamWriter;
* Takes a Java 8 stream and writes it to the output stream provided by the {@link ServletResponse}
* of the current request.
*/
public class SmileTableResponseStreamWriter implements ResponseStreamWriter {
public class SmileTableResponseStreamWriter implements ResponseStreamWriter, ApplicationContextAware {

@Resource
private ApplicationContext context;

@Resource
private SmileFactory smileFactory;

@Resource
private ObjectMapper mapper;

@Resource(name = QueryRestConfig.BEAN_NAME_DEFAULT_RESPONSE_AGGREGATIONS)
private Set<Aggregation> defaultResponseAggregations;

private SmileFactory factory;
// In case ArchiverAppliance had several events within the 10ms mapping interval, return these
// aggregations
private Set<String> defaultResponseAggregationsStr;

@PostConstruct
public void afterPropertiesSet() {
@SuppressWarnings("unchecked")
@Override
public void setApplicationContext(ApplicationContext context) throws BeansException {
final Backend backend = context.getBean(DomainConfig.BEAN_NAME_BACKEND_DEFAULT, Backend.class);
context = backend.getApplicationContext();

mapper = context.getBean(DomainConfig.BEAN_NAME_OBJECT_MAPPER, ObjectMapper.class);
factory = context.getBean(QueryRestConfig.BEAN_NAME_SMILE_FACTORY, SmileFactory.class);

final Set<Aggregation> defaultResponseAggregations =
context.getBean(QueryRestConfig.BEAN_NAME_DEFAULT_RESPONSE_AGGREGATIONS, Set.class);;
defaultResponseAggregationsStr =
defaultResponseAggregations.stream().map(Aggregation::name)
.collect(Collectors.toCollection(LinkedHashSet::new));
@ -56,7 +59,7 @@ public class SmileTableResponseStreamWriter implements ResponseStreamWriter {

@Override
public void respond(final List<Entry<DAQQueryElement, Stream<Triple<BackendQuery, ChannelName, ?>>>> results,
final OutputStream out, final Response response) throws Exception {
JSONTableResponseStreamWriter.respond(context, smileFactory, mapper, defaultResponseAggregationsStr, results, out, response);
final OutputStream out, final Response response) throws Exception {
JSONTableResponseStreamWriter.respond(factory, mapper, defaultResponseAggregationsStr, results, out, response);
}
}
@ -5,7 +5,7 @@ import javax.annotation.Resource;
import org.junit.Before;
import org.junit.runner.RunWith;
import org.mockito.MockitoAnnotations;
import org.springframework.boot.test.SpringApplicationConfiguration;
import org.springframework.boot.test.context.SpringBootTest;
import org.springframework.test.annotation.DirtiesContext;
import org.springframework.test.annotation.DirtiesContext.ClassMode;
import org.springframework.test.context.TestExecutionListeners;
@ -19,17 +19,13 @@ import org.springframework.web.context.WebApplicationContext;
import com.fasterxml.jackson.databind.ObjectMapper;

import ch.psi.daq.queryrest.QueryRestApplication;
import ch.psi.daq.queryrest.config.QueryRestConfig;
import ch.psi.daq.test.cassandra.CassandraDaqUnitDependencyInjectionTestExecutionListener;
import ch.psi.daq.test.queryrest.config.DaqWebMvcConfig;

@TestExecutionListeners({
CassandraDaqUnitDependencyInjectionTestExecutionListener.class,
DependencyInjectionTestExecutionListener.class})
@SpringApplicationConfiguration(classes = {
@SpringBootTest(classes = {
QueryRestApplication.class,
QueryRestConfig.class,
DaqWebMvcConfig.class
})
@DirtiesContext(classMode = ClassMode.AFTER_EACH_TEST_METHOD)
@ -53,5 +49,4 @@ public abstract class AbstractDaqRestTest {
// Setup Spring test in webapp-mode (same config as spring-boot)
this.mockMvc = MockMvcBuilders.webAppContextSetup(webApplicationContext).build();
}

}
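The test bootstrap also migrates from @SpringApplicationConfiguration, which was removed in Spring Boot 1.4, to @SpringBootTest. A minimal sketch of the resulting annotation set, reusing the classes from the diff above (the runner choice is an assumption; the diff only shows @RunWith being imported):

import org.junit.runner.RunWith;
import org.springframework.boot.test.context.SpringBootTest;
import org.springframework.test.context.junit4.SpringJUnit4ClassRunner;

import ch.psi.daq.queryrest.QueryRestApplication;
import ch.psi.daq.queryrest.config.QueryRestConfig;
import ch.psi.daq.test.queryrest.config.DaqWebMvcConfig;

// Spring Boot 1.4+ style test bootstrap; replaces @SpringApplicationConfiguration.
@RunWith(SpringJUnit4ClassRunner.class)
@SpringBootTest(classes = {
        QueryRestApplication.class,
        QueryRestConfig.class,
        DaqWebMvcConfig.class})
public abstract class SpringBootTestSketch {
}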
191 src/test/java/ch/psi/daq/test/queryrest/backend/BackendTest.java Normal file
@ -0,0 +1,191 @@
package ch.psi.daq.test.queryrest.backend;

import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNotSame;
import static org.junit.Assert.assertNull;
import static org.junit.Assert.assertSame;
import static org.junit.Assert.assertTrue;

import java.util.HashSet;
import java.util.Set;

import javax.annotation.Resource;

import org.junit.After;
import org.junit.Before;
import org.junit.Test;
import org.springframework.beans.factory.NoSuchBeanDefinitionException;
import org.springframework.context.ApplicationContext;

import ch.psi.daq.common.serialization.SerializationHelper;
import ch.psi.daq.domain.backend.Backend;
import ch.psi.daq.domain.config.DomainConfig;
import ch.psi.daq.domain.query.channels.ChannelNameCache;
import ch.psi.daq.filestorage.config.FileStorageConfig;
import ch.psi.daq.query.config.QueryConfig;
import ch.psi.daq.test.queryrest.AbstractDaqRestTest;

public class BackendTest extends AbstractDaqRestTest {

@Resource
private ApplicationContext context;

@Before
public void setUp() throws Exception {}

@After
public void tearDown() {}

@Test
public void testSerialization_01() throws Exception {
boolean backendsInit = false;

for (final Backend backend : Backend.getBackends()) {
backendsInit = true;
final Backend copy = SerializationHelper.copy(backend);

assertSame(backend, copy);
}
assertTrue(backendsInit);
}

@Test
public void testOverload_01() throws Exception {
final String propertyContactPoints = "cassandra.contactpoints";
String contactPoints;
int timeout;
boolean backendsInit = false;
Backend backendContext;
String userHome = System.getProperty("user.home");
String storageRootDir;
boolean compaction;

for (final Backend backend : Backend.getBackends()) {
backendsInit = true;
final ApplicationContext context = backend.getApplicationContext();

// from domain.properties
timeout = context.getBean(DomainConfig.BEAN_NAME_UPDATE_TIMEOUT, Integer.class);
assertEquals(30, timeout);

// from queryrest-test.properties
timeout = context.getBean(DomainConfig.BEAN_NAME_QUERY_TIMEOUT, Integer.class);
assertEquals(11, timeout);

if (backend.getId() == 255) {
// from test-overload.properties
timeout = context.getBean(DomainConfig.BEAN_NAME_SHUTDOWN_TIMEOUT, Integer.class);
assertEquals(32, timeout);

// from test-overload.properties
backendContext = context.getBean(DomainConfig.BEAN_NAME_BACKEND_OF_CONTEXT, Backend.class);
assertEquals("queryrest-1", backendContext.getName());
assertSame(backend, backendContext);

timeout = context.getBean(DomainConfig.BEAN_NAME_DELETE_TIMEOUT, Integer.class);
assertEquals(42, timeout);
storageRootDir =
context.getBean(FileStorageConfig.BEAN_NAME_ROOT_DIR, String.class);
assertEquals(userHome + "/daq/queryrest1", storageRootDir);

// from filestorage.properties
compaction =
context.getBean(FileStorageConfig.BEAN_NAME_FILESTORAGE_COMPACTION_TTL_UNCOMPACTED_DELETES,
Boolean.class);
assertEquals(false, compaction);
} else if (backend.getId() == 254) {
// from queryrest-test.properties
timeout = context.getBean(DomainConfig.BEAN_NAME_SHUTDOWN_TIMEOUT, Integer.class);
assertEquals(31, timeout);

// from test-overload2.properties
backendContext = context.getBean(DomainConfig.BEAN_NAME_BACKEND_OF_CONTEXT, Backend.class);
assertEquals("queryrest-2", backendContext.getName());
assertSame(backend, backendContext);

timeout = context.getBean(DomainConfig.BEAN_NAME_DELETE_TIMEOUT, Integer.class);
assertEquals(43, timeout);
contactPoints = context.getEnvironment().getProperty(propertyContactPoints, String.class);
assertEquals("localhost", contactPoints);
} else if (backend.getId() == 253) {
// from queryrest-test.properties
timeout = context.getBean(DomainConfig.BEAN_NAME_SHUTDOWN_TIMEOUT, Integer.class);
assertEquals(31, timeout);

// from test-overload3.properties
backendContext = context.getBean(DomainConfig.BEAN_NAME_BACKEND_OF_CONTEXT, Backend.class);
assertEquals("queryrest-3", backendContext.getName());
assertSame(backend, backendContext);

timeout = context.getBean(DomainConfig.BEAN_NAME_DELETE_TIMEOUT, Integer.class);
assertEquals(50, timeout);
}
}
assertTrue(backendsInit);

// from domain.properties
timeout = context.getBean(DomainConfig.BEAN_NAME_UPDATE_TIMEOUT, Integer.class);
assertEquals(30, timeout);

// from queryrest-test.properties
timeout = context.getBean(DomainConfig.BEAN_NAME_QUERY_TIMEOUT, Integer.class);
assertEquals(11, timeout);

// from queryrest-test.properties
timeout = context.getBean(DomainConfig.BEAN_NAME_SHUTDOWN_TIMEOUT, Integer.class);
assertEquals(31, timeout);

// does not see these beans
try {
storageRootDir = context.getBean(FileStorageConfig.BEAN_NAME_ROOT_DIR, String.class);
assertTrue(false);
} catch (NoSuchBeanDefinitionException e) {
assertTrue(true);
}
try {
compaction =
context.getBean(FileStorageConfig.BEAN_NAME_FILESTORAGE_COMPACTION_TTL_UNCOMPACTED_DELETES,
Boolean.class);
assertTrue(false);
} catch (NoSuchBeanDefinitionException e) {
assertTrue(true);
}
assertNull(context.getEnvironment().getProperty(propertyContactPoints, String.class));
}

@Test
public void testInstance_01() throws Exception {
final Set<ApplicationContext> parentContexts = new HashSet<>();
final Set<ApplicationContext> backendContexts = new HashSet<>();
for (final Backend backend : Backend.getBackends()) {
parentContexts.add(backend.getParentApplicationContext());
backendContexts.add(backend.getApplicationContext());
}

assertEquals(1, parentContexts.size());
assertEquals(Backend.getBackends().size(), backendContexts.size());

final ApplicationContext parentContext = parentContexts.iterator().next();
assertSame(context, parentContext);
for (final ApplicationContext context : backendContexts) {
assertNotSame(context, parentContext);
assertSame(parentContext, context.getParent());
}
}

@Test
public void testSingleInstance_01() throws Exception {
final Set<ChannelNameCache> caches = new HashSet<>();
for (final Backend backend : Backend.getBackends()) {
ChannelNameCache cache =
backend.getApplicationContext().getBean(QueryConfig.BEAN_NAME_CHANNEL_NAME_CACHE,
ChannelNameCache.class);
caches.add(cache);
}

assertEquals(1, caches.size());
assertSame(
context.getBean(QueryConfig.BEAN_NAME_CHANNEL_NAME_CACHE, ChannelNameCache.class),
caches.iterator().next());
}
}
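For orientation, the context hierarchy these tests pin down (one shared parent context, one child context per backend, child-local beans invisible to the parent) can be reproduced with plain Spring contexts. A minimal sketch; the bean name and value are hypothetical:

import org.springframework.beans.factory.NoSuchBeanDefinitionException;
import org.springframework.context.support.GenericApplicationContext;

public class ContextHierarchySketch {
    public static void main(String[] args) {
        // One shared parent context, as testInstance_01 above asserts.
        final GenericApplicationContext parent = new GenericApplicationContext();
        parent.refresh();

        // A child context per backend; here just one, with a child-local bean.
        final GenericApplicationContext child = new GenericApplicationContext();
        child.setParent(parent);
        child.getBeanFactory().registerSingleton("rootDir", "/tmp/daq");
        child.refresh();

        // The child resolves its own beans; the parent does not see them,
        // mirroring the NoSuchBeanDefinitionException checks in testOverload_01.
        System.out.println(child.getBean("rootDir", String.class)); // /tmp/daq
        try {
            parent.getBean("rootDir", String.class);
        } catch (NoSuchBeanDefinitionException e) {
            System.out.println("parent does not see child-local beans");
        }

        child.close();
        parent.close();
    }
}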
@ -1,8 +1,12 @@
package ch.psi.daq.test.queryrest.config;

import java.util.List;

import javax.annotation.PostConstruct;
import javax.annotation.Resource;

import org.springframework.beans.factory.config.BeanDefinition;
import org.springframework.context.ApplicationContext;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.ComponentScan;
import org.springframework.context.annotation.Configuration;
@ -10,26 +14,23 @@ import org.springframework.context.annotation.Import;
import org.springframework.context.annotation.Lazy;
import org.springframework.context.annotation.PropertySource;
import org.springframework.context.annotation.PropertySources;
import org.springframework.context.annotation.Scope;
import org.springframework.http.converter.HttpMessageConverter;
import org.springframework.http.converter.json.MappingJackson2HttpMessageConverter;
import org.springframework.web.servlet.config.annotation.EnableWebMvc;
import org.springframework.web.servlet.config.annotation.WebMvcConfigurationSupport;
import org.springframework.web.servlet.config.annotation.WebMvcConfigurerAdapter;

import ch.psi.daq.archiverappliance.config.ArchiverApplianceConfig;
import ch.psi.daq.cassandra.config.CassandraConfig;
import ch.psi.daq.cassandra.reader.CassandraReader;
import ch.psi.daq.domain.backend.Backend;
import ch.psi.daq.domain.backend.BackendAccess;
import ch.psi.daq.domain.backend.BackendType;
import ch.psi.daq.domain.config.DomainConfig;
import ch.psi.daq.domain.events.ChannelEvent;
import ch.psi.daq.domain.query.channels.reader.ChannelInfoReader;
import ch.psi.daq.domain.query.processor.QueryProcessor;
import ch.psi.daq.domain.reader.DataReader;
import ch.psi.daq.domain.reader.StreamEventReader;
import ch.psi.daq.filestorage.config.FileStorageConfig;
import ch.psi.daq.domain.test.reader.TestReader;
import ch.psi.daq.query.config.QueryConfig;
import ch.psi.daq.query.processor.QueryProcessorLocal;
import ch.psi.daq.test.query.config.LocalQueryTestConfig;
import ch.psi.daq.test.queryrest.query.DummyArchiverApplianceReader;
import ch.psi.daq.test.queryrest.query.DummyCassandraReader;
import ch.psi.daq.test.queryrest.query.DummyFilestorageReader;
import ch.psi.daq.queryrest.config.QueryRestConfig;

@Configuration
@ComponentScan
@ -37,50 +38,64 @@ import ch.psi.daq.test.queryrest.query.DummyFilestorageReader;
@PropertySources(value = {
@PropertySource(value = {"classpath:queryrest-test.properties"})
})
public class DaqWebMvcConfig extends WebMvcConfigurationSupport {
public class DaqWebMvcConfig extends WebMvcConfigurerAdapter {

// ensure that properties in dispatcher.properties are loaded first and then overwritten by the
// properties in dispatcher-test.properties
@Import(value = {LocalQueryTestConfig.class})
@Import(value = {QueryRestConfig.class})
static class InnerConfiguration {
}

// somehow needed to make sure @Import elements will get initialized and afterPropertiesSet will
// get called (ensuring BackendAcces gets initialized according hierarchy)
@Resource
private QueryRestConfig queryRestConfig;

@Resource(name = DomainConfig.BEAN_NAME_BACKEND_ACCESS)
private BackendAccess backendAccess;
public static final String IS_INITIAL_CHANNELS = "initial.channels";
public static final String BEAN_NAME_READER = "dummyReader";

@Resource
private ApplicationContext context;

@SuppressWarnings("unchecked")
@PostConstruct
public void afterPropertiesSet() {
backendAccess.addStreamEventReaderSupplier(Backend.SF_DATABUFFER, () -> cassandraReader());
backendAccess.addChannelInfoReaderSupplier(Backend.SF_DATABUFFER, () -> cassandraReader());

backendAccess.addStreamEventReaderSupplier(Backend.SF_IMAGEBUFFER, () -> filestorageReader());
backendAccess.addChannelInfoReaderSupplier(Backend.SF_IMAGEBUFFER, () -> filestorageReader());
// init BackendAccesses
final List<Backend> backends = context.getBean(DomainConfig.BEAN_NAME_BACKENDS, List.class);
final boolean overload = true;
for (final Backend backend : backends) {
final BackendType backendType = backend.getType();

backendAccess.addDataReaderSupplier(Backend.SF_ARCHIVERAPPLIANCE, () -> archiverApplianceReader());
// backendType.initBean(backend, BEAN_NAME_READER, DataReader.class, overload);
backendType.initBean(backend, BEAN_NAME_READER, StreamEventReader.class, overload, backend);
backendType.initBean(backend, BEAN_NAME_READER, ChannelInfoReader.class, overload, backend);
}
}

// make sure we use a local QueryProcessor even for distributed calls -> no Hazelcast needs to be started
@Bean(name = BEAN_NAME_READER)
@Scope(BeanDefinition.SCOPE_PROTOTYPE)
@Lazy
public StreamEventReader<ChannelEvent> testReader(final Backend backend) {
return new TestReader(backend);
}

// make sure we use a local QueryProcessor even for distributed calls -> no Hazelcast needs to be
// started
@Bean(name = QueryConfig.BEAN_NAME_QUERY_PROCESSOR_DISTRIBUTED)
@Lazy
public QueryProcessor distributedQueryProcessor() {
return new QueryProcessorLocal();
}

@Bean(name = CassandraConfig.BEAN_NAME_CASSANDRA_READER)
@Lazy
public CassandraReader cassandraReader() {
return new DummyCassandraReader();
}

@Bean(name = FileStorageConfig.BEAN_NAME_FILESTORAGE_READER)
@Lazy
public StreamEventReader<ChannelEvent> filestorageReader() {
return new DummyFilestorageReader();
}

@Bean(name = ArchiverApplianceConfig.BEAN_NAME_ARCHIVER_APPLIANCE_READER)
@Lazy
public DataReader archiverApplianceReader() {
return new DummyArchiverApplianceReader();
@Override
public void configureMessageConverters(List<HttpMessageConverter<?>> converters) {
/**
* This is necessary so that the message conversion uses the configured object mapper.
* Otherwise, a separate object mapper is instantiated for Springs message conversion.
*/
final MappingJackson2HttpMessageConverter converter =
context.getBean(DomainConfig.BEAN_NAME_MESSAGE_CONVERTER, MappingJackson2HttpMessageConverter.class);
converters.add(converter);
super.configureMessageConverters(converters);
}
}
File diff suppressed because it is too large
@ -7,10 +7,11 @@ import java.awt.Color;
import java.nio.charset.StandardCharsets;
import java.util.Arrays;

import javax.annotation.Resource;

import org.junit.After;
import org.junit.Test;
import org.springframework.beans.BeansException;
import org.springframework.context.ApplicationContext;
import org.springframework.context.ApplicationContextAware;
import org.springframework.http.MediaType;
import org.springframework.test.web.servlet.MvcResult;
import org.springframework.test.web.servlet.request.MockMvcRequestBuilders;
@ -54,22 +55,39 @@ import ch.psi.daq.test.queryrest.AbstractDaqRestTest;
/**
* Tests the {@link DaqController} implementation.
*/
public class JsonQueryRestControllerTest extends AbstractDaqRestTest {
public class JsonQueryRestControllerTest extends AbstractDaqRestTest implements ApplicationContextAware {

public static final String TEST_CHANNEL_01 = "testChannel1";
public static final String TEST_CHANNEL_02 = "testChannel2";
public static final String TEST_CHANNEL_WAVEFORM_01 = "testChannelWaveform1";
public static final String[] TEST_CHANNEL_NAMES = new String[] {TEST_CHANNEL_01, TEST_CHANNEL_02};

@Resource(name = DomainConfig.BEAN_NAME_BACKEND_DEFAULT)
private Backend backend;
private Backend backend2;
private Backend backend3;

@Override
public void setApplicationContext(ApplicationContext context) throws BeansException {
backend = context.getBean(DomainConfig.BEAN_NAME_BACKEND_DEFAULT, Backend.class);
context = backend.getApplicationContext();

for (final Backend baknd : Backend.getBackends()) {
if (baknd != backend) {
if (backend2 == null) {
backend2 = baknd;
} else if (backend3 == null) {
backend3 = baknd;
break;
}
}
}
}

@After
public void tearDown() throws Exception {}

@Test
public void testChannelNameQuery() throws Exception {

this.mockMvc
.perform(
MockMvcRequestBuilders
@ -79,17 +97,17 @@ public class JsonQueryRestControllerTest extends AbstractDaqRestTest {
.andExpect(MockMvcResultMatchers.status().isOk())
.andExpect(MockMvcResultMatchers.jsonPath("$").isArray())
.andExpect(MockMvcResultMatchers.jsonPath("$[0]").exists())
.andExpect(MockMvcResultMatchers.jsonPath("$[0].backend").value(Backend.SF_DATABUFFER.getKey()))
.andExpect(MockMvcResultMatchers.jsonPath("$[0].backend").value(backend.getName()))
.andExpect(MockMvcResultMatchers.jsonPath("$[0].channels").isArray())
.andExpect(MockMvcResultMatchers.jsonPath("$[0].channels[0]").exists())
.andExpect(MockMvcResultMatchers.jsonPath("$[0].channels[0]").value("BoolScalar"))
.andExpect(MockMvcResultMatchers.jsonPath("$[0].channels[1]").exists())
.andExpect(MockMvcResultMatchers.jsonPath("$[0].channels[1]").value("BoolWaveform"))
.andExpect(MockMvcResultMatchers.jsonPath("$[1]").exists())
.andExpect(MockMvcResultMatchers.jsonPath("$[1].backend").value(Backend.SF_ARCHIVERAPPLIANCE.getKey()))
.andExpect(MockMvcResultMatchers.jsonPath("$[1].backend").value(backend2.getName()))
.andExpect(MockMvcResultMatchers.jsonPath("$[1].channels").isArray())
.andExpect(MockMvcResultMatchers.jsonPath("$[2]").exists())
.andExpect(MockMvcResultMatchers.jsonPath("$[2].backend").value(Backend.SF_IMAGEBUFFER.getKey()))
.andExpect(MockMvcResultMatchers.jsonPath("$[2].backend").value(backend3.getName()))
.andExpect(MockMvcResultMatchers.jsonPath("$[2].channels").isArray())
.andExpect(MockMvcResultMatchers.jsonPath("$[2].channels[0]").exists())
.andExpect(MockMvcResultMatchers.jsonPath("$[2].channels[0]").value("BoolScalar"))
@ -108,7 +126,7 @@ public class JsonQueryRestControllerTest extends AbstractDaqRestTest {
.andExpect(MockMvcResultMatchers.status().isOk())
.andExpect(MockMvcResultMatchers.jsonPath("$").isArray())
.andExpect(MockMvcResultMatchers.jsonPath("$[0]").exists())
.andExpect(MockMvcResultMatchers.jsonPath("$[0].backend").value(Backend.SF_DATABUFFER.getKey()))
.andExpect(MockMvcResultMatchers.jsonPath("$[0].backend").value(backend.getName()))
.andExpect(MockMvcResultMatchers.jsonPath("$[0].channels").isArray())
.andExpect(MockMvcResultMatchers.jsonPath("$[0].channels[0]").exists())
.andExpect(MockMvcResultMatchers.jsonPath("$[0].channels[0]").value("Int32Scalar"))
@ -119,10 +137,10 @@ public class JsonQueryRestControllerTest extends AbstractDaqRestTest {
.andExpect(MockMvcResultMatchers.jsonPath("$[0].channels[3]").exists())
.andExpect(MockMvcResultMatchers.jsonPath("$[0].channels[3]").value("UInt32Waveform"))
.andExpect(MockMvcResultMatchers.jsonPath("$[1]").exists())
.andExpect(MockMvcResultMatchers.jsonPath("$[1].backend").value(Backend.SF_ARCHIVERAPPLIANCE.getKey()))
.andExpect(MockMvcResultMatchers.jsonPath("$[1].backend").value(backend2.getName()))
.andExpect(MockMvcResultMatchers.jsonPath("$[1].channels").isArray())
.andExpect(MockMvcResultMatchers.jsonPath("$[2]").exists())
.andExpect(MockMvcResultMatchers.jsonPath("$[2].backend").value(Backend.SF_IMAGEBUFFER.getKey()))
.andExpect(MockMvcResultMatchers.jsonPath("$[2].backend").value(backend3.getName()))
.andExpect(MockMvcResultMatchers.jsonPath("$[2].channels").isArray())
.andExpect(MockMvcResultMatchers.jsonPath("$[2].channels[0]").exists())
.andExpect(MockMvcResultMatchers.jsonPath("$[2].channels[0]").value("Int32Scalar"))
@ -137,7 +155,7 @@ public class JsonQueryRestControllerTest extends AbstractDaqRestTest {
@Test
public void testChannelNameQueryBackendOrder() throws Exception {
ChannelsRequest request = new ChannelsRequest("int64", Ordering.desc, backend);

String content = mapper.writeValueAsString(request);
System.out.println(content);

@ -150,7 +168,7 @@ public class JsonQueryRestControllerTest extends AbstractDaqRestTest {
.andExpect(MockMvcResultMatchers.status().isOk())
.andExpect(MockMvcResultMatchers.jsonPath("$").isArray())
.andExpect(MockMvcResultMatchers.jsonPath("$[0]").exists())
.andExpect(MockMvcResultMatchers.jsonPath("$[0].backend").value(backend.getKey()))
.andExpect(MockMvcResultMatchers.jsonPath("$[0].backend").value(backend.getName()))
.andExpect(MockMvcResultMatchers.jsonPath("$[0].channels").isArray())
.andExpect(MockMvcResultMatchers.jsonPath("$[0].channels[0]").exists())
.andExpect(MockMvcResultMatchers.jsonPath("$[0].channels[0]").value("UInt64Waveform"))
@ -169,7 +187,7 @@ public class JsonQueryRestControllerTest extends AbstractDaqRestTest {

String content = mapper.writeValueAsString(request);
System.out.println(content);

this.mockMvc
.perform(MockMvcRequestBuilders
.post(DomainConfig.PATH_CHANNELS)
@ -179,13 +197,15 @@ public class JsonQueryRestControllerTest extends AbstractDaqRestTest {
.andExpect(MockMvcResultMatchers.status().isOk())
.andExpect(MockMvcResultMatchers.jsonPath("$").isArray())
.andExpect(MockMvcResultMatchers.jsonPath("$[0]").exists())
.andExpect(MockMvcResultMatchers.jsonPath("$[0].backend").value(Backend.SF_DATABUFFER.getKey()))
.andExpect(MockMvcResultMatchers.jsonPath("$[0].backend").value(backend.getName()))
.andExpect(MockMvcResultMatchers.jsonPath("$[0].channels").isArray())
.andExpect(MockMvcResultMatchers.jsonPath("$[0].channels[24]").exists())
.andExpect(MockMvcResultMatchers.jsonPath("$[0].channels[25]").doesNotExist())
.andExpect(MockMvcResultMatchers.jsonPath("$[1].backend").value(Backend.SF_ARCHIVERAPPLIANCE.getKey()))
.andExpect(MockMvcResultMatchers.jsonPath("$[0].channels[23]").exists())
.andExpect(MockMvcResultMatchers.jsonPath("$[0].channels[24]").doesNotExist())
.andExpect(MockMvcResultMatchers.jsonPath("$[1].backend").value(backend2.getName()))
.andExpect(MockMvcResultMatchers.jsonPath("$[1].channels").isArray())
.andExpect(MockMvcResultMatchers.jsonPath("$[2].backend").value(Backend.SF_IMAGEBUFFER.getKey()))
.andExpect(MockMvcResultMatchers.jsonPath("$[1].channels[0]").exists())
.andExpect(MockMvcResultMatchers.jsonPath("$[1].channels[1]").doesNotExist())
.andExpect(MockMvcResultMatchers.jsonPath("$[2].backend").value(backend3.getName()))
.andExpect(MockMvcResultMatchers.jsonPath("$[2].channels").isArray())
.andExpect(MockMvcResultMatchers.jsonPath("$[2].channels[23]").exists())
.andExpect(MockMvcResultMatchers.jsonPath("$[2].channels[24]").doesNotExist());
@ -205,13 +225,13 @@ public class JsonQueryRestControllerTest extends AbstractDaqRestTest {
.andExpect(MockMvcResultMatchers.status().isOk())
.andExpect(MockMvcResultMatchers.jsonPath("$").isArray())
.andExpect(MockMvcResultMatchers.jsonPath("$[0]").exists())
.andExpect(MockMvcResultMatchers.jsonPath("$[0].backend").value(Backend.SF_DATABUFFER.getKey()))
.andExpect(MockMvcResultMatchers.jsonPath("$[0].backend").value(backend.getName()))
.andExpect(MockMvcResultMatchers.jsonPath("$[0].channels").isArray())
.andExpect(MockMvcResultMatchers.jsonPath("$[0].channels[26]").exists())
.andExpect(MockMvcResultMatchers.jsonPath("$[0].channels[27]").doesNotExist())
.andExpect(MockMvcResultMatchers.jsonPath("$[1].backend").value(Backend.SF_ARCHIVERAPPLIANCE.getKey()))
.andExpect(MockMvcResultMatchers.jsonPath("$[0].channels[24]").exists())
.andExpect(MockMvcResultMatchers.jsonPath("$[0].channels[25]").doesNotExist())
.andExpect(MockMvcResultMatchers.jsonPath("$[1].backend").value(backend2.getName()))
.andExpect(MockMvcResultMatchers.jsonPath("$[1].channels").isArray())
.andExpect(MockMvcResultMatchers.jsonPath("$[2].backend").value(Backend.SF_IMAGEBUFFER.getKey()))
.andExpect(MockMvcResultMatchers.jsonPath("$[2].backend").value(backend3.getName()))
.andExpect(MockMvcResultMatchers.jsonPath("$[2].channels").isArray())
.andExpect(MockMvcResultMatchers.jsonPath("$[2].channels[24]").exists())
.andExpect(MockMvcResultMatchers.jsonPath("$[2].channels[25]").doesNotExist());
@ -366,7 +386,7 @@ public class JsonQueryRestControllerTest extends AbstractDaqRestTest {
100,
101),
new ChannelName(TEST_CHANNEL_01, backend),
new ChannelName(TEST_CHANNEL_02, Backend.SF_ARCHIVERAPPLIANCE));
new ChannelName(TEST_CHANNEL_02, backend2));

String content = mapper.writeValueAsString(request);
System.out.println(content);
@ -569,7 +589,7 @@ public class JsonQueryRestControllerTest extends AbstractDaqRestTest {
.andExpect(MockMvcResultMatchers.jsonPath("$").isArray())
.andExpect(MockMvcResultMatchers.jsonPath("$[0]").exists())
.andExpect(MockMvcResultMatchers.jsonPath("$[0].channel.name").value(TEST_CHANNEL_01))
.andExpect(MockMvcResultMatchers.jsonPath("$[0].channel.backend").value(backend.getKey()))
.andExpect(MockMvcResultMatchers.jsonPath("$[0].channel.backend").value(backend.getName()))
.andExpect(MockMvcResultMatchers.jsonPath("$[0].data").isArray())
.andExpect(MockMvcResultMatchers.jsonPath("$[0].data[0].pulseId").value(100))
.andExpect(MockMvcResultMatchers.jsonPath("$[0].data[0].globalSeconds").value(
@ -579,7 +599,7 @@ public class JsonQueryRestControllerTest extends AbstractDaqRestTest {
TestTimeUtils.getTimeStr(1, 10000000)))
.andExpect(MockMvcResultMatchers.jsonPath("$[1]").exists())
.andExpect(MockMvcResultMatchers.jsonPath("$[1].channel.name").value(TEST_CHANNEL_02))
.andExpect(MockMvcResultMatchers.jsonPath("$[1].channel.backend").value(backend.getKey()))
.andExpect(MockMvcResultMatchers.jsonPath("$[1].channel.backend").value(backend.getName()))
.andExpect(MockMvcResultMatchers.jsonPath("$[1].data").isArray())
.andExpect(MockMvcResultMatchers.jsonPath("$[1].data[0].pulseId").value(100))
.andExpect(MockMvcResultMatchers.jsonPath("$[1].data[0].globalSeconds").value(
@ -659,7 +679,7 @@ public class JsonQueryRestControllerTest extends AbstractDaqRestTest {
.andExpect(MockMvcResultMatchers.jsonPath("$[0]").exists())
.andExpect(MockMvcResultMatchers.jsonPath("$[0].channel").isMap())
.andExpect(MockMvcResultMatchers.jsonPath("$[0].channel.name").value(TEST_CHANNEL_01))
.andExpect(MockMvcResultMatchers.jsonPath("$[0].channel.backend").value(backend.getKey()))
.andExpect(MockMvcResultMatchers.jsonPath("$[0].channel.backend").value(backend.getName()))
.andExpect(MockMvcResultMatchers.jsonPath("$[0].data").isArray())
.andExpect(MockMvcResultMatchers.jsonPath("$[0].data[0].pulseId").value(100))
.andExpect(MockMvcResultMatchers.jsonPath("$[0].data[0].globalSeconds").value(
@ -710,7 +730,7 @@ public class JsonQueryRestControllerTest extends AbstractDaqRestTest {
.andExpect(MockMvcResultMatchers.jsonPath("$[0]").exists())
.andExpect(MockMvcResultMatchers.jsonPath("$[0].channel").isMap())
.andExpect(MockMvcResultMatchers.jsonPath("$[0].channel.name").value(TEST_CHANNEL_01))
.andExpect(MockMvcResultMatchers.jsonPath("$[0].channel.backend").value(backend.getKey()))
.andExpect(MockMvcResultMatchers.jsonPath("$[0].channel.backend").value(backend.getName()))
.andExpect(MockMvcResultMatchers.jsonPath("$[0].data").isArray())
.andExpect(MockMvcResultMatchers.jsonPath("$[0].data[0].pulseId").value(100))
.andExpect(MockMvcResultMatchers.jsonPath("$[0].data[0].globalSeconds").value(
@ -777,7 +797,7 @@ public class JsonQueryRestControllerTest extends AbstractDaqRestTest {
.andExpect(MockMvcResultMatchers.jsonPath("$[0]").exists())
.andExpect(MockMvcResultMatchers.jsonPath("$[0].channel").isMap())
.andExpect(MockMvcResultMatchers.jsonPath("$[0].channel.name").value(TEST_CHANNEL_01))
.andExpect(MockMvcResultMatchers.jsonPath("$[0].channel.backend").value(backend.getKey()))
.andExpect(MockMvcResultMatchers.jsonPath("$[0].channel.backend").value(backend.getName()))
.andExpect(MockMvcResultMatchers.jsonPath("$[0].data").isArray())
.andExpect(MockMvcResultMatchers.jsonPath("$[0].data[0].pulseId").value(1000))
.andExpect(MockMvcResultMatchers.jsonPath("$[0].data[0].globalSeconds").value(
@ -892,7 +912,7 @@ public class JsonQueryRestControllerTest extends AbstractDaqRestTest {
.andExpect(MockMvcResultMatchers.jsonPath("$[0]").exists())
.andExpect(MockMvcResultMatchers.jsonPath("$[0].channel").isMap())
.andExpect(MockMvcResultMatchers.jsonPath("$[0].channel.name").value(TEST_CHANNEL_WAVEFORM_01))
.andExpect(MockMvcResultMatchers.jsonPath("$[0].channel.backend").value(backend.getKey()))
.andExpect(MockMvcResultMatchers.jsonPath("$[0].channel.backend").value(backend.getName()))
.andExpect(MockMvcResultMatchers.jsonPath("$[0].data").isArray())
.andExpect(MockMvcResultMatchers.jsonPath("$[0].data[0].pulseId").value(100))
.andExpect(MockMvcResultMatchers.jsonPath("$[0].data[0].globalSeconds").value(
@ -952,7 +972,7 @@ public class JsonQueryRestControllerTest extends AbstractDaqRestTest {
.andExpect(MockMvcResultMatchers.jsonPath("$[0]").exists())
.andExpect(MockMvcResultMatchers.jsonPath("$[0].channel").isMap())
.andExpect(MockMvcResultMatchers.jsonPath("$[0].channel.name").value(TEST_CHANNEL_WAVEFORM_01))
.andExpect(MockMvcResultMatchers.jsonPath("$[0].channel.backend").value(backend.getKey()))
.andExpect(MockMvcResultMatchers.jsonPath("$[0].channel.backend").value(backend.getName()))
.andExpect(MockMvcResultMatchers.jsonPath("$[0].data").isArray())
.andExpect(MockMvcResultMatchers.jsonPath("$[0].data[0].pulseId").value(100))
.andExpect(MockMvcResultMatchers.jsonPath("$[0].data[0].globalSeconds").value(
@ -35,10 +35,10 @@ import ch.psi.daq.test.queryrest.AbstractDaqRestTest;
 */
public class QueryRestControllerChannelInfoTest extends AbstractDaqRestTest {

    private ObjectMapper objectMapper = new ObjectMapper();

    @Resource(name = DomainConfig.BEAN_NAME_BACKEND_DEFAULT)
    private Backend backend;

    private ObjectMapper objectMapper = new ObjectMapper();

    @After
    public void tearDown() throws Exception {}
@ -49,7 +49,7 @@ public class QueryRestControllerChannelInfoTest extends AbstractDaqRestTest {
                new RequestRangePulseId(
                        100,
                        101),
                backend.getKey() + "1", backend.getKey() + "2");
                backend.getName() + "1", backend.getName() + "2");

        String content = mapper.writeValueAsString(query);
        System.out.println(content);
@ -69,19 +69,19 @@ public class QueryRestControllerChannelInfoTest extends AbstractDaqRestTest {
        List<? extends ChannelInfos> infosList = objectMapper.readValue(response, ChannelInfosList.class);
        assertEquals(2, infosList.size());
        ChannelInfos cInfos = infosList.get(0);
        assertEquals(backend.getKey() + "1", cInfos.getChannel().getName());
        assertEquals(backend.getName() + "1", cInfos.getChannel().getName());
        assertEquals(backend, cInfos.getChannel().getBackend());
        List<ChannelInfo> infos = cInfos.getChannelInfos().collect(Collectors.toList());
        assertEquals(2, infos.size());
        ChannelInfo info = infos.get(0);
        assertEquals(backend.getKey() + "1", info.getChannel());
        assertEquals(backend.getName() + "1", info.getChannel());
        assertEquals(backend, info.getBackend());
        assertEquals(TimeUtils.getTimeFromMillis(query.getRange().getStartPulseId() * 10, 0), info.getGlobalTime());
        assertEquals(query.getRange().getStartPulseId(), info.getPulseId());
        assertArrayEquals(new int[] {1}, info.getShape());
        assertEquals(Type.Int32.getKey(), info.getType());
        info = infos.get(1);
        assertEquals(backend.getKey() + "1", info.getChannel());
        assertEquals(backend.getName() + "1", info.getChannel());
        assertEquals(backend, info.getBackend());
        assertEquals(TimeUtils.getTimeFromMillis(query.getRange().getEndPulseId() * 10, 0), info.getGlobalTime());
        assertEquals(query.getRange().getEndPulseId(), info.getPulseId());
@ -89,19 +89,19 @@ public class QueryRestControllerChannelInfoTest extends AbstractDaqRestTest {
        assertEquals(Type.Int32.getKey(), info.getType());

        cInfos = infosList.get(1);
        assertEquals(backend.getKey() + "2", cInfos.getChannel().getName());
        assertEquals(backend.getName() + "2", cInfos.getChannel().getName());
        assertEquals(backend, cInfos.getChannel().getBackend());
        infos = cInfos.getChannelInfos().collect(Collectors.toList());
        assertEquals(2, infos.size());
        info = infos.get(0);
        assertEquals(backend.getKey() + "2", info.getChannel());
        assertEquals(backend.getName() + "2", info.getChannel());
        assertEquals(backend, info.getBackend());
        assertEquals(TimeUtils.getTimeFromMillis(query.getRange().getStartPulseId() * 10, 0), info.getGlobalTime());
        assertEquals(query.getRange().getStartPulseId(), info.getPulseId());
        assertArrayEquals(new int[] {1}, info.getShape());
        assertEquals(Type.Int32.getKey(), info.getType());
        info = infos.get(1);
        assertEquals(backend.getKey() + "2", info.getChannel());
        assertEquals(backend.getName() + "2", info.getChannel());
        assertEquals(backend, info.getBackend());
        assertEquals(TimeUtils.getTimeFromMillis(query.getRange().getEndPulseId() * 10, 0), info.getGlobalTime());
        assertEquals(query.getRange().getEndPulseId(), info.getPulseId());
@ -115,7 +115,7 @@ public class QueryRestControllerChannelInfoTest extends AbstractDaqRestTest {
                new RequestRangePulseId(
                        100,
                        101),
                backend.getKey() + "1", backend.getKey() + "2");
                backend.getName() + "1", backend.getName() + "2");
        query.setOrdering(Ordering.desc);

        String content = mapper.writeValueAsString(query);
@ -136,19 +136,19 @@ public class QueryRestControllerChannelInfoTest extends AbstractDaqRestTest {
        List<? extends ChannelInfos> infosList = objectMapper.readValue(response, ChannelInfosList.class);
        assertEquals(2, infosList.size());
        ChannelInfos cInfos = infosList.get(0);
        assertEquals(backend.getKey() + "1", cInfos.getChannel().getName());
        assertEquals(backend.getName() + "1", cInfos.getChannel().getName());
        assertEquals(backend, cInfos.getChannel().getBackend());
        List<ChannelInfo> infos = cInfos.getChannelInfos().collect(Collectors.toList());
        assertEquals(2, infos.size());
        ChannelInfo info = infos.get(0);
        assertEquals(backend.getKey() + "1", info.getChannel());
        assertEquals(backend.getName() + "1", info.getChannel());
        assertEquals(backend, info.getBackend());
        assertEquals(TimeUtils.getTimeFromMillis(query.getRange().getEndPulseId() * 10, 0), info.getGlobalTime());
        assertEquals(query.getRange().getEndPulseId(), info.getPulseId());
        assertArrayEquals(new int[] {1}, info.getShape());
        assertEquals(Type.Int32.getKey(), info.getType());
        info = infos.get(1);
        assertEquals(backend.getKey() + "1", info.getChannel());
        assertEquals(backend.getName() + "1", info.getChannel());
        assertEquals(backend, info.getBackend());
        assertEquals(TimeUtils.getTimeFromMillis(query.getRange().getStartPulseId() * 10, 0), info.getGlobalTime());
        assertEquals(query.getRange().getStartPulseId(), info.getPulseId());
@ -156,19 +156,19 @@ public class QueryRestControllerChannelInfoTest extends AbstractDaqRestTest {
        assertEquals(Type.Int32.getKey(), info.getType());

        cInfos = infosList.get(1);
        assertEquals(backend.getKey() + "2", cInfos.getChannel().getName());
        assertEquals(backend.getName() + "2", cInfos.getChannel().getName());
        assertEquals(backend, cInfos.getChannel().getBackend());
        infos = cInfos.getChannelInfos().collect(Collectors.toList());
        assertEquals(2, infos.size());
        info = infos.get(0);
        assertEquals(backend.getKey() + "2", info.getChannel());
        assertEquals(backend.getName() + "2", info.getChannel());
        assertEquals(backend, info.getBackend());
        assertEquals(TimeUtils.getTimeFromMillis(query.getRange().getEndPulseId() * 10, 0), info.getGlobalTime());
        assertEquals(query.getRange().getEndPulseId(), info.getPulseId());
        assertArrayEquals(new int[] {1}, info.getShape());
        assertEquals(Type.Int32.getKey(), info.getType());
        info = infos.get(1);
        assertEquals(backend.getKey() + "2", info.getChannel());
        assertEquals(backend.getName() + "2", info.getChannel());
        assertEquals(backend, info.getBackend());
        assertEquals(TimeUtils.getTimeFromMillis(query.getRange().getStartPulseId() * 10, 0), info.getGlobalTime());
        assertEquals(query.getRange().getStartPulseId(), info.getPulseId());
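Across all of the hunks above, the substantive change is uniform: wherever a test derived a channel name or asserted on the JSON channel.backend field from backend.getKey(), it now uses backend.getName(). Both getters appear in this diff; a minimal sketch of the distinction (the literal values are taken from elsewhere in this commit and only illustrate the shape of the two identifiers):

// backend is injected as above via
// @Resource(name = DomainConfig.BEAN_NAME_BACKEND_DEFAULT)
String key = backend.getKey();    // fixed identifier, e.g. "sf-imagebuffer"
String name = backend.getName();  // configurable name, e.g. "queryrest-1"
// after this commit, channel names and the JSON "channel.backend" field
// are built from name, not key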
@ -1,430 +0,0 @@
package ch.psi.daq.test.queryrest.query;

import java.math.BigDecimal;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.List;
import java.util.Random;
import java.util.concurrent.CompletableFuture;
import java.util.concurrent.atomic.AtomicLong;
import java.util.regex.Pattern;
import java.util.stream.Collectors;
import java.util.stream.IntStream;
import java.util.stream.LongStream;
import java.util.stream.Stream;

import javax.annotation.Resource;

import org.apache.commons.lang3.ArrayUtils;

import com.google.common.collect.Lists;

import ch.psi.bsread.message.Type;
import ch.psi.daq.common.ordering.Ordering;
import ch.psi.daq.common.time.TimeUtils;
import ch.psi.daq.domain.DataEvent;
import ch.psi.daq.domain.FieldNames;
import ch.psi.daq.domain.backend.Backend;
import ch.psi.daq.domain.config.DomainConfig;
import ch.psi.daq.domain.events.ChannelConfiguration;
import ch.psi.daq.domain.events.ChannelEvent;
import ch.psi.daq.domain.events.MetaPulseId;
import ch.psi.daq.domain.events.impl.ChannelConfigurationImpl;
import ch.psi.daq.domain.events.impl.ChannelEventImpl;
import ch.psi.daq.domain.events.impl.MetaPulseIdImpl;
import ch.psi.daq.domain.json.channels.info.ChannelInfo;
import ch.psi.daq.domain.json.channels.info.ChannelInfoImpl;
import ch.psi.daq.domain.query.event.EventQuery;
import ch.psi.daq.domain.query.event.StreamEventQuery;
import ch.psi.daq.domain.query.range.PulseIdRangeQuery;
import ch.psi.daq.domain.query.range.TimeRangeQuery;
import ch.psi.daq.domain.query.transform.ValueTransformation;
import ch.psi.daq.domain.reader.MetaStreamEventQuery;
import ch.psi.daq.domain.reader.StreamEventReader;
import ch.psi.daq.domain.test.backend.TestBackendAccess;
import ch.psi.daq.domain.test.gen.TestDataGen;
import ch.psi.daq.domain.utils.PropertiesUtils;
import ch.psi.data.stream.converters.impl.UShortByteValueConverter;

public abstract class AbstractStreamEventReader implements StreamEventReader<ChannelEvent> {
    private static final Random random = new Random(0);
    private static final int KEYSPACE = 1;
    private List<String> channels;
    private AtomicLong channelNameCallCounter = new AtomicLong();

    private TestDataGen dataGen;
    private Backend backend;
    private String testChannelName;

    @Resource(name = DomainConfig.BEAN_NAME_TEST_BACKEND_ACCESS)
    private TestBackendAccess testBackendAccess;

    public AbstractStreamEventReader() {
        this.channels = Lists.newArrayList(
                "BoolScalar",
                "BoolWaveform",
                "Int8Scalar",
                "Int8Waveform",
                "UInt8Scalar",
                "UInt8Waveform",
                "Int16Scalar",
                "Int16Waveform",
                "UInt16Scalar",
                "UInt16Waveform",
                "Int32Scalar",
                "Int32Waveform",
                "UInt32Scalar",
                "UInt32Waveform",
                "Int64Scalar",
                "Int64Waveform",
                "UInt64Scalar",
                "UInt64Waveform",
                "Float32Scalar",
                "Float32Waveform",
                "Float64Scalar",
                "Float64Waveform",
                "StringScalar");
    }

    protected void init(Backend backend) {
        this.backend = backend;
        this.dataGen = testBackendAccess.getTestDataGen(backend);
        this.testChannelName = backend.getKey() + "_TestChannel_";
    }

    @Override
    public Backend getBackend() {
        return backend;
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public Stream<String> getChannelStream(String regex) {
        channels.add(testChannelName + channelNameCallCounter.incrementAndGet());

        Stream<String> channelStream = channels.stream();
        if (regex != null) {
            Pattern pattern = Pattern.compile(regex.toLowerCase());
            channelStream = channelStream.filter(channel -> pattern.matcher(channel.toLowerCase()).find());
        }
        return channelStream;
    }

    // @Override
    // public Stream<ChannelEvent> getEventStream(PulseIdRangeQuery query) {
    // return getDummyEventStream(query.getChannel(), query.getStartPulseId(), query.getEndPulseId(),
    // query.getEventColumns())
    // .filter(query.getFilterOrDefault(EventQuery.NO_OP_FILTER));
    // }

    @Override
    public Stream<ChannelEvent> getEventStream(TimeRangeQuery query) {
        return getDummyEventStream(query.getChannel(), getBackend(), query.getStartMillis() / 10,
                query.getEndMillis() / 10)
                        .filter(query.getFilterOrDefault(EventQuery.NO_OP_FILTER))
                        // misuse value modification
                        .peek(query.getValueTransformationOrDefault(ValueTransformation.NO_OP));
    }

    public Stream<ChannelEvent> getEventStream(EventQuery eventQuery, Stream<? extends StreamEventQuery> queryProviders) {
        Stream<ChannelEvent> result = queryProviders.map(ceq -> {
            if (ceq instanceof MetaStreamEventQuery) {
                return getEvent((MetaStreamEventQuery<ChannelEvent>) ceq);
            } else {
                throw new UnsupportedOperationException("This is not yet implemented!");
            }
        });

        return result;
    }

    public static Stream<ChannelEvent> getDummyEventStream(String channelParam, Backend backend, long startIndex,
            long endIndex, String... columns) {
        String channelLower = channelParam.toLowerCase();
        String channel =
                (columns == null || columns.length == 0 || ArrayUtils.contains(columns, FieldNames.FIELD_CHANNEL))
                        ? channelParam : null;

        LongStream millisRangeStream = null;

        if (channelParam.contains("[") && channelParam.contains("]")) {
            millisRangeStream =
                    Arrays.stream(
                            channelParam.substring(
                                    channelParam.indexOf("[") + 1,
                                    channelParam.indexOf("]"))
                                    .split(","))
                            .mapToLong(str -> Long.parseLong(str.trim()) * 10);
        } else if (channelParam.contains("{") && channelParam.contains("}")) {
            millisRangeStream =
                    Arrays.stream(
                            channelParam.substring(
                                    channelParam.indexOf("{") + 1,
                                    channelParam.indexOf("}"))
                                    .split(","))
                            .mapToLong(str -> Long.parseLong(str));
        } else {
            millisRangeStream = LongStream.rangeClosed(startIndex * 10, endIndex * 10)
                    .filter(val -> val % 10 == 0);
        }

        Stream<ChannelEvent> eventStream =
                millisRangeStream.mapToObj(
                        millis -> {
                            long i = millis / 10;
                            BigDecimal iocTime =
                                    (columns == null || columns.length == 0 || ArrayUtils.contains(columns,
                                            FieldNames.FIELD_IOC_TIME)) ? TimeUtils.getTimeFromMillis(millis, 0)
                                                    : PropertiesUtils.DEFAULT_VALUE_DECIMAL;
                            BigDecimal globalTime =
                                    (columns == null || columns.length == 0 || ArrayUtils.contains(columns,
                                            FieldNames.FIELD_GLOBAL_TIME)) ? TimeUtils.getTimeFromMillis(millis, 0)
                                                    : PropertiesUtils.DEFAULT_VALUE_DECIMAL;
                            long pulseId =
                                    (columns == null || columns.length == 0 || ArrayUtils.contains(columns,
                                            FieldNames.FIELD_PULSE_ID)) ? i : PropertiesUtils.DEFAULT_VALUE_BIGINT_PRIMITIVE;

                            if (channelLower.contains("waveform")) {
                                long[] value = random.longs(8).toArray();
                                value[0] = i;
                                value[1] = i;
                                return new ChannelEventImpl(
                                        channel,
                                        backend,
                                        iocTime,
                                        pulseId,
                                        globalTime,
                                        KEYSPACE,
                                        value);

                            } else if (channelLower.contains("image")) {
                                int x = 80;
                                int y = 40;
                                int[] shape = new int[] {x, y};
                                short[] value = new short[x * y];
                                IntStream.range(0, value.length)
                                        .forEach(index -> value[index] = UShortByteValueConverter.convertVal(random.nextInt()));
                                value[0] = UShortByteValueConverter.convertVal((int) i);
                                value[1] = UShortByteValueConverter.convertVal((int) i);
                                return new ChannelEventImpl(
                                        channel,
                                        backend,
                                        iocTime,
                                        pulseId,
                                        globalTime,
                                        KEYSPACE,
                                        value,
                                        shape);
                            } else {
                                return new ChannelEventImpl(
                                        channel,
                                        backend,
                                        iocTime,
                                        pulseId,
                                        globalTime,
                                        KEYSPACE,
                                        i);
                            }
                        });

        return eventStream;
    }

    private List<? extends DataEvent> getDummyEvents(String channel, long startIndex, long endIndex, String... columns) {
        return getDummyEventStream(channel, getBackend(), startIndex, endIndex, columns).collect(Collectors.toList());
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public List<String> getChannels() {
        return Lists.newArrayList(channels);
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public List<String> getChannels(String regex) {
        return Lists.newArrayList(channels).stream().filter(s -> {
            return s.contains(regex);
        }).collect(Collectors.toList());
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public ChannelEvent getEvent(MetaStreamEventQuery<ChannelEvent> queryInfo, String... columns) {
        if (queryInfo.getPulseId() > 0) {
            return (ChannelEvent) getDummyEvents(queryInfo.getChannel(), queryInfo.getPulseId(), queryInfo.getPulseId(),
                    columns)
                            .get(0);
        }
        return (ChannelEvent) getDummyEvents(queryInfo.getChannel(), queryInfo.getGlobalMillis() / 10,
                queryInfo.getGlobalMillis() / 10).get(0);
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public CompletableFuture<ChannelEvent> getEventAsync(MetaStreamEventQuery<ChannelEvent> queryInfo, String... columns) {
        // implement when needed
        throw new UnsupportedOperationException();
    }

    // /**
    // * {@inheritDoc}
    // */
    // @Override
    // public Stream<? extends StreamEventQuery> getStreamEventQueryStream(PulseIdRangeQuery query) {
    //
    // return dataGen.generateMetaPulseId(
    // query.getStartPulseId(),
    // (query.getEndPulseId() - query.getStartPulseId() + 1),
    // i -> i * 10,
    // i -> 0,
    // i -> i,
    // query.getChannel())
    // .stream()
    // .map(metaPulse -> {
    // metaPulse.setKeyspace(KEYSPACE);
    // return metaPulse;
    // });
    // }

    public Stream<? extends StreamEventQuery> getStreamEventQueryStream(TimeRangeQuery query) {
        return dataGen.generateMetaTime(
                KEYSPACE,
                3600,
                query.getStartMillis() / 10,
                ((query.getEndMillis() - query.getStartMillis()) / 10 + 1),
                i -> i * 10,
                i -> 0,
                i -> i,
                getBackend(),
                query.getChannel()).stream();
    }

    // /**
    // * {@inheritDoc}
    // */
    // @Override
    // public Stream<MetaPulseId> getMetaStream(PulseIdRangeQuery query) {
    //
    // return getStreamEventQueryStream(query).map(r -> {
    // return (MetaPulseId) r;
    // });
    //
    // }

    /**
     * {@inheritDoc}
     */
    @Override
    public Stream<? extends MetaStreamEventQuery<ChannelEvent>> getMetaStream(TimeRangeQuery query) {
        return getStreamEventQueryStream(query).map(r -> {
            return (MetaStreamEventQuery<ChannelEvent>) r;
        });
    }

    @Override
    public Stream<ChannelEvent> getEventStream(Stream<? extends MetaStreamEventQuery<ChannelEvent>> queryInfos,
            String... columns) {
        return getEventStream(null, queryInfos);
    }

    @Override
    public Stream<ChannelConfiguration> getChannelConfiguration(TimeRangeQuery query) {
        List<ChannelConfiguration> configs = new ArrayList<>();

        BigDecimal time = query.getStartTime();
        configs.add(
                new ChannelConfigurationImpl(
                        query.getChannel(),
                        time,
                        TimeUtils.getMillis(time) / 10,
                        0,
                        Type.Int32.getKey(),
                        new int[] {1},
                        false,
                        ChannelConfiguration.DEFAULT_LOCAL_WRITE,
                        ChannelConfiguration.DEFAULT_BIN_SIZE_IN_MILLIS,
                        ChannelConfiguration.SPLIT_COUNT,
                        ChannelConfiguration.DEFAULT_SOURCE,
                        ChannelConfiguration.DEFAULT_MODULO,
                        ChannelConfiguration.DEFAULT_OFFSET,
                        Backend.SF_DATABUFFER));
        if (query.getEndMillis() > query.getStartMillis()) {
            time = query.getEndTime();
            configs.add(
                    new ChannelConfigurationImpl(
                            query.getChannel(),
                            time,
                            TimeUtils.getMillis(time) / 10,
                            1,
                            Type.Int32.getKey(),
                            new int[] {1},
                            false,
                            ChannelConfiguration.DEFAULT_LOCAL_WRITE,
                            ChannelConfiguration.DEFAULT_BIN_SIZE_IN_MILLIS,
                            ChannelConfiguration.SPLIT_COUNT,
                            ChannelConfiguration.DEFAULT_SOURCE,
                            ChannelConfiguration.DEFAULT_MODULO,
                            ChannelConfiguration.DEFAULT_OFFSET,
                            Backend.SF_DATABUFFER));
        }

        if (Ordering.desc.equals(query.getOrdering())) {
            Collections.reverse(configs);
        }

        return configs.stream();
    }

    @Override
    public TimeRangeQuery getTimeRangeQuery(PulseIdRangeQuery query) {
        return new TimeRangeQuery(
                TimeUtils.getTimeFromMillis(query.getStartPulseId() * 10, 0),
                TimeUtils.getTimeFromMillis(query.getEndPulseId() * 10, 0),
                query);
    }

    @Override
    public Stream<? extends ChannelInfo> getChannelInfoStream(TimeRangeQuery query) {
        return getChannelConfiguration(query)
                .map(channelConfiguration -> new ChannelInfoImpl(channelConfiguration));
    }

    @Override
    public void truncateCache() {}

    @Override
    public CompletableFuture<MetaPulseId> getStartMetaPulseIdAsync(PulseIdRangeQuery query) {
        return CompletableFuture.completedFuture(new MetaPulseIdImpl(query.getChannel(), getBackend(),
                query.getStartPulseId(),
                TimeUtils.getTimeFromMillis(query.getStartPulseId() * 10, 0)));
    }

    @Override
    public CompletableFuture<MetaPulseId> getEndMetaPulseIdAsync(PulseIdRangeQuery query) {
        return CompletableFuture.completedFuture(new MetaPulseIdImpl(query.getChannel(), getBackend(),
                query.getEndPulseId(),
                TimeUtils.getTimeFromMillis(query.getEndPulseId() * 10, 0)));
    }
}
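The deleted reader above is what fed the earlier test assertions, so its index convention is worth spelling out: a pulse id i corresponds to millis i * 10, and getDummyEventStream emits one event per pulse in the closed range. A small usage sketch, assuming only the static method and the constants from the file above (the channel name is arbitrary):

import java.util.List;
import java.util.stream.Collectors;

import ch.psi.daq.domain.backend.Backend;
import ch.psi.daq.domain.events.ChannelEvent;

public class DummyStreamSketch {
    public static void main(String[] args) {
        // pulse ids 100..101 map to millis 1000 and 1010, so exactly two
        // events come back -- matching the pulseId/globalSeconds assertions
        // in the controller tests earlier in this diff
        List<ChannelEvent> events = AbstractStreamEventReader
                .getDummyEventStream("Int32Scalar", Backend.SF_DATABUFFER, 100, 101)
                .collect(Collectors.toList());
        System.out.println(events.size()); // 2
    }
}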
@ -1,74 +0,0 @@
package ch.psi.daq.test.queryrest.query;

import java.util.List;
import java.util.concurrent.atomic.AtomicLong;
import java.util.regex.Pattern;
import java.util.stream.Stream;

import com.google.common.collect.Lists;

import ch.psi.daq.common.time.TimeUtils;
import ch.psi.daq.domain.StreamEvent;
import ch.psi.daq.domain.backend.Backend;
import ch.psi.daq.domain.query.event.EventQuery;
import ch.psi.daq.domain.query.range.PulseIdRangeQuery;
import ch.psi.daq.domain.query.range.TimeRangeQuery;
import ch.psi.daq.domain.query.transform.ValueTransformation;
import ch.psi.daq.domain.reader.DataReader;

public class DummyArchiverApplianceReader implements DataReader {
    public static final String TEST_CHANNEL = Backend.SF_ARCHIVERAPPLIANCE.getKey() + "_TestChannel_";

    public static final String TEST_CHANNEL_1 = Backend.SF_ARCHIVERAPPLIANCE.getKey() + "_Channel_1";
    public static final String TEST_CHANNEL_2 = Backend.SF_ARCHIVERAPPLIANCE.getKey() + "_Channel_2";
    private List<String> channels = Lists.newArrayList(TEST_CHANNEL_1, TEST_CHANNEL_2);

    private AtomicLong channelNameCallCounter = new AtomicLong();

    @Override
    public Backend getBackend() {
        return Backend.SF_ARCHIVERAPPLIANCE;
    }

    @Override
    public Stream<String> getChannelStream(String regex) {
        channels.add(TEST_CHANNEL + channelNameCallCounter.incrementAndGet());

        Stream<String> channelStream = channels.stream();
        if (regex != null) {
            Pattern pattern = Pattern.compile(regex);
            channelStream = channelStream.filter(channel -> pattern.matcher(channel).find());
        }

        return channelStream;
    }

    // @Override
    // public Stream<? extends StreamEvent> getEventStream(PulseIdRangeQuery query) {
    // return DummyCassandraReader.getDummyEventStream(query.getChannel(), query.getStartPulseId(),
    // query.getEndPulseId(),
    // query.getEventColumns())
    // .filter(query.getFilterOrDefault(EventQuery.NO_OP_FILTER));
    // }

    @Override
    public Stream<? extends StreamEvent> getEventStream(TimeRangeQuery query) {
        return DummyCassandraReader.getDummyEventStream(query.getChannel(), getBackend(), query.getStartMillis() / 10,
                query.getEndMillis() / 10)
                        .filter(query.getFilterOrDefault(EventQuery.NO_OP_FILTER))
                        // misuse value modification
                        .peek(query.getValueTransformationOrDefault(ValueTransformation.NO_OP));
    }

    @Override
    public TimeRangeQuery getTimeRangeQuery(PulseIdRangeQuery query) {
        return new TimeRangeQuery(
                TimeUtils.getTimeFromMillis(query.getStartPulseId() * 10, 0),
                TimeUtils.getTimeFromMillis(query.getEndPulseId() * 10, 0),
                query);
    }

    @Override
    public void truncateCache() {}
}
@ -1,76 +0,0 @@
package ch.psi.daq.test.queryrest.query;

import java.util.concurrent.CompletableFuture;
import java.util.concurrent.TimeUnit;

import javax.annotation.PostConstruct;

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import ch.psi.daq.cassandra.reader.CassandraReader;
import ch.psi.daq.common.time.TimeUtils;
import ch.psi.daq.domain.backend.Backend;
import ch.psi.daq.domain.events.ChannelConfiguration;
import ch.psi.daq.domain.events.MetaPulseId;
import ch.psi.daq.domain.events.impl.MetaPulseIdImpl;
import ch.psi.daq.domain.query.range.PulseIdRangeQuery;
import ch.psi.daq.domain.query.range.TimeRangeQuery;

public class DummyCassandraReader extends AbstractStreamEventReader implements CassandraReader {
    private static final Logger LOGGER = LoggerFactory.getLogger(DummyCassandraReader.class);

    public DummyCassandraReader() {
    }

    @PostConstruct
    public void afterPropertiesSet() {
        init(Backend.SF_DATABUFFER);
    }

    @Override
    public ChannelConfiguration getChannelConfigurationBefore(TimeRangeQuery query) {
        try {
            return getChannelConfigurationBeforeAsync(query)
                    .get(30, TimeUnit.SECONDS);
        } catch (Throwable t) {
            LOGGER.error("Could not read ChannelConfiguration from DB.", t);
            return null;
        }
    }

    @Override
    public CompletableFuture<ChannelConfiguration> getChannelConfigurationBeforeAsync(TimeRangeQuery query) {
        // implement when needed
        throw new UnsupportedOperationException();
    }

    @Override
    public ChannelConfiguration getChannelConfigurationAfter(TimeRangeQuery query) {
        try {
            return getChannelConfigurationAfterAsync(query)
                    .get(30, TimeUnit.SECONDS);
        } catch (Throwable t) {
            LOGGER.error("Could not read ChannelConfiguration from DB.", t);
            return null;
        }
    }

    @Override
    public CompletableFuture<ChannelConfiguration> getChannelConfigurationAfterAsync(TimeRangeQuery query) {
        // implement when needed
        throw new UnsupportedOperationException();
    }

    @Override
    public CompletableFuture<MetaPulseId> getStartMetaPulseIdAsync(PulseIdRangeQuery query) {
        return CompletableFuture.completedFuture(new MetaPulseIdImpl(query.getChannel(), getBackend(), query.getStartPulseId(),
                TimeUtils.getTimeFromMillis(query.getStartPulseId() * 10, 0)));
    }

    @Override
    public CompletableFuture<MetaPulseId> getEndMetaPulseIdAsync(PulseIdRangeQuery query) {
        return CompletableFuture.completedFuture(new MetaPulseIdImpl(query.getChannel(), getBackend(), query.getEndPulseId(),
                TimeUtils.getTimeFromMillis(query.getEndPulseId() * 10, 0)));
    }
}
@ -1,16 +0,0 @@
package ch.psi.daq.test.queryrest.query;

import javax.annotation.PostConstruct;

import ch.psi.daq.domain.backend.Backend;

public class DummyFilestorageReader extends AbstractStreamEventReader {

    public DummyFilestorageReader() {
    }

    @PostConstruct
    public void afterPropertiesSet() {
        init(Backend.SF_IMAGEBUFFER);
    }
}
@ -7,6 +7,7 @@ import static org.junit.Assert.assertTrue;

import java.io.IOException;

import javax.annotation.PostConstruct;
import javax.annotation.Resource;

import org.junit.Test;
@ -15,6 +16,8 @@ import com.fasterxml.jackson.core.JsonParseException;
import com.fasterxml.jackson.databind.JsonMappingException;
import com.fasterxml.jackson.databind.ObjectMapper;

import ch.psi.daq.domain.backend.Backend;
import ch.psi.daq.domain.config.DomainConfig;
import ch.psi.daq.domain.query.DAQQuery;
import ch.psi.daq.domain.query.operation.Compression;
import ch.psi.daq.domain.query.response.Response;
@ -25,9 +28,15 @@ import ch.psi.daq.test.queryrest.AbstractDaqRestTest;

public class ResponseQueryTest extends AbstractDaqRestTest {

    @Resource
    @Resource(name = DomainConfig.BEAN_NAME_BACKEND_DEFAULT)
    private Backend backend;
    private ObjectMapper mapper;

    @PostConstruct
    public void afterPropertiesSet() {
        mapper = backend.getApplicationContext().getBean(DomainConfig.BEAN_NAME_OBJECT_MAPPER, ObjectMapper.class);
    }

    @Test
    public void test_JSON_01() throws JsonParseException, JsonMappingException, IOException {
        Response respose = new CSVHTTPResponse();
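The key change in this hunk: the ObjectMapper is no longer injected into the test directly. Each Backend now carries its own ApplicationContext, and per-backend beans are looked up through it. Isolated, the pattern is just:

// per-backend bean lookup, exactly as in afterPropertiesSet() above
ObjectMapper mapper = backend.getApplicationContext()
        .getBean(DomainConfig.BEAN_NAME_OBJECT_MAPPER, ObjectMapper.class);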
@ -1,9 +1,19 @@
backend.default=sf-imagebuffer
backend.types=ch.psi.daq.filestorage.backend.FilestorageBackendType,ch.psi.daq.cassandra.backend.CassandraBackendType,ch.psi.daq.archiverappliance.backend.ArchiverApplianceBackendType
backend.default=queryrest-1
backends.active=["queryrest-1","queryrest-2","queryrest-3"]
backends=[{"id":255,"name":"queryrest-1","dbKey":255,"type":{"name":"filestorage"},"properties":["classpath:test-overload.properties"],"packages":["ch.psi.daq.filestorage.config"]},{"id":254,"name":"queryrest-2","dbKey":254,"type":{"name":"cassandra"},"properties":["classpath:test-overload2.properties"],"packages":["ch.psi.daq.cassandra.config"]},{"id":253,"name":"queryrest-3","dbKey":253,"type":{"name":"archiverappliance"},"properties":["classpath:test-overload3.properties"],"packages":["ch.psi.daq.archiverappliance.config"]}]

query.hazelcast.node=true

# the base for the keyspaces
domain.keyspace.base=daq_query_test

channelname.cache.reload.period=-1

# defines if the MainHeader should be checked and dropped (false -> check but not dropped)
dispatcher.validate.mainheader=false
dispatcher.validate.mainheader.log=false
query.min.time=1970-01-01T00:00:00.000000000+00:00
# enable/disable validation
filestorage.pulseidtime.store.validate.range=false
filestorage.pulseidtime.cache.commit.period=0

# overload test
timeout.delete=41
timeout.query=11
timeout.shutdown=31
filestorage.root.dir=${user.home}/daq/query_test
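The new backends property carries one JSON descriptor per backend: an id, the configurable name (which the tests above now assert on), a dbKey, a type, plus per-backend property files and configuration packages. The real binding classes are not part of this diff; the following Jackson sketch with a hypothetical POJO only illustrates the structure:

import java.util.List;

import com.fasterxml.jackson.databind.ObjectMapper;

public class BackendDescriptorSketch {
    // hypothetical POJO mirroring the JSON fields above; not the project's
    // actual binding type
    public static class BackendDescriptor {
        public int id;
        public String name;
        public int dbKey;
        public TypeRef type;
        public List<String> properties;
        public List<String> packages;
    }

    public static class TypeRef {
        public String name;
    }

    public static void main(String[] args) throws Exception {
        String json = "[{\"id\":255,\"name\":\"queryrest-1\",\"dbKey\":255,"
                + "\"type\":{\"name\":\"filestorage\"},"
                + "\"properties\":[\"classpath:test-overload.properties\"],"
                + "\"packages\":[\"ch.psi.daq.filestorage.config\"]}]";
        ObjectMapper om = new ObjectMapper();
        List<BackendDescriptor> backends = om.readValue(json,
                om.getTypeFactory().constructCollectionType(List.class, BackendDescriptor.class));
        System.out.println(backends.get(0).name); // queryrest-1
    }
}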
src/test/resources/test-overload.properties
@ -0,0 +1,19 @@
backend.init=lazy
filestorage.root.dir=${user.home}/daq/queryrest1

filestorage.hazelcast.node=true
filestorage.netty.node.writer=true
filestorage.netty.node.query=true

# enables/disables ttl based compaction
filestorage.compaction.ttl.enable=false

# enable/disable validation
filestorage.pulseidtime.store.validate.range=false
filestorage.pulseidtime.cache.commit.period=0

# overload test
timeout.delete=42
timeout.shutdown=32

test.initial.channels=true
src/test/resources/test-overload2.properties
@ -0,0 +1,11 @@
backend.init=lazy

cassandra.keyspace.config=[{"number":0,"replication":1,"note":"meta"},{"number":1,"replication":1,"note":"config_backup"},{"number":2,"replication":1,"note":"default_scalar"},{"number":3,"replication":1,"note":"default_waveform"},{"number":4,"replication":1,"note":"default_image"}]

# run a local cluster for the tests
cassandra.local.cluster=true

# overload test
timeout.delete=43

test.initial.channels=false
src/test/resources/test-overload3.properties
@ -0,0 +1,6 @@
backend.init=lazy

# overload test
timeout.delete=50

test.initial.channels=true
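Together with the base properties file above, the three new files exercise property overloading: the base sets timeout.delete=41 while the per-backend files override it with 42, 43, and 50, and values like timeout.query=11 remain visible from the base. A sketch of the precedence rule using plain Spring property sources (illustrative wiring, not the project's actual bootstrap):

import org.springframework.core.env.StandardEnvironment;
import org.springframework.core.io.support.ResourcePropertySource;

public class OverloadPrecedenceSketch {
    public static void main(String[] args) throws Exception {
        StandardEnvironment env = new StandardEnvironment();
        // base configuration at the end, backend-specific overload in front:
        // earlier sources win in Spring's MutablePropertySources
        env.getPropertySources().addLast(new ResourcePropertySource("classpath:queryrest.properties"));
        env.getPropertySources().addFirst(new ResourcePropertySource("classpath:test-overload.properties"));

        System.out.println(env.getProperty("timeout.delete")); // 42 (overload wins over base 41)
        System.out.println(env.getProperty("timeout.query"));  // 11 (only in the base file)
    }
}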