ATEST-123

This commit is contained in:
Fabian Märki
2015-07-31 10:12:52 +02:00
parent 107bddbd72
commit ebb904009d
6 changed files with 110 additions and 56 deletions

View File

@ -26,11 +26,9 @@ import ch.psi.daq.cassandra.util.test.CassandraDataGen;
import ch.psi.daq.common.json.deserialize.AttributeBasedDeserializer;
import ch.psi.daq.common.statistic.StorelessStatistics;
import ch.psi.daq.domain.DataEvent;
import ch.psi.daq.query.analyzer.ArchiverApplianceQueryAnalyzer;
import ch.psi.daq.query.analyzer.CassandraQueryAnalyzer;
import ch.psi.daq.query.analyzer.QueryAnalyzerImpl;
import ch.psi.daq.query.analyzer.QueryAnalyzer;
import ch.psi.daq.query.config.QueryConfig;
import ch.psi.daq.query.config.QueryRunMode;
import ch.psi.daq.query.model.AbstractQuery;
import ch.psi.daq.query.model.Aggregation;
import ch.psi.daq.query.model.Query;
@ -65,9 +63,6 @@ public class QueryRestConfig {
@Resource
private Environment env;
@Resource
private QueryRunMode queryRunMode;
@Bean
public ObjectMapper objectMapper() {
ObjectMapper mapper = new ObjectMapper();
@ -103,11 +98,7 @@ public class QueryRestConfig {
@Bean
public Function<Query, QueryAnalyzer> queryAnalizerFactory() {
if (QueryRunMode.archiverappliance.equals(queryRunMode)) {
return (query) -> new ArchiverApplianceQueryAnalyzer(query);
} else {
return (query) -> new CassandraQueryAnalyzer(query);
}
return (query) -> new QueryAnalyzerImpl(query);
}
@Bean

View File

@ -26,10 +26,12 @@ import ch.psi.daq.domain.DataEvent;
import ch.psi.daq.query.analyzer.QueryAnalyzer;
import ch.psi.daq.query.model.AbstractQuery;
import ch.psi.daq.query.model.Aggregation;
import ch.psi.daq.query.model.DBMode;
import ch.psi.daq.query.model.Query;
import ch.psi.daq.query.model.QueryField;
import ch.psi.daq.query.processor.QueryProcessor;
import ch.psi.daq.queryrest.config.QueryRestConfig;
import ch.psi.daq.queryrest.model.ChannelsRequest;
import ch.psi.daq.queryrest.response.ResponseStreamWriter;
@RestController
@ -38,51 +40,45 @@ public class QueryRestController {
private static final Logger LOGGER = LoggerFactory.getLogger(QueryRestController.class);
public static final String CHANNELS = "channels";
public static final String CHANNELS_REGEX = CHANNELS + "/{regex}";
public static final String QUERY = "query";
@Resource
private ResponseStreamWriter responseStreamWriter;
@Resource
private QueryProcessor queryProcessor;
private QueryProcessor cassandraQueryProcessor;
@Resource
private QueryProcessor archiverApplianceQueryProcessor;
@Resource
private Function<Query, QueryAnalyzer> queryAnalizerFactory;
@Resource(name = QueryRestConfig.BEAN_NAME_DEFAULT_RESPONSE_FIELDS)
private Set<QueryField> defaultResponseFields;
@Resource(name = QueryRestConfig.BEAN_NAME_DEFAULT_RESPONSE_AGGREGATIONS)
private Set<Aggregation> defaultResponseAggregations;
@RequestMapping(
value = CHANNELS,
method = RequestMethod.GET,
method = {RequestMethod.GET, RequestMethod.POST},
produces = {MediaType.APPLICATION_JSON_VALUE})
public @ResponseBody Collection<String> getChannels() throws Throwable {
public @ResponseBody Collection<String> getChannels(@RequestBody(required = false) ChannelsRequest request)
throws Throwable {
// in case not specified use default (e.g. GET)
if (request == null) {
request = new ChannelsRequest();
}
try {
return queryProcessor.getChannels();
return getQueryProcessor(request.getDbMode()).getChannels(request.getRegex());
} catch (Throwable t) {
LOGGER.error("Failed to query channel names.", t);
throw t;
}
}
@RequestMapping(
value = CHANNELS_REGEX,
method = RequestMethod.POST,
consumes = {MediaType.APPLICATION_JSON_VALUE},
produces = {MediaType.APPLICATION_JSON_VALUE})
public @ResponseBody Collection<String> getChannels(@RequestBody String regex) throws Throwable {
try {
return queryProcessor.getChannels(regex);
} catch (Throwable t) {
LOGGER.error("Failed to query channel names with regex '{}'.", regex, t);
throw t;
}
}
@RequestMapping(
value = QUERY,
method = RequestMethod.POST,
@ -98,7 +94,8 @@ public class QueryRestController {
extendQuery(query);
// all the magic happens here
Stream<Entry<String, Stream<? extends DataEvent>>> channelToDataEvents = queryProcessor.process(queryAnalizer);
Stream<Entry<String, Stream<? extends DataEvent>>> channelToDataEvents =
getQueryProcessor(query.getDBMode()).process(queryAnalizer);
// do post-process
Stream<Entry<String, ?>> channelToData = queryAnalizer.postProcess(channelToDataEvents);
@ -111,11 +108,22 @@ public class QueryRestController {
}
}
/**
 * Resolves the {@link QueryProcessor} that backs the requested DB mode.
 *
 * @param dbMode the storage backend the client asked for
 * @return the Cassandra-backed processor for {@code databuffer}, or the
 *         archiver-appliance processor for {@code archiverappliance}
 * @throws IllegalArgumentException if {@code dbMode} is {@code null} or unknown
 */
private QueryProcessor getQueryProcessor(DBMode dbMode) {
   // equals() is called on the constant side so a null dbMode safely
   // falls through to the error path instead of throwing an NPE here.
   if (DBMode.databuffer.equals(dbMode)) {
      return cassandraQueryProcessor;
   }
   if (DBMode.archiverappliance.equals(dbMode)) {
      return archiverApplianceQueryProcessor;
   }
   LOGGER.error("Unknown DBMode '{}'!", dbMode);
   throw new IllegalArgumentException(String.format("Unknown DBMode '%s'", dbMode));
}
private void extendQuery(AbstractQuery query) {
if (query.getFields() == null || query.getFields().isEmpty()) {
query.setFields(new LinkedHashSet<>(defaultResponseFields));
}
if(query.getAggregations() == null || query.getAggregations().isEmpty()){
if (query.getAggregations() == null || query.getAggregations().isEmpty()) {
query.setAggregations(new LinkedList<>(defaultResponseAggregations));
}
}

View File

@ -0,0 +1,37 @@
package ch.psi.daq.queryrest.model;

import com.fasterxml.jackson.annotation.JsonInclude;
import com.fasterxml.jackson.annotation.JsonInclude.Include;

import ch.psi.daq.query.model.DBMode;

/**
 * Request body for channel-name queries.
 * Transported as a POST body (rather than a path variable) because a regex
 * may contain characters that are awkward to place in a URL.
 */
@JsonInclude(Include.NON_DEFAULT)
public class ChannelsRequest {

   // Backend to query; defaults to the data buffer.
   private DBMode dbMode = DBMode.databuffer;
   // Channel-name filter pattern; null means "return all channels".
   private String regex = null;

   public ChannelsRequest() {}

   public ChannelsRequest(DBMode dbMode, String regex) {
      this.dbMode = dbMode;
      this.regex = regex;
   }

   public DBMode getDbMode() {
      return dbMode;
   }

   public void setDbMode(DBMode dbMode) {
      this.dbMode = dbMode;
   }

   public String getRegex() {
      return regex;
   }

   public void setRegex(String regex) {
      this.regex = regex;
   }
}

View File

@ -4,4 +4,15 @@ server.port=8080
# defines the fields that are included in the response
# if no fields have been specified by the user
queryrest.default.response.fields=channel,pulseId,globalMillis,globalNanos,value
queryrest.default.response.aggregations=min,max,sum
queryrest.default.response.aggregations=min,max,sum
# defines the list of hosts who are tried for an initial connection to the cluster
hazelcast.query.initialcandidates=localhost
cassandra.basekeyspace=daq_query_test
# defines the cluster group and its password
hazelcast.query.group.name=QueryClusterTest
hazelcast.query.group.password=a3&PvvHh7f#6HjAx5Da$

View File

@ -8,9 +8,8 @@ import org.springframework.context.annotation.PropertySources;
import org.springframework.web.servlet.config.annotation.EnableWebMvc;
import org.springframework.web.servlet.config.annotation.WebMvcConfigurationSupport;
import ch.psi.daq.domain.reader.DataReader;
import ch.psi.daq.query.processor.QueryProcessorLocal;
import ch.psi.daq.query.processor.QueryProcessor;
import ch.psi.daq.query.processor.cassandra.CassandraQueryProcessorLocal;
import ch.psi.daq.test.query.config.LocalQueryTestConfig;
import ch.psi.daq.test.queryrest.query.DummyDataReader;
@ -26,14 +25,9 @@ public class DaqWebMvcConfig extends WebMvcConfigurationSupport {
@Import(value = {LocalQueryTestConfig.class})
static class InnerConfiguration {
}
@Bean
public QueryProcessor queryProcessor() {
return new CassandraQueryProcessorLocal();
return new QueryProcessorLocal(new DummyDataReader());
}
@Bean
public DataReader dataReader() {
return new DummyDataReader();
}
}
}